/NAudio.Win8/Wave/WaveOutputs/WasapiOutRT.cs

# · C# · 647 lines · 439 code · 80 blank · 128 comment · 51 complexity · 3a70521f365a9e83964100b746dace37 MD5 · raw file

  1. using System;
  2. using System.Linq;
  3. using System.Runtime.CompilerServices;
  4. using System.Runtime.InteropServices;
  5. using System.Threading;
  6. using System.Threading.Tasks;
  7. using Windows.System.Threading;
  8. using NAudio.CoreAudioApi;
  9. using NAudio.CoreAudioApi.Interfaces;
  10. using NAudio.Dsp;
  11. using NAudio.Wave;
  12. using Windows.Media.Devices;
  13. using NAudio.Wave.SampleProviders;
  14. namespace NAudio.Win8.Wave.WaveOutputs
  15. {
    /// <summary>
    /// Internal lifecycle states for <see cref="WasapiOutRT"/>.
    /// Transitions are driven by the public Play/Stop/Pause/Dispose calls and
    /// observed by the playback thread; the field holding this is volatile.
    /// </summary>
    enum WasapiOutState
    {
        Uninitialized,
        Stopped,
        Paused,
        Playing,
        Stopping,   // drain remaining buffered audio, then stop
        Disposing,  // stop immediately and tear down on the playback thread
        Disposed
    }
  26. /// <summary>
  27. /// WASAPI Out for Windows RT
  28. /// </summary>
  29. public class WasapiOutRT : IWavePlayer
  30. {
  31. private AudioClient audioClient;
  32. private readonly string device;
  33. private readonly AudioClientShareMode shareMode;
  34. private AudioRenderClient renderClient;
  35. private int latencyMilliseconds;
  36. private int bufferFrameCount;
  37. private int bytesPerFrame;
  38. private byte[] readBuffer;
  39. private volatile WasapiOutState playbackState;
  40. private WaveFormat outputFormat;
  41. private bool resamplerNeeded;
  42. private IntPtr frameEventWaitHandle;
  43. private readonly SynchronizationContext syncContext;
  44. private bool isInitialized;
  45. private readonly AutoResetEvent playThreadEvent;
  46. /// <summary>
  47. /// Playback Stopped
  48. /// </summary>
  49. public event EventHandler<StoppedEventArgs> PlaybackStopped;
  50. /// <summary>
  51. /// WASAPI Out using default audio endpoint
  52. /// </summary>
  53. /// <param name="shareMode">ShareMode - shared or exclusive</param>
  54. /// <param name="latency">Desired latency in milliseconds</param>
  55. public WasapiOutRT(AudioClientShareMode shareMode, int latency) :
  56. this(GetDefaultAudioEndpoint(), shareMode, latency)
  57. {
  58. }
  59. /// <summary>
  60. /// Creates a new WASAPI Output
  61. /// </summary>
  62. /// <param name="device">Device to use</param>
  63. /// <param name="shareMode"></param>
  64. /// <param name="latency"></param>
  65. public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency)
  66. {
  67. this.device = device;
  68. this.shareMode = shareMode;
  69. this.latencyMilliseconds = latency;
  70. this.syncContext = SynchronizationContext.Current;
  71. playThreadEvent = new AutoResetEvent(false);
  72. }
  73. /// <summary>
  74. /// Properties of the client's audio stream.
  75. /// Set before calling init
  76. /// </summary>
  77. private AudioClientProperties? audioClientProperties = null;
  78. private Func<IWaveProvider> waveProviderFunc;
  79. /// <summary>
  80. /// Sets the parameters that describe the properties of the client's audio stream.
  81. /// </summary>
  82. /// <param name="useHardwareOffload">Boolean value to indicate whether or not the audio stream is hardware-offloaded.</param>
  83. /// <param name="category">An enumeration that is used to specify the category of the audio stream.</param>
  84. /// <param name="options">A bit-field describing the characteristics of the stream. Supported in Windows 8.1 and later.</param>
  85. public void SetClientProperties(bool useHardwareOffload, AudioStreamCategory category, AudioClientStreamOptions options)
  86. {
  87. audioClientProperties = new AudioClientProperties()
  88. {
  89. cbSize = (uint) Marshal.SizeOf(typeof (AudioClientProperties)),
  90. bIsOffload = Convert.ToInt32(useHardwareOffload),
  91. eCategory = category,
  92. Options = options
  93. };
  94. }
  95. private async Task Activate()
  96. {
  97. var icbh = new ActivateAudioInterfaceCompletionHandler(
  98. ac2 =>
  99. {
  100. if (this.audioClientProperties != null)
  101. {
  102. IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value));
  103. Marshal.StructureToPtr(this.audioClientProperties.Value, p, false);
  104. ac2.SetClientProperties(p);
  105. // TODO: consider whether we can marshal this without the need for AllocHGlobal
  106. }
  107. /*var wfx = new WaveFormat(44100, 16, 2);
  108. int hr = ac2.Initialize(AudioClientShareMode.Shared,
  109. AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist,
  110. 10000000, 0, wfx, IntPtr.Zero);*/
  111. });
  112. var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
  113. IActivateAudioInterfaceAsyncOperation activationOperation;
  114. NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
  115. var audioClient2 = await icbh;
  116. this.audioClient = new AudioClient((IAudioClient)audioClient2);
  117. }
  118. private static string GetDefaultAudioEndpoint()
  119. {
  120. // can't use the MMDeviceEnumerator in WinRT
  121. return MediaDevice.GetDefaultAudioRenderId(AudioDeviceRole.Default);
  122. }
  123. private async void PlayThread()
  124. {
  125. await Activate();
  126. var playbackProvider = Init();
  127. bool isClientRunning = false;
  128. try
  129. {
  130. if (this.resamplerNeeded)
  131. {
  132. var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
  133. playbackProvider = new SampleToWaveProvider(resampler);
  134. }
  135. // fill a whole buffer
  136. bufferFrameCount = audioClient.BufferSize;
  137. bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8;
  138. readBuffer = new byte[bufferFrameCount*bytesPerFrame];
  139. FillBuffer(playbackProvider, bufferFrameCount);
  140. int timeout = 3 * latencyMilliseconds;
  141. while (playbackState != WasapiOutState.Disposed)
  142. {
  143. if (playbackState != WasapiOutState.Playing)
  144. {
  145. playThreadEvent.WaitOne(500);
  146. }
  147. // If still playing and notification is ok
  148. if (playbackState == WasapiOutState.Playing)
  149. {
  150. if (!isClientRunning)
  151. {
  152. audioClient.Start();
  153. isClientRunning = true;
  154. }
  155. // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
  156. var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
  157. if (r != 0) throw new InvalidOperationException("Timed out waiting for event");
  158. // See how much buffer space is available.
  159. int numFramesPadding = 0;
  160. // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
  161. numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
  162. int numFramesAvailable = bufferFrameCount - numFramesPadding;
  163. if (numFramesAvailable > 0)
  164. {
  165. FillBuffer(playbackProvider, numFramesAvailable);
  166. }
  167. }
  168. if (playbackState == WasapiOutState.Stopping)
  169. {
  170. // play the buffer out
  171. while (audioClient.CurrentPadding > 0)
  172. {
  173. await Task.Delay(latencyMilliseconds / 2);
  174. }
  175. audioClient.Stop();
  176. isClientRunning = false;
  177. audioClient.Reset();
  178. playbackState = WasapiOutState.Stopped;
  179. RaisePlaybackStopped(null);
  180. }
  181. if (playbackState == WasapiOutState.Disposing)
  182. {
  183. audioClient.Stop();
  184. isClientRunning = false;
  185. audioClient.Reset();
  186. playbackState = WasapiOutState.Disposed;
  187. var disposablePlaybackProvider = playbackProvider as IDisposable;
  188. if (disposablePlaybackProvider!=null)
  189. disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
  190. RaisePlaybackStopped(null);
  191. }
  192. }
  193. }
  194. catch (Exception e)
  195. {
  196. RaisePlaybackStopped(e);
  197. }
  198. finally
  199. {
  200. audioClient.Dispose();
  201. audioClient = null;
  202. renderClient = null;
  203. NativeMethods.CloseHandle(frameEventWaitHandle);
  204. }
  205. }
  206. private void RaisePlaybackStopped(Exception e)
  207. {
  208. var handler = PlaybackStopped;
  209. if (handler != null)
  210. {
  211. if (this.syncContext == null)
  212. {
  213. handler(this, new StoppedEventArgs(e));
  214. }
  215. else
  216. {
  217. syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null);
  218. }
  219. }
  220. }
  221. private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
  222. {
  223. IntPtr buffer = renderClient.GetBuffer(frameCount);
  224. int readLength = frameCount*bytesPerFrame;
  225. int read = playbackProvider.Read(readBuffer, 0, readLength);
  226. if (read == 0)
  227. {
  228. playbackState = WasapiOutState.Stopping;
  229. }
  230. Marshal.Copy(readBuffer, 0, buffer, read);
  231. int actualFrameCount = read/bytesPerFrame;
  232. /*if (actualFrameCount != frameCount)
  233. {
  234. Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
  235. }*/
  236. renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
  237. }
  238. #region IWavePlayer Members
  239. /// <summary>
  240. /// Begin Playback
  241. /// </summary>
  242. public void Play()
  243. {
  244. if (playbackState != WasapiOutState.Playing)
  245. {
  246. playbackState = WasapiOutState.Playing;
  247. playThreadEvent.Set();
  248. }
  249. }
  250. /// <summary>
  251. /// Stop playback and flush buffers
  252. /// </summary>
  253. public void Stop()
  254. {
  255. if (playbackState == WasapiOutState.Playing || playbackState == WasapiOutState.Paused)
  256. {
  257. playbackState = WasapiOutState.Stopping;
  258. playThreadEvent.Set();
  259. }
  260. }
  261. /// <summary>
  262. /// Stop playback without flushing buffers
  263. /// </summary>
  264. public void Pause()
  265. {
  266. if (playbackState == WasapiOutState.Playing)
  267. {
  268. playbackState = WasapiOutState.Paused;
  269. playThreadEvent.Set();
  270. }
  271. }
  272. /// <summary>
  273. /// Old init implementation. Use the func one
  274. /// </summary>
  275. /// <param name="provider"></param>
  276. /// <returns></returns>
  277. [Obsolete]
  278. public async Task Init(IWaveProvider provider)
  279. {
  280. Init(() => provider);
  281. }
  282. /// <summary>
  283. /// Initializes with a function to create the provider that is made on the playback thread
  284. /// </summary>
  285. /// <param name="waveProviderFunc">Creates the wave provider</param>
  286. public void Init(Func<IWaveProvider> waveProviderFunc)
  287. {
  288. if (isInitialized) throw new InvalidOperationException("Already Initialized");
  289. isInitialized = true;
  290. this.waveProviderFunc = waveProviderFunc;
  291. ThreadPool.RunAsync(s => PlayThread());
  292. }
  293. /// <summary>
  294. /// Initialize for playing the specified wave stream
  295. /// </summary>
  296. private IWaveProvider Init()
  297. {
  298. var waveProvider = waveProviderFunc();
  299. long latencyRefTimes = latencyMilliseconds*10000;
  300. outputFormat = waveProvider.WaveFormat;
  301. // first attempt uses the WaveFormat from the WaveStream
  302. WaveFormatExtensible closestSampleRateFormat;
  303. if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
  304. {
  305. // Use closesSampleRateFormat (in sharedMode, it equals usualy to the audioClient.MixFormat)
  306. // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
  307. // They say : "In shared mode, the audio engine always supports the mix format"
  308. // The MixFormat is more likely to be a WaveFormatExtensible.
  309. if (closestSampleRateFormat == null)
  310. {
  311. WaveFormat correctSampleRateFormat = audioClient.MixFormat;
  312. /*WaveFormat.CreateIeeeFloatWaveFormat(
  313. audioClient.MixFormat.SampleRate,
  314. audioClient.MixFormat.Channels);*/
  315. if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
  316. {
  317. // Iterate from Worst to Best Format
  318. WaveFormatExtensible[] bestToWorstFormats =
  319. {
  320. new WaveFormatExtensible(
  321. outputFormat.SampleRate, 32,
  322. outputFormat.Channels),
  323. new WaveFormatExtensible(
  324. outputFormat.SampleRate, 24,
  325. outputFormat.Channels),
  326. new WaveFormatExtensible(
  327. outputFormat.SampleRate, 16,
  328. outputFormat.Channels),
  329. };
  330. // Check from best Format to worst format ( Float32, Int24, Int16 )
  331. for (int i = 0; i < bestToWorstFormats.Length; i++)
  332. {
  333. correctSampleRateFormat = bestToWorstFormats[i];
  334. if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
  335. {
  336. break;
  337. }
  338. correctSampleRateFormat = null;
  339. }
  340. // If still null, then test on the PCM16, 2 channels
  341. if (correctSampleRateFormat == null)
  342. {
  343. // Last Last Last Chance (Thanks WASAPI)
  344. correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
  345. if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
  346. {
  347. throw new NotSupportedException("Can't find a supported format to use");
  348. }
  349. }
  350. }
  351. outputFormat = correctSampleRateFormat;
  352. }
  353. else
  354. {
  355. outputFormat = closestSampleRateFormat;
  356. }
  357. // just check that we can make it.
  358. //using (new MediaFoundationResampler(waveProvider, outputFormat))
  359. {
  360. }
  361. this.resamplerNeeded = true;
  362. }
  363. else
  364. {
  365. resamplerNeeded = false;
  366. }
  367. // Init Shared or Exclusive
  368. if (shareMode == AudioClientShareMode.Shared)
  369. {
  370. // With EventCallBack and Shared,
  371. audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
  372. outputFormat, Guid.Empty);
  373. // Get back the effective latency from AudioClient
  374. latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
  375. }
  376. else
  377. {
  378. // With EventCallBack and Exclusive, both latencies must equals
  379. audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
  380. outputFormat, Guid.Empty);
  381. }
  382. // Create the Wait Event Handle
  383. frameEventWaitHandle = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
  384. audioClient.SetEventHandle(frameEventWaitHandle);
  385. // Get the RenderClient
  386. renderClient = audioClient.AudioRenderClient;
  387. return waveProvider;
  388. }
  389. /// <summary>
  390. /// Playback State
  391. /// </summary>
  392. public PlaybackState PlaybackState
  393. {
  394. get
  395. {
  396. switch (playbackState)
  397. {
  398. case WasapiOutState.Playing:
  399. return PlaybackState.Playing;
  400. case WasapiOutState.Paused:
  401. return PlaybackState.Paused;
  402. default:
  403. return PlaybackState.Stopped;
  404. }
  405. }
  406. }
  407. #endregion
  408. /// <summary>
  409. /// Dispose
  410. /// </summary>
  411. public void Dispose()
  412. {
  413. if (audioClient != null)
  414. {
  415. playbackState = WasapiOutState.Disposing;
  416. playThreadEvent.Set();
  417. }
  418. }
  419. }
    /// <summary>
    /// Some useful native methods for Windows 8 support
    /// </summary>
    class NativeMethods
    {
        // Creates a Win32 event object; used here to make the handle WASAPI signals
        // in event-driven buffering (see AudioClient.SetEventHandle).
        [DllImport("kernel32.dll", CharSet = CharSet.Unicode, ExactSpelling = false, PreserveSig = true,
            SetLastError = true)]
        internal static extern IntPtr CreateEventEx(IntPtr lpEventAttributes, IntPtr lpName, int dwFlags,
            EventAccess dwDesiredAccess);

        // Closes the event handle created above; called on playback-thread teardown.
        [DllImport("kernel32.dll", ExactSpelling = true, PreserveSig = true, SetLastError = true)]
        public static extern bool CloseHandle(IntPtr hObject);

        // Alertable wait on the frame event; returns 0 (WAIT_OBJECT_0) when signalled.
        [DllImport("kernel32", ExactSpelling = true, PreserveSig = true, SetLastError = true)]
        public static extern int WaitForSingleObjectEx(IntPtr hEvent, int milliseconds, bool bAlertable);

        /// <summary>
        /// Enables Windows Store apps to access preexisting Component Object Model (COM) interfaces in the WASAPI family.
        /// </summary>
        /// <param name="deviceInterfacePath">A device interface ID for an audio device. This is normally retrieved from a DeviceInformation object or one of the methods of the MediaDevice class.</param>
        /// <param name="riid">The IID of a COM interface in the WASAPI family, such as IAudioClient.</param>
        /// <param name="activationParams">Interface-specific activation parameters. For more information, see the pActivationParams parameter in IMMDevice::Activate. </param>
        /// <param name="completionHandler">Callback invoked (on a COM worker thread) when activation completes.</param>
        /// <param name="activationOperation">Receives the async operation; results are read via IActivateAudioInterfaceAsyncOperation.GetActivateResult.</param>
        [DllImport("Mmdevapi.dll", ExactSpelling = true, PreserveSig = false)]
        public static extern void ActivateAudioInterfaceAsync(
            [In, MarshalAs(UnmanagedType.LPWStr)] string deviceInterfacePath,
            [In, MarshalAs(UnmanagedType.LPStruct)] Guid riid,
            [In] IntPtr activationParams, // n.b. is actually a pointer to a PropVariant, but we never need to pass anything but null
            [In] IActivateAudioInterfaceCompletionHandler completionHandler,
            out IActivateAudioInterfaceAsyncOperation activationOperation);
    }
    // trying some ideas from Lucian Wischik (ljw1004):
    // http://www.codeproject.com/Articles/460145/Recording-and-playing-PCM-audio-on-Windows-8-VB
    /// <summary>
    /// Win32 event access-rights mask passed to CreateEventEx.
    /// Values mirror the constants in winnt.h/synchapi.h and must not change.
    /// </summary>
    [Flags]
    internal enum EventAccess
    {
        STANDARD_RIGHTS_REQUIRED = 0xF0000,
        SYNCHRONIZE = 0x100000,
        // 0x3 adds the event-specific rights bits (modify state etc.)
        EVENT_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3
    }
  458. internal class ActivateAudioInterfaceCompletionHandler :
  459. IActivateAudioInterfaceCompletionHandler, IAgileObject
  460. {
  461. private Action<IAudioClient2> initializeAction;
  462. private TaskCompletionSource<IAudioClient2> tcs = new TaskCompletionSource<IAudioClient2>();
  463. public ActivateAudioInterfaceCompletionHandler(
  464. Action<IAudioClient2> initializeAction)
  465. {
  466. this.initializeAction = initializeAction;
  467. }
  468. public void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation)
  469. {
  470. // First get the activation results, and see if anything bad happened then
  471. int hr = 0;
  472. object unk = null;
  473. activateOperation.GetActivateResult(out hr, out unk);
  474. if (hr != 0)
  475. {
  476. tcs.TrySetException(Marshal.GetExceptionForHR(hr, new IntPtr(-1)));
  477. return;
  478. }
  479. var pAudioClient = (IAudioClient2) unk;
  480. // Next try to call the client's (synchronous, blocking) initialization method.
  481. try
  482. {
  483. initializeAction(pAudioClient);
  484. tcs.SetResult(pAudioClient);
  485. }
  486. catch (Exception ex)
  487. {
  488. tcs.TrySetException(ex);
  489. }
  490. }
  491. public TaskAwaiter<IAudioClient2> GetAwaiter()
  492. {
  493. return tcs.Task.GetAwaiter();
  494. }
  495. }
    /// <summary>
    /// COM interface IActivateAudioInterfaceCompletionHandler (mmdeviceapi.h).
    /// Member order defines the vtable layout and must not change.
    /// </summary>
    [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("41D949AB-9862-444A-80F6-C261334DA5EB")]
    interface IActivateAudioInterfaceCompletionHandler
    {
        //virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(/*[in]*/ _In_
        //  IActivateAudioInterfaceAsyncOperation *activateOperation) = 0;
        void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation);
    }
    /// <summary>
    /// COM interface IActivateAudioInterfaceAsyncOperation (mmdeviceapi.h).
    /// Returned by ActivateAudioInterfaceAsync; exposes the activation result.
    /// Member order defines the vtable layout and must not change.
    /// </summary>
    [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("72A22D78-CDE4-431D-B8CC-843A71199B6D")]
    interface IActivateAudioInterfaceAsyncOperation
    {
        //virtual HRESULT STDMETHODCALLTYPE GetActivateResult(/*[out]*/ _Out_
        //  HRESULT *activateResult, /*[out]*/ _Outptr_result_maybenull_ IUnknown **activatedInterface) = 0;
        void GetActivateResult([Out] out int activateResult,
            [Out, MarshalAs(UnmanagedType.IUnknown)] out object activateInterface);
    }
    /// <summary>
    /// COM interface IAudioClient2 (audioclient.h). Declared here because the WinRT
    /// build cannot use MMDeviceEnumerator. Member order mirrors the native vtable
    /// (IAudioClient methods first, then the IAudioClient2 additions) and must not
    /// be reordered or edited without matching the native declaration.
    /// </summary>
    [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("726778CD-F60A-4eda-82DE-E47610CD78AA")]
    interface IAudioClient2
    {
        // PreserveSig: callers inspect the raw HRESULT (e.g. format-support probing).
        [PreserveSig]
        int Initialize(AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long hnsBufferDuration, // REFERENCE_TIME
            long hnsPeriodicity, // REFERENCE_TIME
            [In] WaveFormat pFormat,
            [In] IntPtr audioSessionGuid);
        // ref Guid AudioSessionGuid

        /// <summary>
        /// The GetBufferSize method retrieves the size (maximum capacity) of the endpoint buffer.
        /// </summary>
        int GetBufferSize(out uint bufferSize);

        [return: MarshalAs(UnmanagedType.I8)]
        long GetStreamLatency();

        int GetCurrentPadding(out int currentPadding);

        [PreserveSig]
        int IsFormatSupported(
            AudioClientShareMode shareMode,
            [In] WaveFormat pFormat,
            [Out, MarshalAs(UnmanagedType.LPStruct)] out WaveFormatExtensible closestMatchFormat);

        int GetMixFormat(out IntPtr deviceFormatPointer);

        // REFERENCE_TIME is 64 bit int
        int GetDevicePeriod(out long defaultDevicePeriod, out long minimumDevicePeriod);

        int Start();
        int Stop();
        int Reset();
        int SetEventHandle(IntPtr eventHandle);

        /// <summary>
        /// The GetService method accesses additional services from the audio client object.
        /// </summary>
        /// <param name="interfaceId">The interface ID for the requested service.</param>
        /// <param name="interfacePointer">Pointer to a pointer variable into which the method writes the address of an instance of the requested interface. </param>
        [PreserveSig]
        int GetService([In, MarshalAs(UnmanagedType.LPStruct)] Guid interfaceId,
            [Out, MarshalAs(UnmanagedType.IUnknown)] out object interfacePointer);

        //virtual HRESULT STDMETHODCALLTYPE IsOffloadCapable(/*[in]*/ _In_
        //  AUDIO_STREAM_CATEGORY Category, /*[in]*/ _Out_ BOOL *pbOffloadCapable) = 0;
        void IsOffloadCapable(int category, out bool pbOffloadCapable);

        //virtual HRESULT STDMETHODCALLTYPE SetClientProperties(/*[in]*/ _In_
        //  const AudioClientProperties *pProperties) = 0;
        void SetClientProperties([In] IntPtr pProperties);
        // TODO: try this: void SetClientProperties([In, MarshalAs(UnmanagedType.LPStruct)] AudioClientProperties pProperties);

        //virtual HRESULT STDMETHODCALLTYPE GetBufferSizeLimits(/*[in]*/ _In_
        //  const WAVEFORMATEX *pFormat, /*[in]*/ _In_ BOOL bEventDriven, /*[in]*/
        //  _Out_ REFERENCE_TIME *phnsMinBufferDuration, /*[in]*/ _Out_
        //  REFERENCE_TIME *phnsMaxBufferDuration) = 0;
        void GetBufferSizeLimits(IntPtr pFormat, bool bEventDriven,
            out long phnsMinBufferDuration, out long phnsMaxBufferDuration);
    }
    /// <summary>
    /// Marker COM interface (IAgileObject). Implementing it declares an object
    /// free-threaded, so the activation callback may arrive on any thread.
    /// </summary>
    [ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("94ea2b94-e9cc-49e0-c0ff-ee64ca8f5b90")]
    interface IAgileObject
    {
    }
  567. }