Finished work on Vanara.PInvoke.CoreAudio

pull/375/head
David Hall 2023-01-31 15:00:12 -08:00
parent 5a71e37846
commit 139ec44dd4
20 changed files with 19824 additions and 16699 deletions

File diff suppressed because it is too large

@@ -0,0 +1,83 @@
using System.Runtime.InteropServices;
namespace Vanara.PInvoke;
public static partial class CoreAudio
{
/// <summary>Specifies the activation type for an AUDIOCLIENT_ACTIVATION_PARAMS structure passed into a call to ActivateAudioInterfaceAsync.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ne-audioclientactivationparams-audioclient_activation_type
// typedef enum AUDIOCLIENT_ACTIVATION_TYPE { AUDIOCLIENT_ACTIVATION_TYPE_DEFAULT, AUDIOCLIENT_ACTIVATION_TYPE_PROCESS_LOOPBACK } ;
[PInvokeData("audioclientactivationparams.h", MSDNShortId = "NE:audioclientactivationparams.AUDIOCLIENT_ACTIVATION_TYPE")]
public enum AUDIOCLIENT_ACTIVATION_TYPE
{
/// <summary>Default activation.</summary>
AUDIOCLIENT_ACTIVATION_TYPE_DEFAULT,
/// <summary>
/// Process loopback activation, allowing for the inclusion or exclusion of audio rendered by the specified process and its child
/// processes. For sample code that demonstrates the process loopback capture scenario, see the Application Loopback API Capture Sample.
/// </summary>
AUDIOCLIENT_ACTIVATION_TYPE_PROCESS_LOOPBACK,
}
/// <summary>Specifies the loopback mode for an AUDIOCLIENT_ACTIVATION_PARAMS structure passed into a call to ActivateAudioInterfaceAsync.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ne-audioclientactivationparams-process_loopback_mode
// typedef enum PROCESS_LOOPBACK_MODE { PROCESS_LOOPBACK_MODE_INCLUDE_TARGET_PROCESS_TREE,
// PROCESS_LOOPBACK_MODE_EXCLUDE_TARGET_PROCESS_TREE } ;
[PInvokeData("audioclientactivationparams.h", MSDNShortId = "NE:audioclientactivationparams.PROCESS_LOOPBACK_MODE")]
public enum PROCESS_LOOPBACK_MODE
{
/// <summary>
/// Render streams from the specified process and its child processes are included in the activated process loopback stream.
/// </summary>
PROCESS_LOOPBACK_MODE_INCLUDE_TARGET_PROCESS_TREE,
/// <summary>
/// Render streams from the specified process and its child processes are excluded from the activated process loopback stream.
/// </summary>
PROCESS_LOOPBACK_MODE_EXCLUDE_TARGET_PROCESS_TREE,
}
/// <summary>Specifies the activation parameters for a call to ActivateAudioInterfaceAsync.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ns-audioclientactivationparams-audioclient_activation_params
// typedef struct AUDIOCLIENT_ACTIVATION_PARAMS { AUDIOCLIENT_ACTIVATION_TYPE ActivationType; union { AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS
// ProcessLoopbackParams; } DUMMYUNIONNAME; } AUDIOCLIENT_ACTIVATION_PARAMS;
[PInvokeData("audioclientactivationparams.h", MSDNShortId = "NS:audioclientactivationparams.AUDIOCLIENT_ACTIVATION_PARAMS")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIOCLIENT_ACTIVATION_PARAMS
{
/// <summary>
/// A member of the AUDIOCLIENT_ACTIVATION_TYPE enumeration specifying the type of audio interface activation. Currently default activation and
/// loopback activation are supported.
/// </summary>
public AUDIOCLIENT_ACTIVATION_TYPE ActivationType;
/// <summary>An AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS specifying the loopback parameters for the audio interface activation.</summary>
public AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS ProcessLoopbackParams;
}
/// <summary>Specifies parameters for a call to ActivateAudioInterfaceAsync where loopback activation is requested.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ns-audioclientactivationparams-audioclient_process_loopback_params
// typedef struct AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS { DWORD TargetProcessId; PROCESS_LOOPBACK_MODE ProcessLoopbackMode; } AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS;
[PInvokeData("audioclientactivationparams.h", MSDNShortId = "NS:audioclientactivationparams.AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS
{
/// <summary>
/// The ID of the process for which the render streams, and the render streams of its child processes, will be included or excluded
/// when activating the process loopback stream.
/// </summary>
public uint TargetProcessId;
/// <summary>
/// A value from the PROCESS_LOOPBACK_MODE enumeration specifying whether the render streams for the process and child processes
/// specified in the TargetProcessId field should be included or excluded when activating the audio interface. For sample code that
/// demonstrates the process loopback capture scenario, see the Application Loopback API Capture Sample.
/// </summary>
public PROCESS_LOOPBACK_MODE ProcessLoopbackMode;
}
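// Illustrative sketch (not part of the header): build the activation parameters used
// to capture audio from a single process tree. The resulting structure is marshaled
// into a VT_BLOB PROPVARIANT and passed to ActivateAudioInterfaceAsync, whose binding
// is assumed to live in the mmdeviceapi declarations and is not shown here.
private static AUDIOCLIENT_ACTIVATION_PARAMS MakeProcessLoopbackActivationSketch(uint targetProcessId) => new()
{
ActivationType = AUDIOCLIENT_ACTIVATION_TYPE.AUDIOCLIENT_ACTIVATION_TYPE_PROCESS_LOOPBACK,
ProcessLoopbackParams = new AUDIOCLIENT_PROCESS_LOOPBACK_PARAMS
{
TargetProcessId = targetProcessId,
ProcessLoopbackMode = PROCESS_LOOPBACK_MODE.PROCESS_LOOPBACK_MODE_INCLUDE_TARGET_PROCESS_TREE,
},
};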
}

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,704 @@
#nullable enable
using System;
using System.Runtime.InteropServices;
using Vanara.InteropServices;
using static Vanara.PInvoke.Ole32;
using static Vanara.PInvoke.PropSys;
namespace Vanara.PInvoke;
public static partial class CoreAudio
{
/// <summary>Specifies the level of an APO event logged with IAudioProcessingObjectLoggingService::ApoLog.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ne-audioengineextensionapo-apo_log_level typedef enum
// APO_LOG_LEVEL { APO_LOG_LEVEL_ALWAYS, APO_LOG_LEVEL_CRITICAL, APO_LOG_LEVEL_ERROR, APO_LOG_LEVEL_WARNING, APO_LOG_LEVEL_INFO,
// APO_LOG_LEVEL_VERBOSE } ;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NE:audioengineextensionapo.APO_LOG_LEVEL")]
public enum APO_LOG_LEVEL
{
/// <summary>All events.</summary>
APO_LOG_LEVEL_ALWAYS = 0,
/// <summary>Abnormal exit or termination events.</summary>
APO_LOG_LEVEL_CRITICAL,
/// <summary>Severe error events.</summary>
APO_LOG_LEVEL_ERROR,
/// <summary>Warning events such as allocation failures.</summary>
APO_LOG_LEVEL_WARNING,
/// <summary>Non-error events such as entry or exit events.</summary>
APO_LOG_LEVEL_INFO,
/// <summary>Detailed trace events.</summary>
APO_LOG_LEVEL_VERBOSE,
}
/// <summary>Specifies the type of an APO_NOTIFICATION.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ne-audioengineextensionapo-apo_notification_type typedef
// enum APO_NOTIFICATION_TYPE { APO_NOTIFICATION_TYPE_NONE, APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME,
// APO_NOTIFICATION_TYPE_ENDPOINT_PROPERTY_CHANGE, APO_NOTIFICATION_TYPE_SYSTEM_EFFECTS_PROPERTY_CHANGE,
// APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME2, APO_NOTIFICATION_TYPE_DEVICE_ORIENTATION, APO_NOTIFICATION_TYPE_MICROPHONE_BOOST } ;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NE:audioengineextensionapo.APO_NOTIFICATION_TYPE")]
public enum APO_NOTIFICATION_TYPE
{
/// <summary>None.</summary>
APO_NOTIFICATION_TYPE_NONE = 0,
/// <summary>Endpoint volume notification.</summary>
APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME = 1,
/// <summary>Endpoint property change notification.</summary>
APO_NOTIFICATION_TYPE_ENDPOINT_PROPERTY_CHANGE = 2,
/// <summary>System effects property change notification.</summary>
APO_NOTIFICATION_TYPE_SYSTEM_EFFECTS_PROPERTY_CHANGE = 3,
/// <summary>Endpoint volume notifications for an endpoint that includes master and channel volume in dB.</summary>
APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME2 = 4,
/// <summary>Orientation notifications for the device.</summary>
APO_NOTIFICATION_TYPE_DEVICE_ORIENTATION = 5,
/// <summary>Microphone boost notifications.</summary>
APO_NOTIFICATION_TYPE_MICROPHONE_BOOST = 6
}
/// <summary>Specifies the state of a System Effects Audio Processing Object (sAPO) audio effect.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ne-audioengineextensionapo-audio_systemeffect_state
// typedef enum AUDIO_SYSTEMEFFECT_STATE { AUDIO_SYSTEMEFFECT_STATE_OFF, AUDIO_SYSTEMEFFECT_STATE_ON } ;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NE:audioengineextensionapo.AUDIO_SYSTEMEFFECT_STATE")]
public enum AUDIO_SYSTEMEFFECT_STATE
{
/// <summary>The audio effect is off.</summary>
AUDIO_SYSTEMEFFECT_STATE_OFF,
/// <summary>The audio effect is on.</summary>
AUDIO_SYSTEMEFFECT_STATE_ON,
}
/// <summary>Undocumented</summary>
[PInvokeData("audioengineextensionapo.h")]
public enum DEVICE_ORIENTATION_TYPE
{
/// <summary>Undocumented</summary>
DEVICE_NOT_ROTATED,
/// <summary>Undocumented</summary>
DEVICE_ROTATED_90_DEGREES_CLOCKWISE,
/// <summary>Undocumented</summary>
DEVICE_ROTATED_180_DEGREES_CLOCKWISE,
/// <summary>Undocumented</summary>
DEVICE_ROTATED_270_DEGREES_CLOCKWISE
}
/// <summary>Represents a logging service for APOs.</summary>
/// <remarks>
/// <para>
/// Get an instance of this interface by QueryService on the object in the pServiceProvider field of the APOInitSystemEffects3 structure
/// passed in the pbyData parameter to IAudioProcessingObject::Initialize. Specify <c>SID_AudioProcessingObjectLoggingService</c> as the
/// identifier in the guidService parameter.
/// </para>
/// <para>
/// <para>Note</para>
/// <para>
/// IAudioProcessingObjectLoggingService::ApoLog should never be called from a real-time priority thread. For more information on thread
/// priorities, see Scheduling Priorities.
/// </para>
/// </para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nn-audioengineextensionapo-iaudioprocessingobjectloggingservice
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NN:audioengineextensionapo.IAudioProcessingObjectLoggingService")]
[ComImport, Guid("698f0107-1745-4708-95a5-d84478a62a65"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IAudioProcessingObjectLoggingService
{
/// <summary>Logs an APO event.</summary>
/// <param name="level">A value from the APO_LOG_LEVEL enumeration specifying the level of the event being logged.</param>
/// <param name="format">The format-control string for the log message.</param>
/// <param name="args">Format argument list.</param>
/// <returns>None</returns>
/// <remarks>
/// <para>Note</para>
/// <para>
/// This method should never be called from a real-time priority thread. For more information on thread priorities, see Scheduling Priorities.
/// </para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudioprocessingobjectloggingservice-apolog
// void ApoLog( APO_LOG_LEVEL level, LPCWSTR format, ... );
[PreserveSig]
void ApoLog(APO_LOG_LEVEL level, [MarshalAs(UnmanagedType.LPWStr)] string format, IntPtr args);
}
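// Usage sketch: "logger" is assumed to have been obtained through QueryService with
// SID_AudioProcessingObjectLoggingService, as described in the remarks above. The
// native method is variadic and the binding surfaces the argument list only as an
// opaque IntPtr, so the safest pattern is to format the message in managed code and
// pass IntPtr.Zero for the native argument list.
private static void LogApoEventSketch(IAudioProcessingObjectLoggingService logger, int sampleRate) =>
logger.ApoLog(APO_LOG_LEVEL.APO_LOG_LEVEL_INFO, $"APO initialized at {sampleRate} Hz", IntPtr.Zero);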
/// <summary>
/// Implemented by clients to register for and receive common audio-related notifications for APO endpoint and system effect notifications.
/// </summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nn-audioengineextensionapo-iaudioprocessingobjectnotifications
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NN:audioengineextensionapo.IAudioProcessingObjectNotifications")]
[ComImport, Guid("56B0C76F-02FD-4B21-A52E-9F8219FC86E4"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IAudioProcessingObjectNotifications
{
/// <summary>
/// Called by the system to allow clients to register to receive notification callbacks for APO endpoint and system effect notifications.
/// </summary>
/// <param name="apoNotifications">
/// Output parameter that returns a pointer to an array of APO_NOTIFICATION_DESCRIPTOR specifying the set of APO changes for which
/// notifications are requested.
/// </param>
/// <param name="count">Output parameter specifying the number of items returned in apoNotifications.</param>
/// <returns>An HRESULT.</returns>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudioprocessingobjectnotifications-getaponotificationregistrationinfo
// HRESULT GetApoNotificationRegistrationInfo( APO_NOTIFICATION_DESCRIPTOR **apoNotifications, DWORD *count );
[PreserveSig]
HRESULT GetApoNotificationRegistrationInfo(
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] out APO_NOTIFICATION_DESCRIPTOR[] apoNotifications, out uint count);
/// <summary>Called by the system to notify clients of changes to APO endpoints or system effects.</summary>
/// <param name="apoNotification">An APO_NOTIFICATION representing the APO change associated with the notification.</param>
/// <returns>None</returns>
/// <remarks>
/// <para>Specify the set of APO changes for which this method is called by implementing IAudioProcessingObjectNotifications::GetApoNotificationRegistrationInfo.</para>
/// <para>
/// This method will be called after LockForProcess is called and will stop being called before UnlockForProcess. If there are any
/// notifications in flight, they might get executed during or after <c>UnlockForProcess</c>. The APO must handle synchronization in
/// this case.
/// </para>
/// <para>
/// <para>Note</para>
/// <para>
/// APOs must query each property once to get its initial value because the <c>HandleNotification</c> method is only invoked when any of
/// the properties have changed. The exceptions to this are the initial audio endpoint volume when the APO registers for
/// APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME and the value of PKEY_AudioEndpoint_Disable_SysFx if the APO registers for APO_NOTIFICATION_TYPE_ENDPOINT_PROPERTY_CHANGE.
/// </para>
/// </para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudioprocessingobjectnotifications-handlenotification
// void HandleNotification( APO_NOTIFICATION *apoNotification );
[PreserveSig]
void HandleNotification(in APO_NOTIFICATION apoNotification);
}
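// Illustrative sketch (not part of the header): a minimal client that registers for
// endpoint volume notifications on a single endpoint and inspects the union member
// matching the notification type. The endpoint instance is assumed to be supplied by
// the caller, for example from the APOInitSystemEffects3 device collection.
private class ApoNotificationClientSketch : IAudioProcessingObjectNotifications
{
private readonly IMMDevice endpoint;
public ApoNotificationClientSketch(IMMDevice endpoint) => this.endpoint = endpoint;
public HRESULT GetApoNotificationRegistrationInfo(out APO_NOTIFICATION_DESCRIPTOR[] apoNotifications, out uint count)
{
// Ask only for endpoint volume changes on the chosen endpoint.
apoNotifications = new[] { new APO_NOTIFICATION_DESCRIPTOR
{
type = APO_NOTIFICATION_TYPE.APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME,
audioEndpointVolume = new AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR { device = endpoint },
} };
count = (uint)apoNotifications.Length;
return HRESULT.S_OK;
}
public void HandleNotification(in APO_NOTIFICATION apoNotification)
{
// Only the union member matching "type" is valid; "volume" points at an
// AUDIO_VOLUME_NOTIFICATION_DATA that is valid for the duration of the callback.
if (apoNotification.type == APO_NOTIFICATION_TYPE.APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME)
_ = apoNotification.audioEndpointVolumeChange.volume;
}
}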
/// <summary>Undocumented</summary>
/// <seealso cref="Vanara.PInvoke.CoreAudio.IAudioProcessingObjectNotifications"/>
[ComImport, Guid("ca2cfbde-a9d6-4eb0-bc95-c4d026b380f0"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IAudioProcessingObjectNotifications2 : IAudioProcessingObjectNotifications
{
/// <summary>
/// Called by the system to allow clients to register to receive notification callbacks for APO endpoint and system effect notifications.
/// </summary>
/// <param name="apoNotifications">
/// Output parameter that returns a pointer to an array of APO_NOTIFICATION_DESCRIPTOR specifying the set of APO changes for which
/// notifications are requested.
/// </param>
/// <param name="count">Output parameter specifying the number of items returned in apoNotifications.</param>
/// <returns>An HRESULT.</returns>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudioprocessingobjectnotifications-getaponotificationregistrationinfo
// HRESULT GetApoNotificationRegistrationInfo( APO_NOTIFICATION_DESCRIPTOR **apoNotifications, DWORD *count );
[PreserveSig]
new HRESULT GetApoNotificationRegistrationInfo(
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] out APO_NOTIFICATION_DESCRIPTOR[] apoNotifications, out uint count);
/// <summary>Called by the system to notify clients of changes to APO endpoints or system effects.</summary>
/// <param name="apoNotification">An APO_NOTIFICATION representing the APO change associated with the notification.</param>
/// <returns>None</returns>
/// <remarks>
/// <para>Specify the set of APO changes for which this method is called by implementing IAudioProcessingObjectNotifications::GetApoNotificationRegistrationInfo.</para>
/// <para>
/// This method will be called after LockForProcess is called and will stop being called before UnlockForProcess. If there are any
/// notifications in flight, they might get executed during or after <c>UnlockForProcess</c>. The APO must handle synchronization in
/// this case.
/// </para>
/// <para>
/// <para>Note</para>
/// <para>
/// APOs must query each property once to get its initial value because the <c>HandleNotification</c> method is only invoked when any of
/// the properties have changed. The exceptions to this are the initial audio endpoint volume when the APO registers for
/// APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME and the value of PKEY_AudioEndpoint_Disable_SysFx if the APO registers for APO_NOTIFICATION_TYPE_ENDPOINT_PROPERTY_CHANGE.
/// </para>
/// </para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudioprocessingobjectnotifications-handlenotification
// void HandleNotification( APO_NOTIFICATION *apoNotification );
[PreserveSig]
new void HandleNotification(in APO_NOTIFICATION apoNotification);
/// <summary>Undocumented</summary>
[PreserveSig]
HRESULT GetApoNotificationRegistrationInfo2(APO_NOTIFICATION_TYPE maxApoNotificationTypeSupported,
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 2)] out APO_NOTIFICATION_DESCRIPTOR[] apoNotifications, out uint count);
}
/// <summary>Provides access to the real time work queue for APOs.</summary>
/// <remarks>
/// <para>
/// Get an instance of this interface by calling QueryService on the object in the pServiceProvider field of the APOInitSystemEffects3
/// structure passed in the pbyData parameter to IAudioProcessingObject::Initialize. Specify <c>SID_AudioProcessingObjectRTQueue</c> as
/// the identifier in the guidService parameter.
/// </para>
/// <para>For information on using the real-time work queue APIs, see rtworkq.h header.</para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nn-audioengineextensionapo-iaudioprocessingobjectrtqueueservice
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NN:audioengineextensionapo.IAudioProcessingObjectRTQueueService")]
[ComImport, Guid("ACD65E2F-955B-4B57-B9BF-AC297BB752C9"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IAudioProcessingObjectRTQueueService
{
/// <summary>Gets the ID of a work queue that the APO can use to schedule tasks that need to run at a real-time priority.</summary>
/// <returns>A DWORD containing the work queue ID.</returns>
/// <remarks>The returned work queue ID is used with the real-time work queue APIs. For more information see rtworkq.h header.</remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudioprocessingobjectrtqueueservice-getrealtimeworkqueue
// HRESULT GetRealTimeWorkQueue( DWORD *workQueueId );
uint GetRealTimeWorkQueue();
}
/// <summary>
/// <para>
/// Implementing this interface also implies that the APO supports the APO Settings framework and allows the APO to subscribe for common
/// audio related notifications from the Audio Engine
/// </para>
/// <para>
/// This interface is also implemented by clients that require an APOInitSystemEffects3 structure to be passed into the
/// IAudioProcessingObject::Initialize method. <c>APOInitSystemEffects3</c> adds the ability to obtain a service provider such as
/// IAudioProcessingObjectLoggingService or IAudioProcessingObjectRTQueueService.
/// </para>
/// <para>
/// <para>Note</para>
/// <para>
/// On OS versions earlier than Windows Build 22000, the system will not pass an <c>APOInitSystemEffects3</c> into
/// <c>IAudioProcessingObject::Initialize</c> even if the client implements <c>IAudioSystemEffects3</c>, but will instead pass an older
/// version of the structure, APOInitSystemEffects2 or APOInitSystemEffects, into <c>Initialize</c>.
/// </para>
/// </para>
/// </summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nn-audioengineextensionapo-iaudiosystemeffects3
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NN:audioengineextensionapo.IAudioSystemEffects3")]
[ComImport, Guid("C58B31CD-FC6A-4255-BC1F-AD29BB0A4A17"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IAudioSystemEffects3 : IAudioSystemEffects2
{
/// <summary>
/// The GetEffectsList method is used for retrieving the list of audio processing effects that are currently active, and stores an
/// event to be signaled if the list changes.
/// </summary>
/// <param name="ppEffectsIds">
/// Pointer to the list of GUIDs that represent audio processing effects. The caller is responsible for freeing this memory by
/// calling CoTaskMemFree.
/// </param>
/// <param name="pcEffects">A count of the audio processing effects in the list.</param>
/// <param name="Event">The HANDLE of the event that will be signaled if the list changes.</param>
/// <returns>
/// The <c>GetEffectsList</c> method returns S_OK if the method call is successful. If there are no effects in the list, the
/// function still succeeds, <c>ppEffectsIds</c> returns a NULL pointer, and <c>pcEffects</c> returns a count of 0.
/// </returns>
/// <remarks>
/// <para>
/// The APO signals the specified event when the list of audio processing effects changes from the list that was returned by
/// <c>GetEffectsList</c>. The APO uses this event until either <c>GetEffectsList</c> is called again, or the APO is destroyed. The
/// passed handle can be NULL, in which case the APO stops using any previous handle and does not signal an event.
/// </para>
/// <para>
/// An APO implements this method to allow Windows to discover the current effects applied by the APO. The list of effects may depend
/// on the processing mode that the APO initialized, and on any end user configuration. The processing mode is indicated by the
/// <c>AudioProcessingMode</c> member of APOInitSystemEffects2.
/// </para>
/// <para>
/// APOs should identify effects using GUIDs defined by Windows, such as AUDIO_EFFECT_TYPE_ACOUSTIC_ECHO_CANCELLATION. An APO should
/// only define and return a custom GUID in rare cases where the type of effect is clearly different from the ones defined by Windows.
/// </para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioenginebaseapo/nf-audioenginebaseapo-iaudiosystemeffects2-geteffectslist
// HRESULT GetEffectsList( [out] LPGUID *ppEffectsIds, [out] UINT *pcEffects, [in] HANDLE Event );
[PreserveSig]
new HRESULT GetEffectsList(out SafeCoTaskMemHandle ppEffectsIds, out uint pcEffects, [In] IntPtr Event);
/// <summary>
/// Implemented by System Effects Audio Processing Object (sAPO) audio effects to allow the caller to get the current list of effects.
/// </summary>
/// <param name="effects">
/// Receives a pointer to an array of AUDIO_SYSTEMEFFECT_STATE structures representing the current list of audio effects.
/// </param>
/// <param name="numEffects">Receives the number of <c>AUDIO_EFFECT</c> structures returned in effects.</param>
/// <param name="event">The HANDLE of the event that will be signaled if the list changes.</param>
/// <returns>An HRESULT.</returns>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudiosystemeffects3-getcontrollablesystemeffectslist
// HRESULT GetControllableSystemEffectsList( AUDIO_SYSTEMEFFECT **effects, UINT *numEffects, HANDLE event );
[PreserveSig]
HRESULT GetControllableSystemEffectsList([Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] out AUDIO_SYSTEMEFFECT[]? effects,
out uint numEffects, [In, Optional] HEVENT @event);
/// <summary>
/// Implemented by System Effects Audio Processing Object (sAPO) audio effects to allow the caller to set the state of effects.
/// </summary>
/// <param name="effectId">The GUID identifier for an audio effect. Audio effect GUIDs are defined in ksmedia.h.</param>
/// <param name="state">A value from the AUDIO_SYSTEMEFFECT_STATE enumerating specifying the state to set.</param>
/// <returns>An HRESULT.</returns>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/nf-audioengineextensionapo-iaudiosystemeffects3-setaudiosystemeffectstate
// HRESULT SetAudioSystemEffectState( GUID effectId, AUDIO_SYSTEMEFFECT_STATE state );
[PreserveSig]
HRESULT SetAudioSystemEffectState(Guid effectId, AUDIO_SYSTEMEFFECT_STATE state);
}
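// Usage sketch: given an (assumed) IAudioSystemEffects3 instance obtained from an
// sAPO, list its controllable effects and turn off the first one whose state can be
// changed. The HEVENT argument is left default, so no change notification is requested.
private static void DisableFirstControllableEffectSketch(IAudioSystemEffects3 fx3)
{
if (fx3.GetControllableSystemEffectsList(out AUDIO_SYSTEMEFFECT[]? effects, out _, default).Failed || effects is null)
return;
foreach (AUDIO_SYSTEMEFFECT fx in effects)
{
if (fx.canSetState)
{
fx3.SetAudioSystemEffectState(fx.id, AUDIO_SYSTEMEFFECT_STATE.AUDIO_SYSTEMEFFECT_STATE_OFF).ThrowIfFailed();
break;
}
}
}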
/// <summary>Represents a notification for a change to an APO endpoint or system effects.</summary>
/// <remarks>
/// Register for the types of notifications you want to receive by implementing
/// IAudioProcessingObjectNotifications::GetApoNotificationRegistrationInfo. Receive the registered notifications by implementing IAudioProcessingObjectNotifications::HandleNotification.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-apo_notification typedef struct
// APO_NOTIFICATION { APO_NOTIFICATION_TYPE type; union { AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION audioEndpointVolumeChange;
// AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION audioEndpointPropertyChange; AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION
// audioSystemEffectsPropertyChange; AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION2 audioEndpointVolumeChange2; DEVICE_ORIENTATION_TYPE
// deviceOrientation; AUDIO_MICROPHONE_BOOST_NOTIFICATION audioMicrophoneBoostChange; } DUMMYUNIONNAME; } APO_NOTIFICATION;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.APO_NOTIFICATION")]
[StructLayout(LayoutKind.Explicit)]
public struct APO_NOTIFICATION
{
/// <summary>A value from the APO_NOTIFICATION_TYPE enumeration specifying the type of change the notification represents.</summary>
[FieldOffset(0)]
public APO_NOTIFICATION_TYPE type;
/// <summary>An AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION representing a notification of a change to APO endpoint volume.</summary>
[FieldOffset(8)]
public AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION audioEndpointVolumeChange;
/// <summary>An AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION representing a notification of a change to an APO endpoint property.</summary>
[FieldOffset(8)]
public AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION audioEndpointPropertyChange;
/// <summary>
/// An AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION representing a notification of a change to an APO system effect property.
/// </summary>
[FieldOffset(8)]
public AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION audioSystemEffectsPropertyChange;
/// <summary>Used when type is APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME2.</summary>
[FieldOffset(8)]
public AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION2 audioEndpointVolumeChange2;
/// <summary>Used when type is APO_NOTIFICATION_TYPE_DEVICE_ORIENTATION.</summary>
[FieldOffset(8)]
public DEVICE_ORIENTATION_TYPE deviceOrientation;
/// <summary>Used when type is APO_NOTIFICATION_TYPE_MICROPHONE_BOOST.</summary>
[FieldOffset(8)]
public AUDIO_MICROPHONE_BOOST_NOTIFICATION audioMicrophoneBoostChange;
}
/// <summary>Specifies a requested APO notification.</summary>
/// <remarks>
/// Return this structure from an implementation of IAudioProcessingObjectNotifications::GetApoNotificationRegistrationInfo to specify a
/// requested APO notification.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-apo_notification_descriptor
// typedef struct APO_NOTIFICATION_DESCRIPTOR { APO_NOTIFICATION_TYPE type; union { AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR
// audioEndpointVolume; AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR audioEndpointPropertyChange;
// AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR audioSystemEffectsPropertyChange;
// AUDIO_MICROPHONE_BOOST_APO_NOTIFICATION_DESCRIPTOR audioMicrophoneBoost; } DUMMYUNIONNAME; } APO_NOTIFICATION_DESCRIPTOR;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.APO_NOTIFICATION_DESCRIPTOR")]
[StructLayout(LayoutKind.Explicit)]
public struct APO_NOTIFICATION_DESCRIPTOR
{
/// <summary>A value from the APO_NOTIFICATION_TYPE enumeration</summary>
[FieldOffset(0)]
public APO_NOTIFICATION_TYPE type;
/// <summary>An AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR specifying an endpoint volume change APO notification.</summary>
[FieldOffset(8)]
public AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR audioEndpointVolume;
/// <summary>An AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR specifying an endpoint property change APO notification.</summary>
[FieldOffset(8)]
public AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR audioEndpointPropertyChange;
/// <summary>
/// An AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR specifying a system effects property change APO notification.
/// </summary>
[FieldOffset(8)]
public AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR audioSystemEffectsPropertyChange;
/// <summary>Used for microphone boost notifications.</summary>
[FieldOffset(8)]
public AUDIO_MICROPHONE_BOOST_APO_NOTIFICATION_DESCRIPTOR audioMicrophoneBoost;
}
/// <summary>
/// Provides audio processing object (APO) initialization parameters, extending APOInitSystemEffects2 to add the ability to specify a
/// service provider for logging.
/// </summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-apoinitsystemeffects3 typedef
// struct APOInitSystemEffects3 { APOInitBaseStruct APOInit; IPropertyStore *pAPOEndpointProperties; IServiceProvider *pServiceProvider;
// IMMDeviceCollection *pDeviceCollection; UINT nSoftwareIoDeviceInCollection; UINT nSoftwareIoConnectorIndex; GUID AudioProcessingMode;
// BOOL InitializeForDiscoveryOnly; } APOInitSystemEffects3;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.APOInitSystemEffects3")]
[StructLayout(LayoutKind.Sequential)]
public struct APOInitSystemEffects3
{
/// <summary>An APOInitBaseStruct structure.</summary>
public APOInitBaseStruct APOInit;
/// <summary>A pointer to an IPropertyStore object.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IPropertyStore pAPOEndpointProperties;
/// <summary>An IServiceProvider interface.</summary>
public IServiceProvider pServiceProvider;
/// <summary>
/// A pointer to an IMMDeviceCollection object. The last item in the pDeviceCollection is always the IMMDevice representing the audio endpoint.
/// </summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDeviceCollection pDeviceCollection;
/// <summary>
/// Specifies the <c>MMDevice</c> that implements the DeviceTopology that includes the software connector for which the APO is
/// initializing. The <c>MMDevice</c> is contained in pDeviceCollection.
/// </summary>
public uint nSoftwareIoDeviceInCollection;
/// <summary>Specifies the index of a <c>Software_IO</c> connector in the DeviceTopology.</summary>
public uint nSoftwareIoConnectorIndex;
/// <summary>Specifies the processing mode for the audio graph.</summary>
public Guid AudioProcessingMode;
/// <summary>Indicates whether the audio system is initializing the APO for effects discovery only.</summary>
[MarshalAs(UnmanagedType.Bool)]
public bool InitializeForDiscoveryOnly;
}
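// Illustrative sketch (an assumption, not part of the header): inside an APO's
// IAudioProcessingObject::Initialize implementation, the incoming pbyData blob can be
// reinterpreted as APOInitSystemEffects3 once size and version checks (elided here)
// confirm that the system supplied the newer structure.
private static APOInitSystemEffects3 ReadApoInitParamsSketch(IntPtr pbyData) =>
Marshal.PtrToStructure<APOInitSystemEffects3>(pbyData);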
/// <summary>Specifies an endpoint property change APO notification.</summary>
/// <remarks>
/// Return an APO_NOTIFICATION_DESCRIPTOR containing this structure from an implementation
/// of IAudioProcessingObjectNotifications::GetApoNotificationRegistrationInfo to request endpoint property change APO notifications.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-audio_endpoint_property_change_apo_notification_descriptor
// typedef struct AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR { IMMDevice *device; } AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_ENDPOINT_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR
{
/// <summary>An IMMDevice representing the audio endpoint associated with the notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice device;
}
/// <summary>Represents a property change APO notification.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-audio_endpoint_property_change_notification
// typedef struct AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION { IMMDevice *endpoint; IPropertyStore *propertyStore; PROPERTYKEY
// propertyKey; } AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_ENDPOINT_PROPERTY_CHANGE_NOTIFICATION
{
/// <summary>An IMMDevice representing the audio endpoint associated with the notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice endpoint;
/// <summary>An IPropertyStore representing the property store associated with the notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IPropertyStore propertyStore;
/// <summary>A PROPERTYKEY structure identifying the property associated with the notification.</summary>
public PROPERTYKEY propertyKey;
}
/// <summary>Specifies an endpoint volume APO notification.</summary>
/// <remarks>
/// Return an APO_NOTIFICATION_DESCRIPTOR containing this structure from an implementation
/// of IAudioProcessingObjectNotifications::GetApoNotificationRegistrationInfo to request endpoint volume change APO notifications.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-audio_endpoint_volume_apo_notification_descriptor
// typedef struct AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR { IMMDevice *device; } AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR
{
/// <summary>The IMMDevice representing the audio endpoint associated with the notification request.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice device;
}
/// <summary>Represents an audio endpoint volume change APO notification.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-audio_endpoint_volume_change_notification
// typedef struct AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION { IMMDevice *endpoint; PAUDIO_VOLUME_NOTIFICATION_DATA volume; } AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION
{
/// <summary>An IMMDevice representing the audio endpoint associated with the notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice endpoint;
/// <summary>A pointer to a AUDIO_VOLUME_NOTIFICATION_DATA representing the new endpoint volume.</summary>
public IntPtr /*AUDIO_VOLUME_NOTIFICATION_DATA*/ volume;
}
/// <summary>Undocumented</summary>
[PInvokeData("audioengineextensionapo.h")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_ENDPOINT_VOLUME_CHANGE_NOTIFICATION2
{
/// <summary>Undocumented</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice endpoint;
/// <summary>Undocumented</summary>
public IntPtr /*PAUDIO_VOLUME_NOTIFICATION_DATA2*/ volume;
}
/// <summary>Used to request microphone boost notifications.</summary>
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_MICROPHONE_BOOST_APO_NOTIFICATION_DESCRIPTOR
{
/// <summary>Undocumented</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice device;
}
/// <summary>Undocumented</summary>
[PInvokeData("audioengineextensionapo.h")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_MICROPHONE_BOOST_NOTIFICATION
{
/// <summary>Device associated with mic boost notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice endpoint;
/// <summary>
/// Context associated with the originator of the event. A client can use this value to keep track of control changes made by other
/// processes and by the hardware. The functions IAudioVolumeLevel::SetLevel and IAudioMute::SetMute use the context. When this
/// notification is received, a client can inspect the context Guid to discover whether it or another client is the source of the notification.
/// </summary>
public Guid eventContext;
/// <summary>Indicates the presence of a "Microphone Boost" part (connector or subunit) of an audio capture device topology.</summary>
[MarshalAs(UnmanagedType.Bool)]
public bool microphoneBoostEnabled;
/// <summary>The volume level in decibels.</summary>
public float levelInDb;
/// <summary>The minimum volume level in decibels.</summary>
public float levelMinInDb;
/// <summary>The maximum volume level in decibels.</summary>
public float levelMaxInDb;
/// <summary>The stepping value between consecutive volume levels in the range levelMinInDb to levelMaxInDb.</summary>
public float levelStepInDb;
/// <summary>Indicates if the IAudioMute interface is supported by the "Microphone Boost" part of the audio capture device topology.</summary>
[MarshalAs(UnmanagedType.Bool)]
public bool muteSupported;
/// <summary>The current state (enabled or disabled) of the mute control.</summary>
[MarshalAs(UnmanagedType.Bool)]
public bool mute;
}
/// <summary>Represents a System Effects Audio Processing Object (sAPO) audio effect.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-audio_systemeffect typedef
// struct AUDIO_SYSTEMEFFECT { GUID id; BOOL canSetState; AUDIO_SYSTEMEFFECT_STATE state; } AUDIO_SYSTEMEFFECT;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.AUDIO_SYSTEMEFFECT")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_SYSTEMEFFECT
{
/// <summary>The GUID identifier for an audio effect. Audio effect GUIDs are defined in ksmedia.h.</summary>
public Guid id;
/// <summary>A boolean value specifying whether the effect state can be modified.</summary>
[MarshalAs(UnmanagedType.Bool)]
public bool canSetState;
/// <summary>A member of the AUDIO_SYSTEMEFFECT_STATE enumeration specifying the state of the audio effect.</summary>
public AUDIO_SYSTEMEFFECT_STATE state;
}
/// <summary>Used to request audio system effects property change notifications on a specific endpoint and property context.</summary>
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_APO_NOTIFICATION_DESCRIPTOR
{
/// <summary>Undocumented</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice device;
/// <summary>Undocumented</summary>
public Guid propertyStoreContext;
}
/// <summary>Represents a system audio effect APO notification.</summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audioengineextensionapo/ns-audioengineextensionapo-audio_systemeffects_property_change_notification
// typedef struct AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION { IMMDevice *endpoint; GUID propertyStoreContext;
// AUDIO_SYSTEMEFFECTS_PROPERTYSTORE_TYPE propertyStoreType; IPropertyStore *propertyStore; PROPERTYKEY propertyKey; } AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION;
[PInvokeData("audioengineextensionapo.h", MSDNShortId = "NS:audioengineextensionapo.AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_SYSTEMEFFECTS_PROPERTY_CHANGE_NOTIFICATION
{
/// <summary>An IMMDevice representing the audio endpoint associated with the notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IMMDevice endpoint;
/// <summary>A GUID identifying the APO property store associated with the notification.</summary>
public Guid propertyStoreContext;
/// <summary>
/// A value from the AUDIO_SYSTEMEFFECTS_PROPERTYSTORE_TYPE enumeration specifying the type of the property store associated with the notification.
/// </summary>
public AUDIO_SYSTEMEFFECTS_PROPERTYSTORE_TYPE propertyStoreType;
/// <summary>An IPropertyStore representing the property store associated with the notification.</summary>
[MarshalAs(UnmanagedType.Interface)]
public IPropertyStore propertyStore;
/// <summary>A PROPERTYKEY structure identifying the property associated with the notification.</summary>
public PROPERTYKEY propertyKey;
}
/// <summary>Undocumented</summary>
[PInvokeData("audioengineextensionapo.h")]
[StructLayout(LayoutKind.Sequential)]
public struct AUDIO_VOLUME_NOTIFICATION_DATA2
{
/// <summary>Undocumented</summary>
public IntPtr /*PAUDIO_VOLUME_NOTIFICATION_DATA*/ notificationData;
/// <summary>Specifies the current master volume level of the audio stream in dB.</summary>
public float masterVolumeInDb;
/// <summary>
/// The minimum volume level of the endpoint in decibels. This value remains constant for the lifetime of the audio device specified in AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR.
/// </summary>
public float volumeMinInDb;
/// <summary>
/// The maximum volume level of the endpoint in decibels. This value remains constant for the lifetime of the audio device specified
/// in AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR.
/// </summary>
public float volumeMaxInDb;
/// <summary>
/// The volume increment in decibels. This increment remains constant for the lifetime of the audio device specified in AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR.
/// </summary>
public float volumeIncrementInDb;
/// <summary>
/// Current step in the volume range. This is a value in the range from 0 to stepCount-1, where 0 represents the minimum volume level
/// and stepCount-1 represents the maximum level. Audio applications can call the IAudioEndpointVolume::VolumeStepUp and
/// IAudioEndpointVolume::VolumeStepDown methods to increase or decrease the volume level by one interval.
/// </summary>
public uint step;
/// <summary>
/// The number of steps in the volume range. This number remains constant for the lifetime of the audio device specified in AUDIO_ENDPOINT_VOLUME_APO_NOTIFICATION_DESCRIPTOR.
/// </summary>
public uint stepCount;
/// <summary>
/// The first element in an array of channel volumes in dB. This element contains the current volume level of channel 0 in the audio
/// stream. If the audio stream contains more than one channel, the volume levels for the additional channels immediately follow the
/// AUDIO_VOLUME_NOTIFICATION_DATA2 structure.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 1)]
public float[] channelVolumesInDb;
}
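// Illustrative helper (an assumption, not part of the header): read every channel
// volume that trails an AUDIO_VOLUME_NOTIFICATION_DATA2 in memory. Channel 0 occupies
// the channelVolumesInDb element embedded in the structure; the remaining channels
// immediately follow it.
private static float[] ReadChannelVolumesInDbSketch(IntPtr pData2, int channelCount)
{
int offset = Marshal.SizeOf(typeof(AUDIO_VOLUME_NOTIFICATION_DATA2)) - sizeof(float);
float[] volumes = new float[channelCount];
Marshal.Copy(IntPtr.Add(pData2, offset), volumes, 0, channelCount);
return volumes;
}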
}


@@ -0,0 +1,177 @@
using System;
using System.Runtime.InteropServices;
using static Vanara.PInvoke.WinMm;
namespace Vanara.PInvoke;
public static partial class CoreAudio
{
/// <summary>The function uses the format specified by the caller to create a media type object that describes the audio format.</summary>
/// <param name="pAudioFormat">Specifies a pointer to a WAVEFORMATEX structure.</param>
/// <param name="cbAudioFormatSize">Specifies the size of the WAVEFORMATEX structure pointed to by the <c>pAudioFormat</c> parameter.</param>
/// <param name="ppIAudioMediaType">Specifies a pointer to an IAudioMediaType interface.</param>
/// <returns>
/// The function returns S_OK if the call to the function is successful. Otherwise, it returns an appropriate HRESULT error code.
/// </returns>
/// <remarks>
/// When you implement custom audio system effects, the function works with IAudioSystemEffectsCustomFormats::GetFormat to represent a
/// custom audio data format and to provide information about the custom format.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nf-audiomediatype-createaudiomediatype HRESULT
// CreateAudioMediaType( const WAVEFORMATEX *pAudioFormat, UINT32 cbAudioFormatSize, IAudioMediaType **ppIAudioMediaType );
[PInvokeData("audiomediatype.h", MSDNShortId = "NF:audiomediatype.CreateAudioMediaType")]
public delegate HRESULT CreateAudioMediaType(in WAVEFORMATEX pAudioFormat, uint cbAudioFormatSize,
out IAudioMediaType ppIAudioMediaType);
/// <summary>
/// The function uses the information provided in the UNCOMPRESSEDAUDIOFORMAT structure to create a media type object that describes the
/// audio format.
/// </summary>
/// <param name="pUncompressedAudioFormat">Specifies a pointer to an UNCOMPRESSEDAUDIOFORMAT structure.</param>
/// <param name="ppIAudioMediaType">Specifies a pointer to an IAudioMediaType interface.</param>
/// <returns>
/// The function returns S_OK if the call to the function is successful. Otherwise, it returns an appropriate HRESULT error code.
/// </returns>
/// <remarks>
/// When you implement custom audio system effects, the function works with IAudioSystemEffectsCustomFormats::GetFormat to represent a
/// custom audio data format and to provide information about the custom format.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nf-audiomediatype-createaudiomediatypefromuncompressedaudioformat
// HRESULT CreateAudioMediaTypeFromUncompressedAudioFormat( const UNCOMPRESSEDAUDIOFORMAT *pUncompressedAudioFormat, IAudioMediaType
// **ppIAudioMediaType );
[PInvokeData("audiomediatype.h", MSDNShortId = "NF:audiomediatype.CreateAudioMediaTypeFromUncompressedAudioFormat")]
public delegate HRESULT CreateAudioMediaTypeFromUncompressedAudioFormat(in UNCOMPRESSEDAUDIOFORMAT pUncompressedAudioFormat,
out IAudioMediaType ppIAudioMediaType);
/// <summary>These flags indicate the degree of similarity between the two media types.</summary>
[PInvokeData("audiomediatype.h", MSDNShortId = "NN:audiomediatype.IAudioMediaType")]
[Flags]
public enum AUDIOMEDIATYPE_EQUAL
{
/// <summary>The audio format types are the same.</summary>
AUDIOMEDIATYPE_EQUAL_FORMAT_TYPES = 0x00000002,
/// <summary>The format information matches, not including extra data beyond the base WAVEFORMATEX structure.</summary>
AUDIOMEDIATYPE_EQUAL_FORMAT_DATA = 0x00000004,
/// <summary>The extra data is identical, or neither media type contains extra data.</summary>
AUDIOMEDIATYPE_EQUAL_FORMAT_USER_DATA = 0x00000008,
}
/// <summary>
/// <para>
/// The interface exposes methods that allow an sAPO to get information that is used to negotiate with the audio engine for the
/// appropriate audio data format. An sAPO also returns this interface in response to a call to IAudioSystemEffectsCustomFormats::GetFormat.
/// </para>
/// <para><c>IAudioMediaType</c> inherits from <c>IUnknown</c> and also supports the methods described below.</para>
/// </summary>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nn-audiomediatype-iaudiomediatype
[PInvokeData("audiomediatype.h", MSDNShortId = "NN:audiomediatype.IAudioMediaType")]
[ComImport, Guid("4E997F73-B71F-4798-873B-ED7DFCF15B4D"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IAudioMediaType
{
/// <summary>The method determines whether the audio data format is a compressed format.</summary>
/// <returns>Receives a Boolean value. The value is <c>TRUE</c> if the format is compressed or <c>FALSE</c> if the format is uncompressed.</returns>
/// <remarks>None.</remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nf-audiomediatype-iaudiomediatype-iscompressedformat HRESULT
// IsCompressedFormat( [out] BOOL *pfCompressed );
[return: MarshalAs(UnmanagedType.Bool)]
bool IsCompressedFormat();
/// <summary>The method compares two media types and determines whether they are identical.</summary>
/// <param name="pIAudioType">Specifies a pointer to an IAudioMediaType interface of the media type to compare.</param>
/// <returns>
/// <para>
/// Specifies a pointer to a DWORD variable that contains the bitwise OR result of zero or more flags. These flags indicate the
/// degree of similarity between the two media types. The following table shows the supported flags.
/// </para>
/// <list type="table">
/// <listheader>
/// <term>Flag</term>
/// <term>Meaning</term>
/// </listheader>
/// <item>
/// <term>AUDIOMEDIATYPE_EQUAL_FORMAT_TYPES</term>
/// <term>The audio format types are the same.</term>
/// </item>
/// <item>
/// <term>AUDIOMEDIATYPE_EQUAL_FORMAT_DATA</term>
/// <term>The format information matches, not including extra data beyond the base WAVEFORMATEX structure.</term>
/// </item>
/// <item>
/// <term>AUDIOMEDIATYPE_EQUAL_FORMAT_USER_DATA</term>
/// <term>The extra data is identical, or neither media type contains extra data.</term>
/// </item>
/// </list>
/// </returns>
/// <remarks>
/// <para>
/// Both media types must have a major type, otherwise the method returns E_INVALIDARG. For more information about media types, see
/// Media Types.
/// </para>
/// <para>
/// The MF_MEDIATYPE_EQUAL_FORMAT_DATA flag indicates that both media types have compatible attributes, although one might be a
/// superset of the other. This method of comparison means that you can compare a partially-specified media type against a complete
/// media type. For example, you might have two video types that describe the same format, but one type includes attributes for
/// extended color information (chroma siting, nominal range, and so forth).
/// </para>
/// <para>
/// If the method succeeds and all the comparison flags are set in <c>pdwFlags</c>, the return value is S_OK. If the method succeeds
/// but some comparison flags are not set, the method returns S_FALSE.
/// </para>
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nf-audiomediatype-iaudiomediatype-isequal HRESULT IsEqual( [in]
// IAudioMediaType *pIAudioType, [out] DWORD *pdwFlags );
AUDIOMEDIATYPE_EQUAL IsEqual([In] IAudioMediaType pIAudioType);
/// <summary>The method returns the WAVEFORMATEX structure for the audio data format.</summary>
/// <returns>The method returns a pointer to a WAVEFORMATEX structure.</returns>
/// <remarks>The pointer that is returned is valid only while the <c>IAudioMediaType</c> interface is referenced.</remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nf-audiomediatype-iaudiomediatype-getaudioformat const
// WAVEFORMATEX * GetAudioFormat();
[PreserveSig]
IntPtr GetAudioFormat();
/// <summary>The method returns information about the audio data format.</summary>
/// <returns>Specifies a pointer to an UNCOMPRESSEDAUDIOFORMAT structure.</returns>
/// <remarks>
/// The information that is returned is useful for uncompressed formats. However, this method call will succeed for compressed
/// formats as well. When you make this function call for a compressed audio data format, you must determine whether the returned
/// information is applicable to your compressed format.
/// </remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/nf-audiomediatype-iaudiomediatype-getuncompressedaudioformat
// HRESULT GetUncompressedAudioFormat( [out] UNCOMPRESSEDAUDIOFORMAT *pUncompressedAudioFormat );
UNCOMPRESSEDAUDIOFORMAT GetUncompressedAudioFormat();
}
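// Usage sketch: GetAudioFormat returns a raw WAVEFORMATEX pointer that is valid only
// while the IAudioMediaType reference is alive, so copy it into a managed structure
// before releasing the interface.
private static WAVEFORMATEX CopyAudioFormatSketch(IAudioMediaType mediaType) =>
Marshal.PtrToStructure<WAVEFORMATEX>(mediaType.GetAudioFormat());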
/// <summary>
/// The UNCOMPRESSEDAUDIOFORMAT structure specifies the frame rate, channel mask, and other attributes of the uncompressed audio data format.
/// </summary>
/// <remarks>This structure provides access to the parameters that describe an uncompressed PCM audio format.</remarks>
// https://learn.microsoft.com/en-us/windows/win32/api/audiomediatype/ns-audiomediatype-uncompressedaudioformat typedef struct
// _UNCOMPRESSEDAUDIOFORMAT { GUID guidFormatType; DWORD dwSamplesPerFrame; DWORD dwBytesPerSampleContainer; DWORD dwValidBitsPerSample;
// FLOAT fFramesPerSecond; DWORD dwChannelMask; } UNCOMPRESSEDAUDIOFORMAT;
[PInvokeData("audiomediatype.h", MSDNShortId = "NS:audiomediatype._UNCOMPRESSEDAUDIOFORMAT")]
[StructLayout(LayoutKind.Sequential)]
public struct UNCOMPRESSEDAUDIOFORMAT
{
/// <summary>Specifies the GUID of the data format type.</summary>
public Guid guidFormatType;
/// <summary>Specifies the number of samples per frame.</summary>
public uint dwSamplesPerFrame;
/// <summary>Specifies the number of bytes that make up a unit container of the sample.</summary>
public uint dwBytesPerSampleContainer;
/// <summary>Specifies the number of valid bits per sample.</summary>
public uint dwValidBitsPerSample;
/// <summary>Specifies the number of frames per second of streaming audio data.</summary>
public float fFramesPerSecond;
/// <summary>Specifies the channel mask that is used by the uncompressed audio data.</summary>
public uint dwChannelMask;
}
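// Illustrative sketch: an UNCOMPRESSEDAUDIOFORMAT describing 48 kHz stereo 32-bit
// float PCM, the kind of value handed to CreateAudioMediaTypeFromUncompressedAudioFormat.
// The format-type GUID below is KSDATAFORMAT_SUBTYPE_IEEE_FLOAT from ksmedia.h.
private static UNCOMPRESSEDAUDIOFORMAT StereoFloat48kSketch => new()
{
guidFormatType = new Guid("00000003-0000-0010-8000-00AA00389B71"), // KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
dwSamplesPerFrame = 2, // one sample per channel in each frame (stereo)
dwBytesPerSampleContainer = 4, // 32-bit float containers
dwValidBitsPerSample = 32,
fFramesPerSecond = 48000f,
dwChannelMask = 0x3, // SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT
};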
}

File diff suppressed because it is too large

@@ -1,289 +1,281 @@
using System;
namespace Vanara.PInvoke;

/// <summary>Functions, structures and constants from Windows Core Audio Api.</summary>
public static partial class CoreAudio
{
/// <summary>
/// <para>
/// The AUDCLNT_SESSIONFLAGS_XXX constants indicate characteristics of an audio session associated with the stream. A client can specify
/// these options during the initialization of the stream through the StreamFlags parameter of the <c>IAudioClient::Initialize</c> method.
/// </para>
/// </summary>
// https://docs.microsoft.com/en-us/windows/win32/coreaudio/audclnt-sessionflags-xxx-constants
[PInvokeData("Audiosessiontypes.h", MSDNShortId = "5745d5bc-71e8-4b33-8227-c1c84226b6ee")]
public enum AUDCLNT_SESSIONFLAGS : uint
{
/// <summary>The session expires when there are no associated streams and owning session control objects holding references.</summary>
AUDCLNT_SESSIONFLAGS_EXPIREWHENUNOWNED = 0x10000000,
/// <summary>
/// The volume control is hidden in the volume mixer user interface when the audio session is created. If the session associated with
/// the stream already exists before IAudioClient::Initialize opens the stream, the volume control is displayed in the volume mixer.
/// </summary>
AUDCLNT_SESSIONFLAGS_DISPLAY_HIDE = 0x20000000,
/// <summary>The volume control is hidden in the volume mixer user interface after the session expires.</summary>
AUDCLNT_SESSIONFLAGS_DISPLAY_HIDEWHENEXPIRED = 0x40000000,
}
/// <summary>
/// The <c>AUDCLNT_SHAREMODE</c> enumeration defines constants that indicate whether an audio stream will run in shared mode or in
/// exclusive mode.
/// </summary>
/// <remarks>
/// <para>
/// The IAudioClient::Initialize and IAudioClient::IsFormatSupported methods use the constants defined in the <c>AUDCLNT_SHAREMODE</c> enumeration.
/// </para>
/// <para>
/// In shared mode, the client can share the audio endpoint device with clients that run in other user-mode processes. The audio engine
/// always supports formats for client streams that match the engine's mix format. In addition, the audio engine might support another
/// format if the Windows audio service can insert system effects into the client stream to convert the client format to the mix format.
/// </para>
/// <para>
/// In exclusive mode, the Windows audio service attempts to establish a connection in which the client has exclusive access to the audio
/// endpoint device. In this mode, the audio engine inserts no system effects into the local stream to aid in the creation of the
/// connection point. Either the audio device can handle the specified format directly or the method fails.
/// </para>
/// <para>For more information about shared-mode and exclusive-mode streams, see User-Mode Audio Components.</para>
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/audiosessiontypes/ne-audiosessiontypes-audclnt_sharemode typedef enum
// _AUDCLNT_SHAREMODE { AUDCLNT_SHAREMODE_SHARED, AUDCLNT_SHAREMODE_EXCLUSIVE } AUDCLNT_SHAREMODE;
[PInvokeData("audiosessiontypes.h", MSDNShortId = "f4870d0f-85d1-48ad-afe0-2f5a960c08fb")]
public enum AUDCLNT_SHAREMODE
{
/// <summary>The audio stream will run in shared mode. For more information, see Remarks.</summary>
AUDCLNT_SHAREMODE_SHARED,
/// <summary>The audio stream will run in exclusive mode. For more information, see Remarks.</summary>
AUDCLNT_SHAREMODE_EXCLUSIVE,
}
/// <summary>Specifies characteristics that a client can assign to an audio stream during the initialization of the stream.</summary>
/// <remarks>
/// The <c>IAudioClient::Initialize</c> method and the <c>DIRECTX_AUDIO_ACTIVATION_PARAMS</c> structure use the AUDCLNT_STREAMFLAGS_XXX constants.
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/coreaudio/audclnt-streamflags-xxx-constants
[PInvokeData("Audiosessiontypes.h", MSDNShortId = "7b2267c3-79f5-4ada-a7ce-78dd514f8487")]
[Flags]
public enum AUDCLNT_STREAMFLAGS : uint
{
/// <summary>
/// The audio stream will be a member of a cross-process audio session.
/// <para>
/// The AUDCLNT_STREAMFLAGS_CROSSPROCESS flag indicates that the audio session for the stream is a cross-process session. A
/// cross-process session can accept streams from more than one process. If two applications in two separate processes call
/// <c>IAudioClient::Initialize</c> with identical session GUIDs, and both applications set the AUDCLNT_STREAMFLAGS_CROSSPROCESS flag,
/// then the audio engine assigns their streams to the same cross-process session. This flag overrides the default behavior, which is
/// to assign the stream to a process-specific session rather than a cross-process session. The AUDCLNT_STREAMFLAGS_CROSSPROCESS flag
/// bit is incompatible with exclusive mode. For more information about cross-process sessions, see Audio Sessions.
/// </para>
/// </summary>
AUDCLNT_STREAMFLAGS_CROSSPROCESS = 0x00010000,
/// <summary>
/// The audio stream will operate in loopback mode.
/// <para>
/// The AUDCLNT_STREAMFLAGS_LOOPBACK flag enables loopback recording. In loopback recording, the audio engine copies the audio stream
/// that is being played by a rendering endpoint device into an audio endpoint buffer so that a WASAPI client can capture the stream.
/// If this flag is set, the <c>IAudioClient::Initialize</c> method attempts to open a capture buffer on the rendering device. This
/// flag is valid only for a rendering device and only if the <c>Initialize</c> call sets the ShareMode parameter to
/// AUDCLNT_SHAREMODE_SHARED. Otherwise the <c>Initialize</c> call will fail. If the call succeeds, the client can call the
/// <c>IAudioClient::GetService</c> method to obtain an <c>IAudioCaptureClient</c> interface on the rendering device. For more
/// information, see Loopback Recording.
/// </para>
/// </summary>
AUDCLNT_STREAMFLAGS_LOOPBACK = 0x00020000,
/// <summary>
/// Processing of the audio buffer by the client will be event driven.
/// <para>
/// The AUDCLNT_STREAMFLAGS_EVENTCALLBACK flag enables event-driven buffering. If a client sets this flag in the call to
/// <c>IAudioClient::Initialize</c> that initializes a stream, the client must subsequently call the
/// <c>IAudioClient::SetEventHandle</c> method to supply an event handle for the stream. After the stream starts, the audio engine
/// will signal the event handle to notify the client each time a buffer becomes ready for the client to process. WASAPI supports
/// event-driven buffering for both rendering and capture buffers. Both shared-mode and exclusive-mode streams can use event-driven
/// buffering. For a code example that uses the AUDCLNT_STREAMFLAGS_EVENTCALLBACK flag, see Exclusive-Mode Streams.
/// </para>
/// </summary>
AUDCLNT_STREAMFLAGS_EVENTCALLBACK = 0x00040000,
/// <summary>
/// The volume and mute settings for an audio session will not persist across system restarts.
/// <para>
/// The AUDCLNT_STREAMFLAGS_NOPERSIST flag disables persistence of the volume and mute settings for a session that contains rendering
/// streams. By default, the volume level and muting state for a rendering session are persistent across system restarts. The volume
/// level and muting state for a capture session are never persistent. For more information about the persistence of session volume
/// and mute settings, see Audio Sessions.
/// </para>
/// </summary>
AUDCLNT_STREAMFLAGS_NOPERSIST = 0x00080000,
/// <summary>
/// This constant is new in Windows 7. The sample rate of the stream is adjusted to a rate specified by an application.
/// <para>
/// The AUDCLNT_STREAMFLAGS_RATEADJUST flag enables an application to get a reference to the <c>IAudioClockAdjustment</c> interface
/// that is used to set the sample rate for the stream. To get a pointer to this interface, an application must initialize the audio
/// client with this flag and then call <c>IAudioClient::GetService</c> by specifying the <c>IID_IAudioClockAdjustment</c>
/// identifier. To set the new sample rate, call <c>IAudioClockAdjustment::SetSampleRate</c>. This flag is valid only for a rendering
/// device. Otherwise the <c>GetService</c> call fails with the error code AUDCLNT_E_WRONG_ENDPOINT_TYPE. The application must also
/// set the ShareMode parameter to AUDCLNT_SHAREMODE_SHARED during the <c>Initialize</c> call. <c>SetSampleRate</c> fails if the
/// audio client is not in shared mode.
/// </para>
/// </summary>
AUDCLNT_STREAMFLAGS_RATEADJUST = 0x00100000,
/// <summary>
/// Prevents the render stream from being included in any application loopback streams. Note that this stream will continue to be
/// included in the endpoint loopback stream. This has no effect on Exclusive-Mode Streams. This constant is available starting with
/// Windows 10, version 1803.
/// </summary>
AUDCLNT_STREAMFLAGS_PREVENT_LOOPBACK_CAPTURE = 0x01000000,
/// <summary>
/// A channel matrixer and a sample rate converter are inserted as necessary to convert between the uncompressed format supplied to
/// IAudioClient::Initialize and the audio engine mix format.
/// </summary>
AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM = 0x80000000,
/// <summary>
/// When used with AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM, a sample rate converter with better quality than the default conversion but
/// with a higher performance cost is used. This should be used if the audio is ultimately intended to be heard by humans as opposed
/// to other scenarios such as pumping silence or populating a meter.
/// </summary>
AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY = 0x08000000,
}
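// Editor's sketch (illustrative, not part of the library): a minimal loopback-capture setup per the
// AUDCLNT_STREAMFLAGS_LOOPBACK remarks above. The client is activated from a *render* endpoint,
// initialized in shared mode with the loopback flag, and packets are then read through
// IAudioCaptureClient. The variable renderDevice and the wrapper call shapes are assumptions.
//
// renderDevice.Activate(typeof(IAudioClient).GUID, CLSCTX.CLSCTX_ALL, default, out var obj);
// var client = (IAudioClient)obj;
// var wfx = client.GetMixFormat();
// client.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED,
//     (uint)AUDCLNT_STREAMFLAGS.AUDCLNT_STREAMFLAGS_LOOPBACK, 10000000, 0, wfx, default);
// client.GetService(typeof(IAudioCaptureClient).GUID, out var svc);
// var capture = (IAudioCaptureClient)svc;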
/// <summary>Specifies the category of an audio stream.</summary>
/// <remarks>
/// <para>Note that only a subset of the audio stream categories are valid for certain stream types.</para>
/// <list type="table">
/// <listheader>
/// <term>Stream type</term>
/// <term>Valid categories</term>
/// </listheader>
/// <item>
/// <term>Render stream</term>
/// <term>All categories are valid.</term>
/// </item>
/// <item>
/// <term>Capture stream</term>
/// <term>AudioCategory_Communications, AudioCategory_Speech, AudioCategory_Other</term>
/// </item>
/// <item>
/// <term>Loopback stream</term>
/// <term>AudioCategory_Other</term>
/// </item>
/// </list>
/// <para>
/// Games should categorize their music streams as <c>AudioCategory_GameMedia</c> so that game music mutes automatically if another
/// application plays music in the background. Music or video applications should categorize their streams as <c>AudioCategory_Media</c>
/// or <c>AudioCategory_Movie</c> so they will take priority over <c>AudioCategory_GameMedia</c> streams. Game audio for in-game
/// cinematics or cutscenes, when the audio is premixed or should take priority over background audio for creative reasons, should also
/// be categorized as <c>Media</c> or <c>Movie</c>.
/// </para>
/// <para>
/// The values <c>AudioCategory_ForegroundOnlyMedia</c> and <c>AudioCategory_BackgroundCapableMedia</c> are deprecated. For Windows Store
/// apps, these values will continue to function the same when running on Windows 10 as they did on Windows 8.1. Attempting to use these
/// values in a Universal Windows Platform (UWP) app will result in compilation errors and an exception at runtime. Using these values
/// in a Windows desktop application built with the Windows 10 SDK will result in a compilation error.
/// </para>
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/audiosessiontypes/ne-audiosessiontypes-audio_stream_category typedef enum
// _AUDIO_STREAM_CATEGORY { AudioCategory_Other, AudioCategory_ForegroundOnlyMedia, AudioCategory_BackgroundCapableMedia,
// AudioCategory_Communications, AudioCategory_Alerts, AudioCategory_SoundEffects, AudioCategory_GameEffects, AudioCategory_GameMedia,
// AudioCategory_GameChat, AudioCategory_Speech, AudioCategory_Movie, AudioCategory_Media } AUDIO_STREAM_CATEGORY;
[PInvokeData("audiosessiontypes.h", MSDNShortId = "B6B9195A-2704-4633-AFCF-B01CED6B6DB4")]
public enum AUDIO_STREAM_CATEGORY
{
/// <summary>Other audio stream.</summary>
AudioCategory_Other,
/// <summary>
/// Media that will only stream when the app is in the foreground. This enumeration value has been deprecated. For more information,
/// see the Remarks section.
/// </summary>
AudioCategory_ForegroundOnlyMedia,
/// <summary>
/// Media that can be streamed when the app is in the background. This enumeration value has been deprecated. For more information,
/// see the Remarks section.
/// </summary>
AudioCategory_BackgroundCapableMedia,
/// <summary>Real-time communications, such as VOIP or chat.</summary>
AudioCategory_Communications,
/// <summary>Alert sounds.</summary>
AudioCategory_Alerts,
/// <summary>Sound effects.</summary>
AudioCategory_SoundEffects,
/// <summary>Game sound effects.</summary>
AudioCategory_GameEffects,
/// <summary>Background audio for games.</summary>
AudioCategory_GameMedia,
/// <summary>
/// Game chat audio. Similar to AudioCategory_Communications except that AudioCategory_GameChat will not attenuate other streams.
/// </summary>
AudioCategory_GameChat,
/// <summary>Speech.</summary>
AudioCategory_Speech,
/// <summary>Stream that includes audio with dialog.</summary>
AudioCategory_Movie,
/// <summary>Stream that includes audio without dialog.</summary>
AudioCategory_Media,
}
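// Editor's sketch (illustrative, not part of the library): a stream category is supplied before
// Initialize through IAudioClient2::SetClientProperties and the native AudioClientProperties
// structure, whose eCategory member takes an AUDIO_STREAM_CATEGORY value. The struct layout and
// wrapper call shapes shown here are assumptions.
//
// var props = new AudioClientProperties
// {
//     cbSize = (uint)Marshal.SizeOf<AudioClientProperties>(),
//     eCategory = AUDIO_STREAM_CATEGORY.AudioCategory_GameMedia,
// };
// audioClient2.SetClientProperties(props);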
/// <summary>The <c>AudioSessionState</c> enumeration defines constants that indicate the current state of an audio session.</summary>
/// <remarks>
/// <para>
/// When a client opens a session by assigning the first stream to the session (by calling the IAudioClient::Initialize method), the
/// initial session state is inactive. The session state changes from inactive to active when a stream in the session begins running
/// (because the client has called the IAudioClient::Start method). The session changes from active to inactive when the client stops the
/// last running stream in the session (by calling the IAudioClient::Stop method). The session state changes to expired when the client
/// destroys the last stream in the session by releasing all references to the stream object.
/// </para>
/// <para>
/// The system volume-control program, Sndvol, displays volume controls for both active and inactive sessions. Sndvol stops displaying
/// the volume control for a session when the session state changes to expired. For more information about Sndvol, see Audio Sessions.
/// </para>
/// <para>
/// The IAudioSessionControl::GetState and IAudioSessionEvents::OnStateChanged methods use the constants defined in the
/// <c>AudioSessionState</c> enumeration.
/// </para>
/// <para>For more information about session states, see Audio Sessions.</para>
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/audiosessiontypes/ne-audiosessiontypes-audiosessionstate typedef enum
// _AudioSessionState { AudioSessionStateInactive, AudioSessionStateActive, AudioSessionStateExpired } AudioSessionState;
[PInvokeData("audiosessiontypes.h", MSDNShortId = "a972fed6-425f-46c8-b0cc-6538460bb104")]
public enum AudioSessionState
{
/// <summary>
/// The audio session is inactive. (It contains at least one stream, but none of the streams in the session is currently running.)
/// </summary>
AudioSessionStateInactive,
/// <summary>The audio session is active. (At least one of the streams in the session is running.)</summary>
AudioSessionStateActive,
/// <summary>The audio session has expired. (It contains no streams.)</summary>
AudioSessionStateExpired,
}
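// Editor's sketch (illustrative, not part of the library): session state is usually read through
// IAudioSessionControl::GetState, or observed via IAudioSessionEvents::OnStateChanged. The variable
// sessionControl and the wrapper call shapes are assumptions.
//
// var state = sessionControl.GetState();
// if (state == AudioSessionState.AudioSessionStateActive)
//     Console.WriteLine("At least one stream in this session is running.");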
}

@@ -1421,7 +1421,8 @@ namespace Vanara.PInvoke
/// Specifies whether the audio stream is currently muted. If <c>bMuted</c> is <c>TRUE</c>, the stream is muted. If
/// <c>FALSE</c>, the stream is not muted.
/// </summary>
[MarshalAs(UnmanagedType.Bool)] public bool bMuted;
[MarshalAs(UnmanagedType.Bool)]
public bool bMuted;
/// <summary>
/// Specifies the current master volume level of the audio stream. The volume level is normalized to the range from 0.0 to 1.0,


@@ -12,8 +12,12 @@
<PackageReadmeFile>pkgreadme.md</PackageReadmeFile>
<PackageReleaseNotes />
</PropertyGroup>
<ItemGroup>
<Compile Remove="MmReg.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Core\Vanara.Core.csproj" />
<ProjectReference Include="..\Multimedia\Vanara.PInvoke.Multimedia.csproj" />
<ProjectReference Include="..\Ole\Vanara.PInvoke.Ole.csproj" />
<ProjectReference Include="..\Shared\Vanara.PInvoke.Shared.csproj" />
</ItemGroup>


@@ -1,82 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="16.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{62869A96-7A2E-42E9-A286-B8A2E3FE9D20}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<AssemblyName>UnitTest.PInvoke.CoreAudio</AssemblyName>
<TargetFrameworkVersion>v4.7.2</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<TargetFrameworkProfile />
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<PlatformTarget>x64</PlatformTarget>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<PlatformTarget>x64</PlatformTarget>
</PropertyGroup>
<ItemGroup>
<Compile Include="TopologyTests.cs" />
<Compile Include="DeviceTests.cs" />
<Compile Include="IPolicyConfig.cs" />
<ProjectReference Include="..\..\..\PInvoke\Avrt\Vanara.PInvoke.Avrt.csproj" />
<ProjectReference Include="..\..\..\PInvoke\CoreAudio\Vanara.PInvoke.CoreAudio.csproj" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\Core\Vanara.Core.csproj">
<Project>{241f73ee-9298-45c9-b869-a045dff94c03}</Project>
<Name>Vanara.Core</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\PInvoke\CoreAudio\Vanara.PInvoke.CoreAudio.csproj">
<Project>{f9c2b1c3-079d-47d8-996e-1575d2f5c4d2}</Project>
<Name>Vanara.PInvoke.CoreAudio</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\PInvoke\Kernel32\Vanara.PInvoke.Kernel32.csproj">
<Project>{842d436f-598c-47d7-b5aa-12399f8ccfe9}</Project>
<Name>Vanara.PInvoke.Kernel32</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\PInvoke\Ole\Vanara.PInvoke.Ole.csproj">
<Project>{BBE4A7D6-0B24-4F58-9726-E05F358C1256}</Project>
<Name>Vanara.PInvoke.Ole</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\PInvoke\Shared\Vanara.PInvoke.Shared.csproj">
<Project>{a5e519e9-feba-4fe3-93a5-b8269bef72f4}</Project>
<Name>Vanara.PInvoke.Shared</Name>
</ProjectReference>
<ProjectReference Include="..\..\CSharpRunner\Shared.csproj">
<Project>{a96cff10-0967-429a-8700-4a86c97c5603}</Project>
<Name>Shared</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<PackageReference Include="NUnit" Version="3.12.0" />
<PackageReference Include="NUnit3TestAdapter" Version="3.15.1" />
</ItemGroup>
<ItemGroup>
<Reference Include="System" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>


@@ -12,390 +12,390 @@ using Vanara.InteropServices;
using static Vanara.PInvoke.CoreAudio;
using static Vanara.PInvoke.Ole32;
using static Vanara.PInvoke.PropSys;
using static Vanara.PInvoke.WinMm;
namespace Vanara.PInvoke.Tests;
public partial class CoreAudioTests
{
private static readonly PROPERTYKEY PKEY_Device_FriendlyName = new PROPERTYKEY(new Guid(0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0), 14);
private static Dictionary<string, string> lookup = new Dictionary<string, string>();
public static IEnumerable<IMMDevice> CreateIMMDeviceCollection(IMMDeviceEnumerator deviceEnumerator, EDataFlow direction = EDataFlow.eAll, DEVICE_STATE stateMasks = DEVICE_STATE.DEVICE_STATEMASK_ALL)
{
using var deviceCollection = ComReleaserFactory.Create(deviceEnumerator.EnumAudioEndpoints(direction, stateMasks));
var deviceList = new List<IMMDevice>();
var cnt = deviceCollection.Item.GetCount();
if (cnt == 0) Assert.Inconclusive("No devices were found.");
for (uint i = 0; i < cnt; i++)
{
deviceCollection.Item.Item(i, out var dev).ThrowIfFailed();
deviceList.Add(dev);
}
return deviceList;
}
/// <summary>
/// This test ensures that each device can use any valid COM interface returned from the Activate method. It checks to make sure
/// each received interface is not null and an HRESULT of S_OK is returned.
/// </summary>
[Test]
public void IMMDevice_Activate()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item, EDataFlow.eAll, DEVICE_STATE.DEVICE_STATE_ACTIVE))
{
TestActivation<IAudioClient>();
TestActivation<IAudioEndpointVolume>();
TestActivation<IAudioMeterInformation>();
TestActivation<IAudioSessionManager>();
TestActivation<IAudioSessionManager2>();
TestActivation<IDeviceTopology>();
void TestActivation<T>() where T : class
{
Assert.That(d.Activate(typeof(T).GUID, CLSCTX.CLSCTX_INPROC_SERVER, default, out var objInterface), ResultIs.Successful);
Assert.IsNotNull(objInterface as T);
Marshal.ReleaseComObject(objInterface);
}
}
}
/// <summary>This test ensures that each device can get its ID. It also checks that the received ID is not null.</summary>
[Test]
public void IMMDevice_GetId()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item))
{
string strId = null;
Assert.That(() => strId = d.GetId(), Throws.Nothing);
Assert.IsNotNull(strId);
TestContext.WriteLine($"Id:{d.GetId()}, State:{d.GetState()}");
}
}
/// <summary>
/// This test ensures that each device can get its state. It also checks that the received state is a valid device state constant.
/// </summary>
[Test]
public void IMMDevice_GetState()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item))
{
DEVICE_STATE deviceState = 0;
Assert.That(() => deviceState = d.GetState(), Throws.Nothing);
Assert.That(Enum.IsDefined(typeof(DEVICE_STATE), deviceState), Is.True);
}
}
/// <summary>
/// This test ensures that each device can open a property store in READ mode and that the received property store is non-null.
/// It also checks that the property store object works correctly by making a call to get the property count.
/// </summary>
[Test]
public void IMMDevice_OpenPropertyStore()
{
var tested = false;
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item))
{
TestContext.WriteLine($"**** {GetDeviceName(d.GetId())} ****");
// Open the property store
IPropertyStore propertyStore = null;
Assert.That(() => propertyStore = d.OpenPropertyStore(STGM.STGM_READ), Throws.Nothing);
// Verify the count can be received.
var propertyCount = uint.MaxValue;
Assert.That(() => propertyCount = propertyStore.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, propertyCount, "The property count was not received.");
// Get each property key, then get value.
for (uint i = 0; i < propertyCount; i++)
{
PROPERTYKEY propertyKey = default;
Assert.That(() => propertyKey = propertyStore.GetAt(i), Throws.Nothing);
var value = GetPropertyValue(propertyStore, propertyKey);
if (value != null)
tested = true;
TestContext.WriteLine($"{propertyKey.GetCanonicalName()}={value ?? "null"}");
}
}
if (!tested) Assert.Inconclusive("No property values returned valid, non-null values.");
}
/// <summary>
/// Tests that the individual render and capture device collections have a combined count equal to the total device count.
/// </summary>
[Test]
public void IMMDeviceCollection_GetCount()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
var allCaptureDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eCapture, DEVICE_STATE.DEVICE_STATEMASK_ALL);
var allRenderDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eRender, DEVICE_STATE.DEVICE_STATEMASK_ALL);
var allDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eAll, DEVICE_STATE.DEVICE_STATEMASK_ALL);
Assert.IsNotNull(allCaptureDevices, "The IMMDeviceCollection object is null.");
Assert.IsNotNull(allRenderDevices, "The IMMDeviceCollection object is null.");
Assert.IsNotNull(allDevices, "The IMMDeviceCollection object is null.");
uint captureCount = uint.MaxValue, renderCount = uint.MaxValue, allCount = uint.MaxValue;
Assert.That(() => captureCount = allCaptureDevices.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, captureCount, "Device count was not received.");
Assert.That(() => renderCount = allRenderDevices.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, renderCount, "Device count was not received.");
Assert.That(() => allCount = allDevices.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, allCount, "Device count was not received.");
Assert.AreEqual(allCount, captureCount + renderCount, "The combined number of capture and render devices is not equal to the total device count.");
}
/// <summary>Tests that all devices from index zero to [count - 1] can be received with S_OK HRESULT and each device is not null.</summary>
[Test]
public void IMMDeviceCollection_Item()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
IMMDeviceCollection allDevices = null;
Assert.That(() => allDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eAll, DEVICE_STATE.DEVICE_STATEMASK_ALL), Throws.Nothing);
Assert.IsNotNull(allDevices, "The IMMDeviceCollection object is null");
uint count = 0;
Assert.That(() => count = allDevices.GetCount(), Throws.Nothing);
IMMDevice device;
for (uint i = 0; i < count; i++)
{
Assert.That(allDevices.Item(i, out device), ResultIs.Successful);
}
}
/// <summary>
/// This test method does nothing. Testing of the EnumAudioEndpoints method is implicit by testing other aspects of the IMMDevice API.
/// </summary>
[Test]
public void IMMDeviceEnumerator_EnumAudioEndpoints()
{
// This method is thoroughly tested through various other unit tests. The entry point for most other tests starts with calling EnumAudioEndpoints.
// TODO: Add specific test for this.
}
/// <summary>
/// Tests that the default audio endpoint for all combinations of data flow and roles can be created with S_OK HRESULT and that each
/// device is not null.
/// </summary>
[Test]
public void IMMDeviceEnumerator_GetDefaultAudioEndpoint()
{
IMMDevice device = null;
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
// data flow - eAll (this should always produce HRESULT of E_INVALIDARG, which is 0x80070057)
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eAll, ERole.eCommunications), Throws.Exception);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eAll, ERole.eConsole), Throws.Exception);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eAll, ERole.eMultimedia), Throws.Exception);
// data flow - eCapture
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eCommunications), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eConsole), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eMultimedia), Throws.Nothing);
Assert.IsNotNull(device);
// data flow - eRender
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eCommunications), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eConsole), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eMultimedia), Throws.Nothing);
Assert.IsNotNull(device);
}
/// <summary>Tests that the GetDevice method can get each audio device individually, by ID.</summary>
[Test]
public void IMMDeviceEnumerator_GetDevice()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var device in CreateIMMDeviceCollection(enumerator.Item))
{
// Get the device ID.
string deviceId = null;
Assert.That(() => deviceId = device.GetId(), Throws.Nothing);
Assert.IsNotNull(deviceId, "The device string is null.");
// Get the IMMDevice directly from the ID.
IMMDevice deviceFromId = null;
Assert.That(() => deviceFromId = enumerator.Item.GetDevice(deviceId), Throws.Nothing);
Assert.IsNotNull(deviceFromId, "The IMMDevice object is null.");
// Ensure the IDs of each device match.
string deviceId2 = null;
Assert.That(() => deviceId2 = deviceFromId.GetId(), Throws.Nothing);
Assert.IsNotNull(deviceId2, "The device string is null.");
Assert.AreEqual(deviceId, deviceId2, "The device IDs are not equal.");
}
}
/// <summary>Tests that a valid client can be registered and an HRESULT of S_OK is returned.</summary>
[Test]
public void IMMDeviceEnumerator_RegisterEndpointNotificationCallback()
{
var cTok = new CancellationTokenSource();
var task = Task.Run(() =>
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
var client = new MMDeviceNotifyClient(TestContext.Out);
Assert.That(() => enumerator.Item.RegisterEndpointNotificationCallback(client), Throws.Nothing);
while (!cTok.Token.IsCancellationRequested)
Thread.Sleep(50);
Assert.That(() => enumerator.Item.UnregisterEndpointNotificationCallback(client), Throws.Nothing);
}, cTok.Token);
try
{
// Make changes
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
var activeEndpoints = CreateIMMDeviceCollection(enumerator.Item, EDataFlow.eAll, DEVICE_STATE.DEVICE_STATE_ACTIVE).ToList();
using var ep = ComReleaserFactory.Create(enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eMultimedia));
using var alt = ComReleaserFactory.Create(activeEndpoints.First(d => d.GetId() != ep.Item.GetId()));
using var pc = ComReleaserFactory.Create(new CoreAudio.IPolicyConfig());
Assert.That(pc.Item.SetDefaultEndpoint(alt.Item.GetId(), ERole.eMultimedia), ResultIs.Successful);
Thread.Sleep(250);
Assert.That(pc.Item.SetDefaultEndpoint(ep.Item.GetId(), ERole.eMultimedia), ResultIs.Successful);
Thread.Sleep(250);
// Registry hack to disable
//Registry.SetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\MMDevices\Audio\Render\{14b8ed7a-b84f-43b1-8de6-dc678cd96836}", "DeviceState", 0x2, RegistryValueKind.DWord);
//Thread.Sleep(100);
//Registry.SetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\MMDevices\Audio\Render\{14b8ed7a-b84f-43b1-8de6-dc678cd96836}", "DeviceState", 0x1, RegistryValueKind.DWord);
//Thread.Sleep(100);
//using var vol = ComReleaserFactory.Create(ep.Item.Activate<IAudioEndpointVolume>());
//var mute = vol.Item.GetMute();
//vol.Item.SetMute(!mute, Guid.NewGuid());
//Thread.Sleep(100);
//Assert.That(vol.Item.GetMute(), Is.EqualTo(!mute));
//vol.Item.SetMute(mute, Guid.NewGuid());
//Thread.Sleep(100);
//Assert.That(vol.Item.GetMute(), Is.EqualTo(mute));
}
finally
{
cTok.Cancel();
task.Wait(1000);
}
}
[Test]
public void IMMEndpoint_GetDataFlow()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var device in CreateIMMDeviceCollection(enumerator.Item))
{
// Cast compiles to QueryInterface call.
var endpoint = (IMMEndpoint)device;
Assert.IsNotNull(endpoint);
EDataFlow dataFlow = EDataFlow.eAll;
Assert.That(() => dataFlow = endpoint.GetDataFlow(), Throws.Nothing);
Assert.AreNotEqual(EDataFlow.eAll, dataFlow);
}
}
private static string GetDeviceName(string devId)
{
if (lookup.TryGetValue(devId, out var val)) return val;
using var pEnum = ComReleaserFactory.Create(new IMMDeviceEnumerator());
using var pDev = ComReleaserFactory.Create(pEnum.Item.GetDevice(devId));
using var pProps = ComReleaserFactory.Create(pDev.Item.OpenPropertyStore(STGM.STGM_READ));
using var pv = new PROPVARIANT();
pProps.Item.GetValue(PKEY_Device_FriendlyName, pv);
lookup.Add(devId, pv.pwszVal);
return pv.pwszVal;
}
private object GetPropertyValue(IPropertyStore propertyStore, PROPERTYKEY propertyKey)
{
try
{
using var pv = new PROPVARIANT();
propertyStore.GetValue(propertyKey, pv);
if (propertyKey == AudioPropertyKeys.PKEY_AudioEngine_DeviceFormat || propertyKey == AudioPropertyKeys.PKEY_AudioEngine_OEMFormat)
{
Assert.That(pv.vt, Is.EqualTo(VARTYPE.VT_BLOB));
var format = pv.blob.pBlobData.ToStructure<WAVEFORMATEX>(pv.blob.cbSize);
if (format.nChannels != 0 && format.nSamplesPerSec != 0 && format.wBitsPerSample != 0)
Assert.AreEqual(format.nChannels * format.nSamplesPerSec * format.wBitsPerSample, format.nAvgBytesPerSec * 8, "The wave format was not valid.");
}
return pv.Value;
}
catch (Exception ex)
{
return "ERROR: " + ex.Message;
}
}
[ComVisible(true), Guid("e5b6a8de-913e-4756-b1c7-7c73a92eeb3f")]
public class MMDeviceNotifyClient : IMMNotificationClient
{
private readonly TextWriter textWriter;
public MMDeviceNotifyClient(TextWriter writer) => textWriter = writer;
HRESULT IMMNotificationClient.OnDefaultDeviceChanged(EDataFlow flow, ERole role, string pwstrDefaultDeviceId)
{
textWriter.WriteLine($"DefDevChg: flow={flow}, role={role}, dev={GetDeviceName(pwstrDefaultDeviceId)}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnDeviceAdded(string pwstrDeviceId)
{
textWriter.WriteLine($"DevAdd: dev={GetDeviceName(pwstrDeviceId)}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnDeviceRemoved(string pwstrDeviceId)
{
textWriter.WriteLine($"DevRmv: dev={GetDeviceName(pwstrDeviceId)}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnDeviceStateChanged(string pwstrDeviceId, DEVICE_STATE dwNewState)
{
textWriter.WriteLine($"DevStateChg: dev={GetDeviceName(pwstrDeviceId)}, state={dwNewState}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnPropertyValueChanged(string pwstrDeviceId, PROPERTYKEY key)
{
textWriter.WriteLine($"DefPropChg: dev={GetDeviceName(pwstrDeviceId)}, key={key}");
return HRESULT.S_OK;
}
}
}
/// <summary>This test ensures that each device can get its ID. It also checks that the received ID is not null.</summary>
[Test]
public void IMMDevice_GetId()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item))
{
string strId = null;
Assert.That(() => strId = d.GetId(), Throws.Nothing);
Assert.IsNotNull(strId);
TestContext.WriteLine($"Id:{d.GetId()}, State:{d.GetState()}");
}
}
/// <summary>
/// This test ensures that each device can get its state. It also checks that the received state is a valid device state constant.
/// </summary>
[Test]
public void IMMDevice_GetState()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item))
{
DEVICE_STATE deviceState = 0;
Assert.That(() => deviceState = d.GetState(), Throws.Nothing);
Assert.That(Enum.IsDefined(typeof(DEVICE_STATE), deviceState), Is.True);
}
}
/// <summary>
/// This test ensures that each device can open a property store in READWRITE mode and that the received property store is non-null.
/// It also checks that the property store object works correctly by making a call to get the property count.
/// </summary>
[Test]
public void IMMDevice_OpenPropertyStore()
{
var tested = false;
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var d in CreateIMMDeviceCollection(enumerator.Item))
{
TestContext.WriteLine($"**** {GetDeviceName(d.GetId())} ****");
// Open the property store
IPropertyStore propertyStore = null;
Assert.That(() => propertyStore = d.OpenPropertyStore(STGM.STGM_READ), Throws.Nothing);
// Verify the count can be received.
var propertyCount = uint.MaxValue;
Assert.That(() => propertyCount = propertyStore.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, propertyCount, "The property count was not received.");
// Get each property key, then get value.
for (uint i = 0; i < propertyCount; i++)
{
PROPERTYKEY propertyKey = default;
Assert.That(() => propertyKey = propertyStore.GetAt(i), Throws.Nothing);
var value = GetPropertyValue(propertyStore, propertyKey);
if (value != null)
tested = true;
TestContext.WriteLine($"{propertyKey.GetCanonicalName()}={value ?? "null"}");
}
}
if (!tested) Assert.Inconclusive("No property values returned valid, non-null values.");
}
/// <summary>
/// Tests that the individual render and capture device collections have a combined count equal to the total device count.
/// </summary>
[Test]
public void IMMDeviceCollection_GetCount()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
var allCaptureDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eCapture, DEVICE_STATE.DEVICE_STATEMASK_ALL);
var allRenderDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eRender, DEVICE_STATE.DEVICE_STATEMASK_ALL);
var allDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eAll, DEVICE_STATE.DEVICE_STATEMASK_ALL);
Assert.IsNotNull(allCaptureDevices, "The IMMDeviceCollection object is null.");
Assert.IsNotNull(allRenderDevices, "The IMMDeviceCollection object is null.");
Assert.IsNotNull(allDevices, "The IMMDeviceCollection object is null.");
uint captureCount = uint.MaxValue, renderCount = uint.MaxValue, allCount = uint.MaxValue;
Assert.That(() => captureCount = allCaptureDevices.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, captureCount, "Device count was not received.");
Assert.That(() => renderCount = allRenderDevices.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, renderCount, "Device count was not received.");
Assert.That(() => allCount = allDevices.GetCount(), Throws.Nothing);
Assert.AreNotEqual(uint.MaxValue, allDevices, "Device count was not received.");
Assert.AreEqual(allCount, captureCount + renderCount, "The combined number of capture and render devices is not equal to the total device count.");
}
/// <summary>Tests the all devices from index zero to [count - 1] can be received with S_OK HRESULT and each device is not null.</summary>
[Test]
public void IMMDeviceCollection_Item()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
IMMDeviceCollection allDevices = null;
Assert.That(() => allDevices = enumerator.Item.EnumAudioEndpoints(EDataFlow.eAll, DEVICE_STATE.DEVICE_STATEMASK_ALL), Throws.Nothing);
Assert.IsNotNull(allDevices, "The IMMDeviceCollection object is null");
uint count = 0;
Assert.That(() => count = allDevices.GetCount(), Throws.Nothing);
IMMDevice device;
for (uint i = 0; i < count; i++)
{
Assert.That(allDevices.Item(i, out device), ResultIs.Successful);
}
}
/// <summary>
/// This test method does nothing. Testing of the EnumAudioEndpoints method is implicit by testing other aspects of the IMMDevice API.
/// </summary>
[Test]
public void IMMDeviceEnumerator_EnumAudioEndpoints()
{
// This method is thouroughly tested through various other unit tests. The entry point for most other tests starts with calling EnumAudioEndpoints.
// TODO: Add specific test for this.
}
/// <summary>
/// Tests that the default audio endpoint for all combinations of data flow and roles can be created with S_OK HRESULT and that each
/// device is not null.
/// </summary>
[Test]
public void IMMDeviceEnumerator_GetDefaultAudioEndpoint()
{
IMMDevice device = null;
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
// data flow - eAll (this should always produce HRESULT of E_INVALIDARG, which is 0x80070057)
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eAll, ERole.eCommunications), Throws.Exception);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eAll, ERole.eConsole), Throws.Exception);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eAll, ERole.eMultimedia), Throws.Exception);
// data flow - eCapture
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eCommunications), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eConsole), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eMultimedia), Throws.Nothing);
Assert.IsNotNull(device);
// data flow - eRender
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eCommunications), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eConsole), Throws.Nothing);
Assert.IsNotNull(device);
Assert.That(() => device = enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eMultimedia), Throws.Nothing);
Assert.IsNotNull(device);
}
/// <summary>Tests that the GetDevice method can get each audio device individually, by ID.</summary>
[Test]
public void IMMDeviceEnumerator_GetDevice()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var device in CreateIMMDeviceCollection(enumerator.Item))
{
// Get the device ID.
string deviceId = null;
Assert.That(() => deviceId = device.GetId(), Throws.Nothing);
Assert.IsNotNull(deviceId, "The device string is null.");
// Get the IMMDevice directly from the ID.
IMMDevice deviceFromId = null;
Assert.That(() => deviceFromId = enumerator.Item.GetDevice(deviceId), Throws.Nothing);
Assert.IsNotNull(deviceFromId, "The IMMDevice object is null.");
// Ensure the IDs of each device match.
string deviceId2 = null;
Assert.That(() => deviceId2 = deviceFromId.GetId(), Throws.Nothing);
Assert.IsNotNull(deviceId2, "The device string is null.");
Assert.AreEqual(deviceId, deviceId2, "The device IDs are not equal.");
}
}
/// <summary>Tests that a valid client can be registered and an HRESULT of S_OK is returned.</summary>
[Test]
public void IMMDeviceEnumerator_RegisterEndpointNotificationCallback()
{
var cTok = new CancellationTokenSource();
var task = Task.Run(() =>
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
var client = new MMDeviceNotifyClient(TestContext.Out);
Assert.That(() => enumerator.Item.RegisterEndpointNotificationCallback(client), Throws.Nothing);
while (!cTok.Token.IsCancellationRequested)
Thread.Sleep(50);
Assert.That(() => enumerator.Item.UnregisterEndpointNotificationCallback(client), Throws.Nothing);
}, cTok.Token);
try
{
// Make changes
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
var activeEndpoints = CreateIMMDeviceCollection(enumerator.Item, EDataFlow.eAll, DEVICE_STATE.DEVICE_STATE_ACTIVE).ToList();
using var ep = ComReleaserFactory.Create(enumerator.Item.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eMultimedia));
using var alt = ComReleaserFactory.Create(activeEndpoints.First(d => d.GetId() != ep.Item.GetId()));
using var pc = ComReleaserFactory.Create(new CoreAudio.IPolicyConfig());
Assert.That(pc.Item.SetDefaultEndpoint(alt.Item.GetId(), ERole.eMultimedia), ResultIs.Successful);
Thread.Sleep(250);
Assert.That(pc.Item.SetDefaultEndpoint(ep.Item.GetId(), ERole.eMultimedia), ResultIs.Successful);
Thread.Sleep(250);
// Registry hack to disable
//Registry.SetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\MMDevices\Audio\Render\{14b8ed7a-b84f-43b1-8de6-dc678cd96836}", "DeviceState", 0x2, RegistryValueKind.DWord);
//Thread.Sleep(100);
//Registry.SetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\MMDevices\Audio\Render\{14b8ed7a-b84f-43b1-8de6-dc678cd96836}", "DeviceState", 0x1, RegistryValueKind.DWord);
//Thread.Sleep(100);
//using var vol = ComReleaserFactory.Create(ep.Item.Activate<IAudioEndpointVolume>());
//var mute = vol.Item.GetMute();
//vol.Item.SetMute(!mute, Guid.NewGuid());
//Thread.Sleep(100);
//Assert.That(vol.Item.GetMute(), Is.EqualTo(!mute));
//vol.Item.SetMute(mute, Guid.NewGuid());
//Thread.Sleep(100);
//Assert.That(vol.Item.GetMute(), Is.EqualTo(mute));
}
finally
{
cTok.Cancel();
task.Wait(1000);
}
}
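/// <summary>
/// Illustrative sketch (assumption): outside of a test, keep the enumerator and the notification client alive for the lifetime of
/// the subscription and always pair Register with Unregister, e.g. via try/finally.
/// </summary>
private static void ListenForEndpointChanges(IMMNotificationClient client, CancellationToken token)
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
enumerator.Item.RegisterEndpointNotificationCallback(client);
try { token.WaitHandle.WaitOne(); } // block until the caller cancels
finally { enumerator.Item.UnregisterEndpointNotificationCallback(client); }
}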
/// <summary>Tests that IMMEndpoint.GetDataFlow reports a concrete data flow (eRender or eCapture) for each endpoint.</summary>
[Test]
public void IMMEndpoint_GetDataFlow()
{
using var enumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
foreach (var device in CreateIMMDeviceCollection(enumerator.Item))
{
// The cast to IMMEndpoint is marshaled as a QueryInterface call on the underlying COM object.
var endpoint = (IMMEndpoint)device;
Assert.IsNotNull(endpoint);
EDataFlow dataFlow = EDataFlow.eAll;
Assert.That(() => dataFlow = endpoint.GetDataFlow(), Throws.Nothing);
Assert.AreNotEqual(EDataFlow.eAll, dataFlow);
}
}
private static string GetDeviceName(string devId)
{
if (lookup.TryGetValue(devId, out var val)) return val;
using var pEnum = ComReleaserFactory.Create(new IMMDeviceEnumerator());
using var pDev = ComReleaserFactory.Create(pEnum.Item.GetDevice(devId));
using var pProps = ComReleaserFactory.Create(pDev.Item.OpenPropertyStore(STGM.STGM_READ));
using var pv = new PROPVARIANT();
pProps.Item.GetValue(PKEY_Device_FriendlyName, pv);
lookup.Add(devId, pv.pwszVal);
return pv.pwszVal;
}
private object GetPropertyValue(IPropertyStore propertyStore, PROPERTYKEY propertyKey)
{
try
{
using var pv = new PROPVARIANT();
propertyStore.GetValue(propertyKey, pv);
if (propertyKey == AudioPropertyKeys.PKEY_AudioEngine_DeviceFormat || propertyKey == AudioPropertyKeys.PKEY_AudioEngine_OEMFormat)
{
Assert.That(pv.vt, Is.EqualTo(VARTYPE.VT_BLOB));
var format = pv.blob.pBlobData.ToStructure<WAVEFORMATEX>(pv.blob.cbSize);
if (format.nChannels != 0 && format.nSamplesPerSec != 0 && format.wBitsPerSample != 0)
Assert.AreEqual(format.nChannels * format.nSamplesPerSec * format.wBitsPerSample, format.nAvgBytesPerSec * 8, "The wave format was not valid.");
}
return pv.Value;
}
catch (Exception ex)
{
return "ERROR: " + ex.Message;
}
}
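/// <summary>
/// Illustrative sketch (assumption): the WAVEFORMATEX invariants asserted above, written out as a standalone check.
/// nBlockAlign must equal nChannels * wBitsPerSample / 8, and nAvgBytesPerSec must equal nSamplesPerSec * nBlockAlign.
/// </summary>
private static bool IsConsistentWaveFormat(in WAVEFORMATEX fmt) =>
fmt.nBlockAlign == fmt.nChannels * fmt.wBitsPerSample / 8 &&
fmt.nAvgBytesPerSec == fmt.nSamplesPerSec * fmt.nBlockAlign;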
[ComVisible(true), Guid("e5b6a8de-913e-4756-b1c7-7c73a92eeb3f")]
public class MMDeviceNotifyClient : IMMNotificationClient
{
private readonly TextWriter textWriter;
public MMDeviceNotifyClient(TextWriter writer) => textWriter = writer;
HRESULT IMMNotificationClient.OnDefaultDeviceChanged(EDataFlow flow, ERole role, string pwstrDefaultDeviceId)
{
textWriter.WriteLine($"DefDevChg: flow={flow}, role={role}, dev={GetDeviceName(pwstrDefaultDeviceId)}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnDeviceAdded(string pwstrDeviceId)
{
textWriter.WriteLine($"DevAdd: dev={GetDeviceName(pwstrDeviceId)}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnDeviceRemoved(string pwstrDeviceId)
{
textWriter.WriteLine($"DevRmv: dev={GetDeviceName(pwstrDeviceId)}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnDeviceStateChanged(string pwstrDeviceId, DEVICE_STATE dwNewState)
{
textWriter.WriteLine($"DevStateChg: dev={GetDeviceName(pwstrDeviceId)}, state={dwNewState}");
return HRESULT.S_OK;
}
HRESULT IMMNotificationClient.OnPropertyValueChanged(string pwstrDeviceId, PROPERTYKEY key)
{
textWriter.WriteLine($"DefPropChg: dev={GetDeviceName(pwstrDeviceId)}, key={key}");
return HRESULT.S_OK;
}
}
}


@ -0,0 +1,87 @@
#define AVRT
using Microsoft.Win32;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Vanara.Extensions;
using Vanara.InteropServices;
#if AVRT
using static Vanara.PInvoke.Avrt;
#endif
using static Vanara.PInvoke.CoreAudio;
using static Vanara.PInvoke.Ole32;
using static Vanara.PInvoke.Kernel32;
namespace Vanara.PInvoke.Tests;
public partial class CoreAudioTests
{
static readonly Guid AUDIO_EFFECT_TYPE_ACOUSTIC_ECHO_CANCELLATION = new("6f64adbe-8211-11e2-8c70-2c27d7f001fa");
/// <summary>Enumerates the active render endpoints and writes each friendly name to the test output.</summary>
[Test]
public void EnumDevices()
{
IMMDeviceEnumerator deviceEnumerator = new();
IMMDeviceCollection spDeviceCollection = deviceEnumerator.EnumAudioEndpoints(EDataFlow.eRender, DEVICE_STATE.DEVICE_STATE_ACTIVE);
for (uint i = 0; i < spDeviceCollection.GetCount(); i++)
{
spDeviceCollection.Item(i, out var device);
var properties = device.OpenPropertyStore(STGM.STGM_READ);
TestContext.WriteLine($"{i + 1}: {properties.GetValue(PKEY_Device_FriendlyName)}");
}
}
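/// <summary>
/// Illustrative sketch (assumption): the same enumeration as above, but with each COM object wrapped in ComReleaserFactory.Create
/// (the pattern used elsewhere in these tests) so the RCWs are released deterministically instead of waiting for the GC.
/// </summary>
private static void EnumDevicesDeterministicRelease()
{
using var deviceEnumerator = ComReleaserFactory.Create(new IMMDeviceEnumerator());
using var collection = ComReleaserFactory.Create(deviceEnumerator.Item.EnumAudioEndpoints(EDataFlow.eRender, DEVICE_STATE.DEVICE_STATE_ACTIVE));
for (uint i = 0; i < collection.Item.GetCount(); i++)
{
collection.Item.Item(i, out var device);
using var dev = ComReleaserFactory.Create(device);
using var props = ComReleaserFactory.Create(dev.Item.OpenPropertyStore(STGM.STGM_READ));
TestContext.WriteLine($"{i + 1}: {props.Item.GetValue(PKEY_Device_FriendlyName)}");
}
}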
/// <summary>Tests activation of a communications capture stream and, when supported, control of acoustic echo cancellation.</summary>
[Test]
public void EchoCancellationTest()
{
IMMDeviceEnumerator deviceEnumerator = new();
IMMDevice device = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eCommunications);
IAudioClient2 audioClient = device.Activate<IAudioClient2>(CLSCTX.CLSCTX_INPROC_SERVER);
AudioClientProperties clientProperties = new() { cbSize = InteropExtensions.SizeOf<AudioClientProperties>(), eCategory = AUDIO_STREAM_CATEGORY.AudioCategory_Communications };
audioClient.SetClientProperties(clientProperties);
audioClient.GetMixFormat(out var fmtMem).ThrowIfFailed();
audioClient.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS.AUDCLNT_STREAMFLAGS_EVENTCALLBACK, 10000000, 0, fmtMem).ThrowIfFailed();
var audioEffectsManager = audioClient.GetService<IAudioEffectsManager>();
var effects = audioEffectsManager.GetAudioEffects();
var ispresent = effects.Any(e => e.id == AUDIO_EFFECT_TYPE_ACOUSTIC_ECHO_CANCELLATION);
TestContext.WriteLine($"Capture stream is {(ispresent ? "" : "not ")}echo cancelled.");
string deviceId = device.GetId();
TestContext.WriteLine($"Created communications stream on capture endpoint {deviceId}");
var captureClient = audioClient.GetService<IAudioCaptureClient>();
using var terminationEvent = CreateEvent(default, false, false);
using var captureThread = CreateThread(default, default, p => {
#if AVRT
uint mmcssTaskIndex = 0;
using SafeHANDLE mmcssTaskHandle = AvSetMmThreadCharacteristics("Audio", ref mmcssTaskIndex);
#endif
using var bufferComplete = CreateEvent(default, false, false);
audioClient.SetEventHandle(bufferComplete);
// Start the stream so that buffer-completion events are actually signaled.
audioClient.Start();
// Pump packets while the buffer event (WAIT_OBJECT_0) fires; exit when terminationEvent (WAIT_OBJECT_0 + 1) is signaled or the wait fails.
while (WaitForMultipleObjects(new[] { bufferComplete, terminationEvent }, false, INFINITE) == WAIT_STATUS.WAIT_OBJECT_0)
{
while (captureClient.GetNextPacketSize(out var packetLen).Succeeded && packetLen > 0)
{
captureClient.GetBuffer(out var buffer, out var numFramesRead, out var flags, out _, out _).ThrowIfFailed();
captureClient.ReleaseBuffer(numFramesRead);
}
}
audioClient.Stop();
return 0;
}, default, default, out _);
if (ispresent)
{
IAcousticEchoCancellationControl aecControl = audioClient.GetService<IAcousticEchoCancellationControl>();
aecControl.SetEchoCancellationRenderEndpoint(null);
}
Sleep(10000);
terminationEvent.Set();
WaitForSingleObject(captureThread, 1000);
}
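/// <summary>
/// Illustrative sketch (assumption): AUDIO_EFFECT also carries canSetState and state fields, so the full effect list can be logged
/// rather than only probing for the echo-cancellation GUID as the test above does; the field names are assumed to match the header.
/// </summary>
private static void DumpAudioEffects(IAudioEffectsManager manager)
{
foreach (var effect in manager.GetAudioEffects())
TestContext.WriteLine($"effect={effect.id}, canSetState={effect.canSetState}, state={effect.state}");
}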
}


@ -12,7 +12,7 @@ using Vanara.InteropServices;
using static Vanara.PInvoke.CoreAudio;
using static Vanara.PInvoke.Ole32;
using static Vanara.PInvoke.PropSys;
using static Vanara.PInvoke.Winmm;
using static Vanara.PInvoke.WinMm;
namespace Vanara.PInvoke.Tests
{

Vanara.sln

File diff suppressed because it is too large