Files
UnrealEngine/Engine/Source/ThirdParty/libPhonon/phonon_api/include/phonon.h
2025-05-18 13:04:45 +08:00

2262 lines
154 KiB
C

/*
* Copyright 2017 Valve Corporation. All rights reserved. Subject to the following license:
* https://valvesoftware.github.io/steam-audio/license.html
*/
#ifndef IPL_PHONON_H
#define IPL_PHONON_H
#include <stddef.h>
#include "phonon_version.h"
#if defined(SWIG)
#define IPLAPI
#elif (defined(_WIN32) || defined(_WIN64))
#define IPLAPI __declspec(dllexport)
#else
#define IPLAPI __attribute__((visibility("default")))
#endif
#ifdef __cplusplus
extern "C" {
#endif
/*****************************************************************************************************************/
/* Data Types */
/*****************************************************************************************************************/
/** \defgroup types Data Types
* Common data types used throughout the Phonon API.
* \{ */
typedef void IPLvoid; /**< Equivalent to \c void. */
typedef char IPLint8; /**< Signed 8-bit integer. */
typedef unsigned char IPLuint8; /**< Unsigned 8-bit integer. */
typedef short IPLint16; /**< Signed 16-bit integer. */
typedef unsigned short IPLuint16; /**< Unsigned 16-bit integer. */
typedef int IPLint32; /**< Signed 32-bit integer. */
typedef unsigned int IPLuint32; /**< Unsigned 32-bit integer. */
typedef long long IPLint64; /**< Signed 64-bit integer. */
typedef unsigned long long IPLuint64; /**< Unsigned 64-bit integer. */
typedef float IPLfloat32; /**< Single-precision floating-point number. */
typedef double IPLfloat64; /**< Double-precision floating-point number. */
typedef unsigned char IPLbyte; /**< A single byte. */
typedef size_t IPLsize; /**< Unsigned integer of machine-dependent size. Equivalent to \c size_t. */
typedef char* IPLstring; /**< NULL-terminated string. ASCII or UTF-8 encoding is supported. */
/** An opaque handle to a Phonon API object. A variable of this type may not be cast to a pointer to any other
* API type. */
typedef void* IPLhandle;
/** Boolean values. As the first enumerator, \c IPL_FALSE is guaranteed by the C standard to be 0, and
 * \c IPL_TRUE to be 1, so values of this type may be used directly in conditional expressions. */
typedef enum {
IPL_FALSE, /**< The Boolean value \c false (0). */
IPL_TRUE /**< The Boolean value \c true (1). */
} IPLbool;
/** Status codes returned by Phonon API functions. \c IPL_STATUS_SUCCESS is guaranteed to be 0 (first
 * enumerator), so any non-zero value indicates failure. */
typedef enum {
IPL_STATUS_SUCCESS, /**< The operation completed successfully. Guaranteed to be 0. */
IPL_STATUS_FAILURE, /**< An unspecified error occurred. */
IPL_STATUS_OUTOFMEMORY, /**< The system ran out of memory. */
IPL_STATUS_INITIALIZATION /**< An error occurred while initializing an external dependency. */
} IPLerror;
/** \} */
/*****************************************************************************************************************/
/* Context */
/*****************************************************************************************************************/
/** \defgroup context Context
* Defines a Context object, which controls low-level operations of Phonon. Typically, a Context is specified
* once during the execution of the client program, before calling any other API functions. Once any API function
* is called, changing the Context may lead to undefined behavior.
* \{
*/
/** Prototype of a callback that logs a message generated by Phonon. This may be implemented in any suitable way,
* such as appending to a log file, displaying a dialog box, etc. The default behavior is to print to \c stdout.
*
* \param message The message to log.
*/
typedef IPLvoid (*IPLLogFunction)(char* message);
/** Prototype of a callback that allocates memory. This is usually specified to let Phonon use a custom memory
* allocator. The default behavior is to use the OS-dependent aligned version of \c malloc.
*
* \param size The number of bytes to allocate.
* \param alignment The alignment (in bytes) of the start address of the allocated memory.
*
* \return Pointer to the allocated block of memory, or \c NULL if allocation failed.
*/
typedef IPLvoid* (*IPLAllocateFunction)(IPLsize, IPLsize);
/** Prototype of a callback that frees a block of memory. This is usually specified when using a custom memory
* allocator with Phonon. The default behavior is to use the OS-dependent aligned version of \c free.
*
* \param memoryBlock Pointer to the block of memory.
*/
typedef IPLvoid (*IPLFreeFunction)(IPLvoid* memoryBlock);
/** Creates a Context object. A Context object must be created before creating any other API objects.
*
* \param logCallback Callback for logging messages. Can be NULL.
* \param allocateCallback Callback for allocating memory. Can be NULL.
* \param freeCallback Callback for freeing memory. Can be NULL.
* \param context [out] Handle to the created Context object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateContext(IPLLogFunction logCallback,
IPLAllocateFunction allocateCallback,
IPLFreeFunction freeCallback,
IPLhandle* context);
/** Destroys a Context object. If any other API objects are still referencing the Context object, it will not be
* destroyed; destruction occurs when the Context object's reference count reaches zero.
*
* \param context [in, out] Address of a handle to the Context object to destroy.
*/
IPLAPI IPLvoid iplDestroyContext(IPLhandle* context);
/** Performs last-minute cleanup and finalization. This function must be the last API function to be called before
* your application exits.
*/
/* Note: declared with (void) so this is a true C prototype; an empty parameter list () in C declares a
 * function with unspecified parameters and disables argument checking. Callers are unaffected. */
IPLAPI IPLvoid iplCleanup(void);
/** \} */
/*****************************************************************************************************************/
/* Geometry */
/*****************************************************************************************************************/
/** \defgroup geometry Geometry
* Geometric data types and helper functions. These structures and functions are used to specify 3D geometric
* objects to various functions in the Phonon API.
* \{
*/
/** A point or vector in 3D space. Phonon uses a right-handed coordinate system, with the positive x-axis pointing
* right, the positive y-axis pointing up, and the negative z-axis pointing ahead. Position and direction data
* obtained from a game engine or audio engine must be properly transformed before being passed to any Phonon API
* function.
*/
typedef struct {
IPLfloat32 x; /**< The x-coordinate. */
IPLfloat32 y; /**< The y-coordinate. */
IPLfloat32 z; /**< The z-coordinate. */
} IPLVector3;
/** An axis-aligned box. Axis-aligned boxes are used to specify a volume of 3D space.
*/
typedef struct {
IPLVector3 minCoordinates; /**< The minimum coordinates of any vertex. */
IPLVector3 maxCoordinates; /**< The maximum coordinates of any vertex. */
} IPLBox;
/** A sphere. Spheres are used to define a region of influence around a point.
*/
typedef struct {
IPLVector3 center; /**< The center. */
IPLfloat32 radius; /**< The radius. */
} IPLSphere;
/** Calculates the relative direction from the listener to a sound source. The returned direction
* vector is expressed in the listener's coordinate system.
*
* \param sourcePosition World-space coordinates of the source.
* \param listenerPosition World-space coordinates of the listener.
* \param listenerAhead World-space unit-length vector pointing ahead relative to the listener.
* \param listenerUp World-space unit-length vector pointing up relative to the listener.
*
* \return A unit-length vector in the listener's coordinate space, pointing from the listener to the source.
*/
IPLAPI IPLVector3 iplCalculateRelativeDirection(IPLVector3 sourcePosition, IPLVector3 listenerPosition,
IPLVector3 listenerAhead, IPLVector3 listenerUp);
/** \} */
/*****************************************************************************************************************/
/* OpenCL Compute Devices */
/*****************************************************************************************************************/
/** \defgroup compute Compute Device
* Functions for controlling an OpenCL compute device. Phonon requires OpenCL when used with the following
* third-party technologies:
*
* - AMD Radeon Rays
* - AMD TrueAudio Next
*
* If you are not using one of the above technologies, you do not need to call any of the Phonon API functions
* that deal with OpenCL Compute Device objects.
* \{
*/
/** The type of device to use with OpenCL. The appropriate OpenCL drivers must be installed on the user's system.
* Multiple OpenCL drivers may be installed on the same system; in this case the first available driver that
* exposes the specified kind of device will be used.
*/
typedef enum {
IPL_COMPUTEDEVICE_CPU, /**< Use a CPU device only. */
IPL_COMPUTEDEVICE_GPU, /**< Use a GPU device only. */
IPL_COMPUTEDEVICE_ANY /**< Use either a CPU or GPU device, whichever is listed first by the driver. */
} IPLComputeDeviceType;
/** Specifies constraints on the type of OpenCL device to create. This information is intended to be passed to
* \c iplCreateComputeDevice.
*/
typedef struct {
IPLComputeDeviceType type; /**< The type of device to use. */
IPLint32 maxCUsToReserve; /**< The maximum number of GPU compute units (CUs) that the
application will reserve on the device. When set to zero,
resource reservation is disabled and the entire GPU is used.*/
IPLfloat32 fractionCUsForIRUpdate; /**< Fraction of maximum reserved CUs that should be used
for impulse response (IR) update. The IR update includes
any simulation performed by Radeon Rays to calculate IR and/or
pre-transformation of the IR for convolution with input audio.
The remaining reserved CUs are used for convolution.
Below are typical scenarios:
- <b>Using only AMD TrueAudio Next with Steam Audio.</b>
Set \c fractionCUsForIRUpdate to a value greater than 0 and less
than 1 in this case. This ensures that reserved CUs are
available for IR update as well as convolution. For example,
setting \c maxCUsToReserve to 8 and \c fractionCUsForIRUpdate
to .5 will use 4 reserved CUs for convolution and 4 reserved
CUs to pre-transform IR calculated on CPU or GPU.
- <b>Using AMD TrueAudio Next and AMD Radeon Rays with Steam Audio.</b>
Choosing \c fractionCUsForIRUpdate may require some experimentation
to utilize reserved CUs optimally. For example, setting
\c maxCUsToReserve to 8 and \c fractionCUsForIRUpdate to .5 will use
4 reserved CUs for convolution and 4 reserved CUs for IR update.
However, if IR calculation has high latency with these settings,
you may want to increase \c fractionCUsForIRUpdate to devote
additional reserved CUs for IR update.
- <b>Using only AMD Radeon Rays with Steam Audio.</b>
Set \c fractionCUsForIRUpdate to 1 to make sure all the
reserved CUs are used for calculating IRs using Radeon Rays
and pre-transforming the calculated IRs.
If the number of reserved CUs assigned for convolution or IR
update are 0, then the entire GPU minus the reserved CUs are
used for the corresponding calculations. For example,
if \c maxCUsToReserve is set to 8 and \c fractionCUsForIRUpdate
is set to 0 then all the reserved CUs are used for convolution and
the rest of the GPU is used for IR update.*/
} IPLComputeDeviceFilter;
/** Creates a Compute Device object. The same Compute Device must be used by the game engine and audio engine
* parts of the Phonon integration. Depending on the OpenCL driver and device, this function may take some
* time to execute, so do not call it from performance-sensitive code.
*
* \param context The Context object used by the game engine.
* \param deviceFilter Constraints on the type of device to create.
* \param device [out] Handle to the created Compute Device object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateComputeDevice(IPLhandle context, IPLComputeDeviceFilter deviceFilter, IPLhandle* device);
/** Destroys a Compute Device object. If any other API objects are still referencing the Compute Device object,
* it will not be destroyed; destruction occurs when the object's reference count reaches zero.
*
* \param device [in, out] Address of a handle to the Compute Device object to destroy.
*/
IPLAPI IPLvoid iplDestroyComputeDevice(IPLhandle* device);
/** \} */
/*****************************************************************************************************************/
/* Simulation Settings */
/*****************************************************************************************************************/
/** \defgroup simsettings Simulation Settings
* Types for specifying simulation settings.
* \{
*/
/** The ray tracer to use for scene representation and simulation. Phonon lets you choose from multiple ray
* tracing implementations, each with different trade-offs. You can also choose to use your own ray tracing
* implementation.
*/
typedef enum {
IPL_SCENETYPE_PHONON, /**< Phonon's built-in ray tracer which supports multi-threading. */
IPL_SCENETYPE_EMBREE, /**< The Intel Embree ray tracer. This is a highly-optimized multi-threaded CPU
implementation, and is likely to be faster than the Phonon ray tracer. However,
Embree support requires a 64-bit CPU, and is not available on Android. */
IPL_SCENETYPE_RADEONRAYS, /**< The AMD Radeon Rays ray tracer. This is an OpenCL implementation, and can
use either the CPU or the GPU. If using the GPU, it is likely to be
significantly faster than the Phonon ray tracer. However, on heavy
real-time simulation workloads, it may impact the application's frame rate. */
IPL_SCENETYPE_CUSTOM /**< Allows you to specify callbacks to your own ray tracer. Useful if your
application already uses a high-performance ray tracer. This option uses
the least amount of memory at run-time, since it does not have to build
any ray tracing data structures of its own. */
} IPLSceneType;
/** The type of simulation to perform. All sound sources must use the same type of simulation; it is not
* currently possible to use real-time simulation for some sources and baked data for others.
*/
typedef enum {
IPL_SIMTYPE_REALTIME, /**< Real-time simulation. Sound propagation from all sound sources is
constantly updated in a separate thread, as the player moves and interacts
with the scene. This is a very performance-intensive approach, and requires
the user to have a powerful PC for optimal results. This is also the type
of simulation to choose when generating baked data. */
IPL_SIMTYPE_BAKED /**< Simulation using baked data. If baked data has been generated for the scene
and sound sources, simulation will be carried out by looking up information
from the baked data. This approach has much lower CPU usage than real-time
simulation, but at the cost of increased memory usage. */
} IPLSimulationType;
/** Configures the complexity of the simulation. You can fine-tune these values to arrive at a suitable
* balance between performance, memory usage, and acoustic detail.
*/
typedef struct {
IPLSceneType sceneType; /**< The ray tracer to use for simulation. \see IPLSceneType. */
IPLint32 numOcclusionSamples; /**< The number of rays to trace from the listener to a source
when simulating volumetric occlusion. Increasing this number
increases the smoothness of occlusion transitions, but also
increases CPU usage and memory consumption. Any positive
integer may be specified, but typical values are in the range
of 32 to 512. */
IPLint32 numRays; /**< The number of rays to trace from the listener. Increasing this
number increases the accuracy of the simulation, but also
increases CPU usage. Any positive integer may be specified,
but typical values are in the range of 1024 to 131072. */
IPLint32 numDiffuseSamples; /**< The number of directions to consider when a ray bounces off
a diffuse (or partly diffuse) surface. Increasing this number
increases the accuracy of diffuse reflections, and does not
significantly impact CPU usage. Any positive integer may be
specified, but typical values are in the range of 32 to 4096. */
IPLint32 numBounces; /**< The maximum number of times any ray can bounce within the scene.
Increasing this number allows the simulation to more accurately
model reverberant spaces, at the cost of increased CPU usage.
Any positive integer may be specified, but typical values are
in the range of 1 to 32. */
IPLint32 numThreads; /**< The number of threads to create for the simulation. The performance
improves linearly with the number of threads up to the number of
physical cores available on the CPU. */
IPLfloat32 irDuration; /**< The time delay between a sound being emitted and the last
audible reflection. Echoes and reverberation longer than this
amount will not be modeled by the simulation. Any positive
number may be specified, but typical values are in the range
of 0.5 to 4.0. */
IPLint32 ambisonicsOrder; /**< The amount of directional detail in the simulation results.
Phonon encodes the simulation results using Ambisonics.
Increasing this number increases the amount of directional
detail in the simulated acoustics, but at the cost of
increased CPU usage and memory consumption. Supported values
are between 0 and 3. */
IPLint32 maxConvolutionSources; /**< The maximum number of sound sources that can be simulated
and rendered using a Convolution Effect object at any point
in time. If you attempt to create more than this many
Convolution Effect objects, creation will fail. Increasing
this number allows more sound sources to be rendered with
sound propagation effects, but at the cost of increased
memory consumption. */
IPLint32 bakingBatchSize; /**< The number of probes that should be baked simultaneously.
Only used if \c sceneType is set to
\c IPL_SCENETYPE_RADEONRAYS, ignored otherwise. Set this to
1 unless you are creating a Scene for the purposes of
baking indirect sound using \c iplBakeReverb,
\c iplBakePropagation, or \c iplBakeStaticListener. */
IPLfloat32 irradianceMinDistance; /**< The minimum distance between a source and a scene surface,
used when calculating the energy received at the surface from
the source during indirect sound simulation. Increasing this
number reduces the loudness of reflections when standing
close to a wall; decreasing this number results in a more
physically realistic model. */
} IPLSimulationSettings;
/** \} */
/*****************************************************************************************************************/
/* Scene */
/*****************************************************************************************************************/
/** \defgroup scene Scene
* Functions and types for specifying scene information. Before you can use physics-based sound propagation
* features like occlusion or reverb, you must specify the geometry and materials that make up the 3D scene.
* \{
*/
/** A triangle in 3D space. Triangles are specified by their three vertices, which are in turn specified using
indices into a vertex array. See \c ::iplCreateStaticMesh for how to specify the vertex array. Phonon uses
* a counter-clockwise winding order. This means that when looking at the triangle such that the normal is
* pointing towards you, the vertices are specified in counter-clockwise order.
*/
typedef struct {
IPLint32 indices[3]; /**< Indices of the three vertices of this triangle. Each triangle must be specified
using three vertices; triangle strip or fan representations are not supported. */
} IPLTriangle;
/** The acoustic properties of a surface. You can specify the acoustic material properties of each triangle,
* although typically many triangles will share a common material. The acoustic material properties are specified
* for three frequency bands with center frequencies of 400 Hz, 2.5 KHz, and 15 KHz.
*
* Below are the acoustic material properties for a few standard materials.
*
* ```cpp
* {"generic",{0.10f,0.20f,0.30f,0.05f,0.100f,0.050f,0.030f}}
* {"brick",{0.03f,0.04f,0.07f,0.05f,0.015f,0.015f,0.015f}}
* {"concrete",{0.05f,0.07f,0.08f,0.05f,0.015f,0.002f,0.001f}}
* {"ceramic",{0.01f,0.02f,0.02f,0.05f,0.060f,0.044f,0.011f}}
* {"gravel",{0.60f,0.70f,0.80f,0.05f,0.031f,0.012f,0.008f}},
* {"carpet",{0.24f,0.69f,0.73f,0.05f,0.020f,0.005f,0.003f}}
* {"glass",{0.06f,0.03f,0.02f,0.05f,0.060f,0.044f,0.011f}}
* {"plaster",{0.12f,0.06f,0.04f,0.05f,0.056f,0.056f,0.004f}}
* {"wood",{0.11f,0.07f,0.06f,0.05f,0.070f,0.014f,0.005f}}
* {"metal",{0.20f,0.07f,0.06f,0.05f,0.200f,0.025f,0.010f}}
* {"rock",{0.13f,0.20f,0.24f,0.05f,0.015f,0.002f,0.001f}}
* ```
*/
typedef struct {
IPLfloat32 lowFreqAbsorption; /**< Fraction of sound energy absorbed at low frequencies. Between 0.0 and
1.0. */
IPLfloat32 midFreqAbsorption; /**< Fraction of sound energy absorbed at middle frequencies. Between 0.0
and 1.0. */
IPLfloat32 highFreqAbsorption; /**< Fraction of sound energy absorbed at high frequencies. Between 0.0 and
1.0. */
IPLfloat32 scattering; /**< Fraction of sound energy that is scattered in a random direction when
it reaches the surface. Between 0.0 and 1.0. A value of 0.0 describes
a smooth surface with mirror-like reflection properties; a value of 1.0
describes a rough surface with diffuse reflection properties. */
IPLfloat32 lowFreqTransmission; /**< Fraction of sound energy transmitted through at low frequencies.
Between 0.0 and 1.0.
<b>Used only for direct sound occlusion calculations</b>.*/
IPLfloat32 midFreqTransmission; /**< Fraction of sound energy transmitted through at middle frequencies.
Between 0.0 and 1.0.
<b>Used only for direct sound occlusion calculations</b>.*/
IPLfloat32 highFreqTransmission; /**< Fraction of sound energy transmitted through at high frequencies.
Between 0.0 and 1.0.
<b>Used only for direct sound occlusion calculations</b>.*/
} IPLMaterial;
/** A callback that is called to update the application on the progress of the iplLoadScene function. You
* can use this to provide the user with visual feedback, like a progress bar.
*
* \param progress Fraction of the loading process that has been completed, between 0.0 and 1.0.
*/
typedef void(*IPLLoadSceneProgressCallback)(IPLfloat32 progress);
/** A callback that is called to update the application on the progress of the iplFinalizeScene function. You can
* use this to provide the user with visual feedback, like a progress bar.
*
* \param progress Fraction of the finalization process that has been completed, between 0.0 and 1.0.
*/
typedef void(*IPLFinalizeSceneProgressCallback)(IPLfloat32 progress);
/** A callback that is called to calculate the closest hit along a ray. Strictly speaking, the intersection is
* calculated with a ray _interval_ (equivalent to a line segment). Any ray interval may have multiple points
* of intersection with scene geometry; this function must return information about the point of intersection that
* is closest to the ray's origin.
*
* \param origin Array containing the x, y, z coordinates (in that order) of the ray's origin.
* \param direction Array containing the x, y, z coordinates (in that order) of a unit-length vector
* along the ray's direction.
* \param minDistance The minimum distance from the origin at which an intersection may occur for it
* to be considered. This function must not return any intersections closer to the
* origin than this value.
* \param maxDistance The maximum distance from the origin at which an intersection may occur for it
* to be considered. This function must not return any intersections farther from
* the origin than this value.
* \param hitDistance [out] Distance between the origin and the closest intersection point on the ray.
* \param hitNormal [out] Array containing the x, y, z coordinates (in that order) of the unit-length
* surface normal of the geometry at the closest intersection point.
* \param hitMaterial [out] Address of a pointer to the material properties of the surface at the closest
* intersection point. The array contains the low-, mid-, and high-frequency
* absorption coefficients, the scattering coefficient, and the low-, mid-, and
* high-frequency transmission coefficients, in that order.
* \param userData Pointer to a block of memory containing arbitrary data, specified during the call to
* \c ::iplCreateScene.
*/
typedef void (*IPLClosestHitCallback)(const IPLfloat32* origin, const IPLfloat32* direction,
const IPLfloat32 minDistance, const IPLfloat32 maxDistance, IPLfloat32* hitDistance, IPLfloat32* hitNormal,
IPLMaterial** hitMaterial, IPLvoid* userData);
/** A callback that is called to calculate whether a ray hits any geometry. Strictly speaking, the function
* looks for any intersection with a ray _interval_ (equivalent to a line segment).
*
* \param origin Array containing the x, y, z coordinates (in that order) of the ray's origin.
* \param direction Array containing the x, y, z coordinates (in that order) of a unit-length vector
* along the ray's direction.
* \param minDistance The minimum distance from the origin at which an intersection may occur for it
* to be considered.
* \param maxDistance The maximum distance from the origin at which an intersection may occur for it
* to be considered.
* \param hitExists [out] An integer indicating whether the ray intersects any geometry. A value of 0
* indicates no intersection, 1 indicates that an intersection exists.
* \param userData Pointer to a block of memory containing arbitrary data, specified during the call to
* \c ::iplCreateScene.
*/
typedef void (*IPLAnyHitCallback)(const IPLfloat32* origin, const IPLfloat32* direction,
const IPLfloat32 minDistance, const IPLfloat32 maxDistance, IPLint32* hitExists, IPLvoid* userData);
/** A callback that is called to calculate the closest hits along a batch of rays. Strictly speaking, intersections
* are calculated with ray _intervals_ (equivalent to line segments). Any ray interval may have multiple points
* of intersection with scene geometry; this function must return, for each ray interval, information about the
* point of intersection that is closest to the ray's origin.
*
* \param numRays Number of rays in the batch.
* \param origins Array containing the origins of each ray. Successive ray origins are located
* \c rayStride bytes apart in this array.
* \param directions Array containing the unit-length direction vectors of each ray. Successive ray
* directions are located \c rayStride bytes apart in this array.
* \param rayStride Number of bytes between successive origins in the \c origins array, and between
* successive directions in the \c directions array.
* \param minDistances Array containing, for each ray, the minimum distance from the origin at which an
* intersection may occur for it to be considered.
* \param maxDistances Array containing, for each ray, the maximum distance from the origin at which an
* intersection may occur for it to be considered.
* \param hitDistances [out] Array containing, for each ray, the distance between the ray's origin and
* the closest intersection point on the ray. Successive distance values are located
* \c hitStride bytes apart in this array.
* \param hitNormals [out] Array containing, for each ray, the unit-length surface normal at the ray's
* closest intersection point. Successive normals are located \c hitStride bytes
* apart in this array.
* \param hitMaterial [out] Array containing, for each ray, a pointer to the material properties of the
* surface at the closest intersection point. Successive material pointers are
* located \c hitStride bytes apart in this array.
* \param hitStride Number of bytes between successive distance values in the \c hitDistances array,
* between successive normals in the \c hitNormals array, and successive material
* pointers in the \c hitMaterials array.
* \param userData Pointer to a block of memory containing arbitrary data, specified during the call to
* \c ::iplCreateScene.
*/
typedef void (*IPLBatchedClosestHitCallback)(IPLint32 numRays, IPLVector3* origins, IPLVector3* directions,
IPLint32 rayStride, IPLfloat32* minDistances, IPLfloat32* maxDistances, IPLfloat32* hitDistances,
IPLVector3* hitNormals, IPLMaterial** hitMaterials, IPLint32 hitStride, IPLvoid* userData);
/** A callback that is called to calculate, for each ray in a batch of rays, whether the ray hits any geometry.
* Strictly speaking, the function looks for intersections with ray _intervals_ (equivalent to line segments).
*
* \param numRays Number of rays in the batch.
* \param origins Array containing the origins of each ray. Successive ray origins are located
* \c rayStride bytes apart in this array.
* \param directions Array containing the unit-length direction vectors of each ray. Successive ray
* directions are located \c rayStride bytes apart in this array.
* \param rayStride Number of bytes between successive origins in the \c origins array, and between
* successive directions in the \c directions array.
* \param minDistances Array containing, for each ray, the minimum distance from the origin at which an
* intersection may occur for it to be considered.
* \param maxDistances Array containing, for each ray, the maximum distance from the origin at which an
* intersection may occur for it to be considered.
* \param hitExists [out] An array of integers indicating, for each ray, whether the ray intersects
* any geometry. A value of 0 indicates no intersection, 1 indicates that an
* intersection exists.
* \param userData Pointer to a block of memory containing arbitrary data, specified during the call to
* \c ::iplCreateScene.
*/
typedef void (*IPLBatchedAnyHitCallback)(IPLint32 numRays, IPLVector3* origins, IPLVector3* directions,
IPLint32 rayStride, IPLfloat32* minDistances, IPLfloat32* maxDistances, IPLuint8* hitExists,
IPLvoid* userData);
/** Creates a Scene object. A Scene object does not store any geometry information on its own; for that you
* need to create one or more Static Mesh objects and add them to the Scene object. The Scene object
* does contain an array of materials; all triangles in all Static Mesh objects refer to this array in order
* to specify their material properties.
*
* \param context The Context object used by the game engine.
* \param computeDevice Handle to a Compute Device object. Only required if using Radeon Rays for
* ray tracing, may be \c NULL otherwise.
* \param simulationSettings The settings to use for simulation.
* \param numMaterials The number of materials that are used to describe the various surfaces in
* the scene. Materials may not be added or removed once the Scene object is
* created.
* \param materials Array containing all the materials in the Scene object. The number of
* \c IPLMaterial objects in the array must be equal to the value of \c numMaterials
* passed to \c ::iplCreateScene.
* \param closestHitCallback Pointer to a function that returns the closest hit along a ray.
* \param anyHitCallback Pointer to a function that returns whether a ray hits anything.
* \param batchedClosestHitCallback Pointer to a function that returns the closest hits along each ray in a
* batch of rays. Can be \c NULL. If not \c NULL, then this function is used
* instead of \c closestHitCallback.
* \param batchedAnyHitCallback Pointer to a function that returns, for each ray in a batch of rays,
* whether the ray hits anything. Can be \c NULL. If not \c NULL, then this
* function is used instead of \c anyHitCallback.
* \param userData Pointer to a block of memory containing arbitrary data for use
* by the closest hit and any hit callbacks.
* \param scene [out] Handle to the created Scene object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateScene(IPLhandle context, IPLhandle computeDevice,
IPLSimulationSettings simulationSettings, IPLint32 numMaterials,
IPLMaterial* materials, IPLClosestHitCallback closestHitCallback,
IPLAnyHitCallback anyHitCallback,
IPLBatchedClosestHitCallback batchedClosestHitCallback,
IPLBatchedAnyHitCallback batchedAnyHitCallback, IPLvoid* userData,
IPLhandle* scene);
/** Destroys a Scene object. If any other API objects are still referencing the Scene object, it will not be
* destroyed; destruction occurs when the object's reference count reaches zero.
*
* \param scene [in, out] Address of a handle to the Scene object to destroy.
*/
IPLAPI IPLvoid iplDestroyScene(IPLhandle* scene);
/** Creates a Static Mesh object. A Static Mesh object represents a triangle mesh that does not change after it
* is created. A Static Mesh object also contains a mapping between each of its triangles and their acoustic
* material properties. Static Mesh objects should be used for scene geometry that is guaranteed to never change,
* such as rooms, buildings, or triangulated terrain. A Scene object may contain multiple Static Mesh objects,
* although typically one is sufficient.
*
* \param scene Handle to the Scene object to which to add the Static Mesh object.
* \param numVertices Number of vertices in the triangle mesh.
* \param numTriangles Number of triangles in the triangle mesh.
* \param vertices Array containing the coordinates of all vertices in the Static Mesh object.
* The number of \c IPLVector3 objects in the array must be equal to the value of
* \c numVertices passed to \c ::iplCreateStaticMesh.
* \param triangles Array containing all triangles in the Static Mesh object. The number of
* \c IPLTriangle objects in the array must be equal to the value of
* \c numTriangles passed to \c ::iplCreateStaticMesh.
* \param materialIndices Array containing material indices for all triangles in the Static Mesh object.
* The number of material indices in the array must be equal to the value of
* \c numTriangles passed to \c ::iplCreateStaticMesh.
* \param staticMesh [out] Handle to the created Static Mesh object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateStaticMesh(IPLhandle scene, IPLint32 numVertices, IPLint32 numTriangles,
IPLVector3* vertices, IPLTriangle* triangles, IPLint32* materialIndices,
IPLhandle* staticMesh);
/** Destroys a Static Mesh object. If any other API objects are still referencing the Static Mesh object, it will
* not be destroyed; destruction occurs when the object's reference count reaches zero. Since the Scene object
* maintains an internal reference to the Static Mesh object, you may call this function at any point after
* fully specifying the Static Mesh object using \c ::iplCreateStaticMesh.
*
* \param staticMesh [in, out] Address of a handle to the Static Mesh object to destroy.
*/
IPLAPI IPLvoid iplDestroyStaticMesh(IPLhandle* staticMesh);
    /** Serializes a Scene object to a byte array. This function can only be called on a Scene object that
     *  has been created using the Phonon built-in ray tracer.
     *
     *  \param  scene               Handle to the Scene object.
     *  \param  data                [out] Byte array into which the Scene object will be serialized. It is the
     *                              caller's responsibility to manage memory for this array. The array must be large
     *                              enough to hold all the data in the Scene object. May be \c NULL, in which case
     *                              no data is returned; this is useful when finding out the size of the data stored
     *                              in the Scene object.
     *
     *  \return Size (in bytes) of the Scene object's serialized data. Call this function with \c data set to
     *          \c NULL to determine how large a buffer to allocate.
     */
    IPLAPI IPLint32 iplSaveScene(IPLhandle scene, IPLbyte* data);
/** Creates a Scene object based on data stored in a byte array.
*
* \param context The Context object used by the game engine.
* \param simulationSettings The settings to use for the simulation. This must exactly match the settings
* that were used to create the original Scene object that was passed to
* \c ::iplSaveScene, except for the \c sceneType and \c simulationType
* data members. This allows you to use the same file to create a Scene object
* that uses any ray tracer you prefer.
* \param data Byte array containing the serialized representation of the Scene object. Must
* not be \c NULL.
* \param size Size (in bytes) of the serialized data.
* \param computeDevice Handle to a Compute Device object. Only required if using Radeon Rays for
* ray tracing, may be \c NULL otherwise.
* \param progressCallback Pointer to a function that reports the percentage of this function's work
* that has been completed. May be \c NULL.
* \param scene [out] Handle to the created Scene object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplLoadScene(IPLhandle context, IPLSimulationSettings simulationSettings,
IPLbyte* data, IPLint32 size, IPLhandle computeDevice, IPLLoadSceneProgressCallback progressCallback, IPLhandle* scene);
/** Saves a Scene object to an OBJ file. An OBJ file is a widely-supported 3D model file format, that can be
* displayed using a variety of software on most PC platforms. The OBJ file generated by this function can be
* useful for detecting problems that occur when exporting scene data from the game engine to Phonon.
* This function can only be called on a Scene object that has been created using the Phonon built-in ray tracer.
*
* \param scene Handle to the Scene object.
* \param fileBaseName Absolute or relative path to the OBJ file to generate.
*/
IPLAPI IPLvoid iplSaveSceneAsObj(IPLhandle scene, IPLstring fileBaseName);
/** A 4x4 matrix used to represent an affine transform. The matrix elements are stored in row-major order.
*/
typedef struct {
float elements[4][4]; /**< The elements of the matrix, in row-major order. */
} IPLMatrix4x4;
/** Creates an Instanced Mesh object. An Instanced Mesh takes one scene and positions it within another scene.
* This is useful if you have the same object, like a pillar, that you want to instantiate multiple times within
* the same scene. A scene can be instantiated multiple times within another scene, without incurring any significant
* memory overhead. The Instanced Mesh can be moved, rotated, and scaled freely at any time, providing an easy way to
* implement dynamic objects whose motion can be described purely in terms of rigid-body transformations.
*
* \param scene The scene in which to instantiate another scene.
* \param instancedScene The scene to instantiate.
* \param transform A transform matrix that maps from the coordinate space of \c instancedScene to the
* coordinate space of \c scene. This is used to position and orient \c instancedScene
* within \c scene. This parameter specifies the initial value of the transform; it can be
* freely changed once the Instanced Mesh is created, using
* \c iplUpdateInstancedMeshTransform.
* \param instancedMesh [out] Handle to the created Instanced Mesh object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateInstancedMesh(IPLhandle scene,
IPLhandle instancedScene,
IPLMatrix4x4 transform,
IPLhandle* instancedMesh);
/** Destroys an Instanced Mesh object. If any other API objects are still referencing the Instanced Mesh object,
* it will not be destroyed; destruction occurs when the object's reference count reaches zero.
*
* \param instancedMesh [in, out] Address of a handle to the Instanced Mesh object to destroy.
*/
IPLAPI void iplDestroyInstancedMesh(IPLhandle* instancedMesh);
/** Adds an Instanced Mesh object to a Scene object. This function should be called after \c iplCreateInstancedMesh, or
* at any point after calling \c iplRemoveInstancedMesh, for the Instanced Mesh to start affecting sound
* propagation.
*
* \param scene The Scene to which to add the Instanced Mesh. This must be the Scene which was passed
* as the \c scene parameter when calling \c iplCreateInstancedMesh to create the
* Instanced Mesh.
* \param instancedMesh The Instanced Mesh to add to the Scene.
*/
IPLAPI void iplAddInstancedMesh(IPLhandle scene,
IPLhandle instancedMesh);
/** Removes an Instanced Mesh object from a Scene object. After this function is called, the Instanced Mesh will stop
* affecting sound propagation, until a subsequent call to \c iplAddInstancedMesh.
*
* \param scene The Scene from which to remove the Instanced Mesh.
* \param instancedMesh The Instanced Mesh to remove from the Scene.
*/
IPLAPI void iplRemoveInstancedMesh(IPLhandle scene,
IPLhandle instancedMesh);
/** Updates the local-to-world transform of an Instanced Mesh within a Scene. This function allows the Instanced
* Mesh to be moved, rotated, and scaled dynamically. After calling this function, you must call
* \c iplCommitScene for the changes to take effect.
*
* \param instancedMesh The Instanced Mesh whose transform is to be updated.
* \param transform The new 4x4 transform matrix.
*/
IPLAPI void iplUpdateInstancedMeshTransform(IPLhandle instancedMesh,
IPLMatrix4x4 transform);
/** Commits a series of changes to Instanced Meshes in a Scene. This function should be called after any calls to
* \c iplUpdateInstancedMeshTransform for the changes to take effect. For best performance, call this function after
* all transforms have been updated for a given frame.
*
* \param scene The Scene to commit changes to.
*/
IPLAPI void iplCommitScene(IPLhandle scene);
/** \} */
/*****************************************************************************************************************/
/* Environment */
/*****************************************************************************************************************/
/** \defgroup environment Environment
* Functions for controlling an Environment object, which is used to export data to the audio engine. A typical
* usage scenario for Phonon involves a game engine that must specify geometry and material information to
* DSP effects that are applied by the audio engine. The Environment object is the mechanism for doing so. It is
* the only object that must be passed from the game engine into the audio engine, and it encapsulates all
* information that DSP effects may need from the game engine. If you are not using physics-based sound
* propagation features, you still need to create an Environment object. After you create an Environment object,
* how you pass it from the game engine to the audio engine depends on how your game engine and audio engine
* are designed.
* \{
*/
/** Creates an Environment object. It is necessary to call this function even if you are not using the sound
* propagation features of Phonon.
*
* \param context The Context object used by the game engine.
* \param computeDevice Handle to a Compute Device object. Only required if using Radeon Rays for
* ray tracing, or if using TrueAudio Next for convolution, may be \c NULL otherwise.
* \param simulationSettings The settings to use for simulation. This must be the same settings passed to
* \c ::iplCreateScene or \c ::iplLoadScene, whichever was used to create
* the Scene object passed in the \c scene parameter to this function.
* \param scene The Scene object. May be \c NULL, in which case only direct sound will be
* simulated, without occlusion or any other indirect sound propagation.
* \param probeManager The Probe Manager object. May be \c NULL if not using baked data.
* \param environment [out] Handle to the created Environment object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateEnvironment(IPLhandle context, IPLhandle computeDevice,
IPLSimulationSettings simulationSettings, IPLhandle scene, IPLhandle probeManager, IPLhandle* environment);
/** Destroys an Environment object. If any other API objects are still referencing the Environment object, it will
* not be destroyed; destruction occurs when the object's reference count reaches zero.
*
* \param environment [in, out] Address of a handle to the Environment object to destroy.
*/
IPLAPI IPLvoid iplDestroyEnvironment(IPLhandle* environment);
/** Sets the number of bounces to use for real-time simulations that use an Environment object. Calling this
* function overrides the value of \c bounces set on the \c IPLSimulationSettings structure passed when
* calling \c ::iplCreateEnvironment to create this Environment object.
*
* \param environment Handle to an Environment object.
* \param numBounces The number of bounces to use for all subsequent simulations in the Environment.
*/
IPLAPI IPLvoid iplSetNumBounces(IPLhandle environment, IPLint32 numBounces);
/** \} */
/*****************************************************************************************************************/
/* Rendering Settings */
/*****************************************************************************************************************/
/** \defgroup rendersettings Rendering Settings
* Data structures for specifying the parameters of the application's audio rendering pipeline.
* \{
*/
/** The backend to use for applying convolution effects for sound propagation. Phonon lets you choose from
* multiple convolution implementations, with different trade-offs.
*/
typedef enum {
IPL_CONVOLUTIONTYPE_PHONON, /**< Phonon's built-in convolution algorithm. This is a highly optimized,
but single-threaded CPU-based implementation. With this implementation,
there is a significant performance advantage to using
\c ::iplGetMixedEnvironmentalAudio compared to using
\c ::iplGetWetAudioForConvolutionEffect. */
IPL_CONVOLUTIONTYPE_TRUEAUDIONEXT /**< The AMD TrueAudio Next convolution algorithm. This is GPU-based
implementation, that requires an AMD GPU that supports
AMD TrueAudio Next. With this implementation, there is no major
performance advantage to using \c ::iplGetMixedEnvironmentalAudio
as compared to using \c ::iplGetWetAudioForConvolutionEffect. */
} IPLConvolutionType;
    /** Describes various properties of the audio processing pipeline. Many Phonon API objects that are used by the
     *  audio engine need to know how the audio processing pipeline (i.e., your audio engine) applies DSP effects to
     *  audio data. This structure describes the key parameters.
     */
    typedef struct {
        IPLint32 samplingRate;              /**< The sampling rate (in Hz) of any audio to be processed by Phonon.
                                                 **All audio that is passed to Phonon must use the same sampling
                                                 rate.** Phonon will output audio at the same sampling rate as its
                                                 input; no sampling rate conversion will be performed. Supported
                                                 sampling rates are 24000 Hz, 44100 Hz, and 48000 Hz. */
        IPLint32 frameSize;                 /**< The number of samples in a single frame of audio. The value of
                                                 this parameter should be obtained from your audio engine. */
        IPLConvolutionType convolutionType; /**< The convolution algorithm to use for any Convolution Effect
                                                 objects created for this audio processing pipeline. See
                                                 \c IPLConvolutionType for the trade-offs of each algorithm. */
    } IPLRenderingSettings;
/** \} */
/*****************************************************************************************************************/
/* Audio Buffers */
/*****************************************************************************************************************/
/** \defgroup audiobuffer Audio Buffers
* Defining and manipulating audio buffers provided by the audio engine. Phonon can process audio data in a wide
* variety of formats. To facilitate this, the Phonon API contains a range of data types to describe the format
* and contents of audio buffers.
* \{
*/
/** Whether the audio buffer is encoded using Ambisonics or not.
*/
typedef enum {
IPL_CHANNELLAYOUTTYPE_SPEAKERS, /**< Indicates that each channel of audio data is intended to be played
back by a single speaker. This corresponds to most multi-speaker mono,
stereo, or surround sound configurations. */
IPL_CHANNELLAYOUTTYPE_AMBISONICS /**< Indicates that each channel of audio data is to be interpreted as a
series of Ambisonics coefficients. Playing back such an audio buffer
requires a software or hardware Ambisonics decoder. Phonon contains a
software Ambisonics decoder. */
} IPLChannelLayoutType;
/** The type of speaker configuration, for audio formats that are not encoded using Ambisonics.
*/
typedef enum {
IPL_CHANNELLAYOUT_MONO, /**< A single speaker, typically in front of the user. */
IPL_CHANNELLAYOUT_STEREO, /**< A pair of speakers, one to the left of the user, and one to the right.
This is also the setting to use when playing audio over headphones. */
IPL_CHANNELLAYOUT_QUADRAPHONIC, /**< Four speakers: front left, front right, back left, and back right. */
IPL_CHANNELLAYOUT_FIVEPOINTONE, /**< Six speakers: front left, front center, front right, back left, back
right, and subwoofer. */
IPL_CHANNELLAYOUT_SEVENPOINTONE, /**< Eight speakers: front left, front center, front right, side left, side
right, back left, back right, and subwoofer. */
IPL_CHANNELLAYOUT_CUSTOM /**< Lets you specify your own speaker configuration. You can specify any
number of speakers, and set their positions relative to the user. This
is useful if you have a large speaker array, or if you want Phonon to
account for the heights at which the speakers have been installed. */
} IPLChannelLayout;
/** The order in which Ambisonics channels are stored in an audio buffer. Each Ambisonics channel is a series of
* coefficients for a corresponding basis function, denoted by \f$ Y_l^m(\theta,\phi) \f$, where \f$\theta\f$ and
* \f$\phi\f$ are two angles which pinpoint the source relative to the listener, and \f$l\f$ and \f$m\f$ are two
* two integers which, taken together, identify a single Ambisonics channel. Here, \f$ l \geq 0 \f$ and
* \f$ -l \leq m \leq l \f$.
*
* There are many different conventions used by the audio engineering community to encode Ambisonics coefficients.
* Phonon supports many of them.
*
* This enumeration defines the sequence in which Ambisonics channels are stored. Since two integers are needed to
* identify an Ambisonics channel, there is more than one way to use a single integer to identify an Ambisonics
* channel.
*/
typedef enum {
IPL_AMBISONICSORDERING_FURSEMALHAM, /**< Specifies the Furse-Malham (FuMa) channel ordering. This is an
extension of traditional B-format encoding to higher-order
Ambisonics. */
IPL_AMBISONICSORDERING_ACN /**< Specifies the Ambisonics Channel Number scheme for channel ordering.
This is the new standard adopted by the AmbiX Ambisonics format. The
position of each Ambisonics channel is uniquely calculated as
\f$ ACN = l^2 + l + m \f$. */
} IPLAmbisonicsOrdering;
/** Normalization conventions for Ambisonics channels. There are a few different ways of normalizing the values of
* the Ambisonics channels relative to each other. Phonon supports the most popular ones.
*/
typedef enum {
IPL_AMBISONICSNORMALIZATION_FURSEMALHAM, /**< This is the normalization scheme used in Furse-Malham
higher-order Ambisonics. Each channel is normalized to not
exceed 1.0, and a -3 dB gain correction is applied to
channel 0. */
IPL_AMBISONICSNORMALIZATION_SN3D, /**< Also called Schmidt semi-normalized form. This is the
normalization scheme used in the AmbiX format. */
IPL_AMBISONICSNORMALIZATION_N3D /**< This normalization scheme is based on the mathematical
definition of Ambisonics. It is closely related to
\c ::IPL_AMBISONICSNORMALIZATION_SN3D by a series of scaling
factors. This normalization scheme is used internally
throughout Phonon, and using it results in the fastest
performance. */
} IPLAmbisonicsNormalization;
/** Whether the data is interleaved or deinterleaved.
*/
typedef enum {
IPL_CHANNELORDER_INTERLEAVED, /**< Sample values for each channel are stored one after another, followed by
the next set of sample values for each channel, etc. In the case of
2-channel stereo, this would correspond to **LRLRLRLR...** */
IPL_CHANNELORDER_DEINTERLEAVED /**< All sample values for the first channel are stored one after another,
followed by the sample values for the next channel, etc. In the case of
2-channel stereo, this would correspond to **LLLL...RRRR...** */
} IPLChannelOrder;
    /** The format of an audio buffer. Whenever you pass audio data to or from Phonon, you must describe the format in
     *  which the audio is encoded. **Phonon only supports uncompressed PCM wave data, stored in 32-bit floating point
     *  format**. However, Phonon supports many different multi-channel and Ambisonics formats, and the
     *  \c IPLAudioFormat tells Phonon how to interpret a buffer of audio data.
     */
    typedef struct {
        IPLChannelLayoutType channelLayoutType;             /**< Indicates whether or not the audio should be
                                                                 interpreted as Ambisonics data. */
        IPLChannelLayout channelLayout;                     /**< Specifies the speaker configuration used for
                                                                 multi-channel, speaker-based audio data. Ignored
                                                                 if \c channelLayoutType is
                                                                 \c ::IPL_CHANNELLAYOUTTYPE_AMBISONICS. */
        IPLint32 numSpeakers;                               /**< The number of channels in the audio data. Only
                                                                 used if \c channelLayoutType is
                                                                 \c ::IPL_CHANNELLAYOUTTYPE_SPEAKERS and
                                                                 \c channelLayout is
                                                                 \c ::IPL_CHANNELLAYOUT_CUSTOM.
                                                                 NOTE(review): \c IPLAudioBuffer documents its total
                                                                 element count as \c numSamples * \c numSpeakers,
                                                                 which suggests this field may be read for other
                                                                 layouts too -- confirm against implementation. */
        IPLVector3* speakerDirections;                      /**< An array of \c IPLVector3 objects indicating the
                                                                 direction of each speaker relative to the user.
                                                                 Can be \c NULL. Only used if \c channelLayoutType
                                                                 is \c ::IPL_CHANNELLAYOUTTYPE_SPEAKERS and
                                                                 \c channelLayout is
                                                                 \c ::IPL_CHANNELLAYOUT_CUSTOM. */
        IPLint32 ambisonicsOrder;                           /**< The order of Ambisonics to use. Must be between 0
                                                                 and 3. Ignored if \c channelLayoutType is
                                                                 \c ::IPL_CHANNELLAYOUTTYPE_SPEAKERS. */
        IPLAmbisonicsOrdering ambisonicsOrdering;           /**< The ordering of Ambisonics channels within the
                                                                 data. Ignored if \c channelLayoutType is
                                                                 \c ::IPL_CHANNELLAYOUTTYPE_SPEAKERS. */
        IPLAmbisonicsNormalization ambisonicsNormalization; /**< The normalization scheme used for Ambisonics
                                                                 data. Ignored if \c channelLayoutType is
                                                                 \c ::IPL_CHANNELLAYOUTTYPE_SPEAKERS. */
        IPLChannelOrder channelOrder;                       /**< Whether the audio data is interleaved or
                                                                 deinterleaved. */
    } IPLAudioFormat;
    /** A buffer containing audio data. All audio data passed to or from Phonon must be packaged in \c IPLAudioBuffer
     *  objects, which describe the format and size of the audio data.
     */
    typedef struct {
        IPLAudioFormat format;            /**< The format of the audio data. */
        IPLint32 numSamples;              /**< The number of samples (per channel) in the audio buffer. The total
                                               number of elements in the audio buffer is equal to \c numSamples *
                                               \c format.numSpeakers. */
        IPLfloat32* interleavedBuffer;    /**< A pointer to a contiguous block of memory containing interleaved
                                               audio data in the format described by \c format. Can be \c NULL
                                               if \c format.channelOrder is \c ::IPL_CHANNELORDER_DEINTERLEAVED. */
        IPLfloat32** deinterleavedBuffer; /**< A pointer to an array of pointers, each of which points to a block
                                               of memory containing audio data for a single channel of audio data
                                               in the format described by \c format. In other words,
                                               deinterleaved audio data doesn't have to be stored contiguously
                                               in memory. Can be \c NULL if \c format.channelOrder is
                                               \c ::IPL_CHANNELORDER_INTERLEAVED. */
    } IPLAudioBuffer;
/** Mixes a set of audio buffers. This is primarily useful for mixing the output of multiple Panning Effect
* objects, before passing them to a single Virtual Surround Effect or a single Ambisonics Binaural Effect. This
* way, applications can significantly accelerate 3D audio rendering for large numbers of sources.
*
* \param numBuffers The number of input buffers to mix. Must be greater than 0.
* \param inputAudio Array of audio buffers to mix. All of these audio buffers must have identical
* formats.
* \param outputAudio Audio buffer that will contain the mixed audio data. The format of this buffer
* must be identical to all buffers contained in \c inputAudio.
*/
IPLAPI IPLvoid iplMixAudioBuffers(IPLint32 numBuffers, IPLAudioBuffer* inputAudio, IPLAudioBuffer outputAudio);
/** Interleaves a deinterleaved audio buffer. The formats of \c inputAudio and \c outputAudio must be identical
* except for the \c channelOrder field.
*
* \param inputAudio The input audio buffer. This audio buffer must be deinterleaved.
* \param outputAudio The output audio buffer. This audio buffer must be interleaved.
*/
IPLAPI IPLvoid iplInterleaveAudioBuffer(IPLAudioBuffer inputAudio, IPLAudioBuffer outputAudio);
/** Deinterleaves an interleaved audio buffer. The formats of \c inputAudio and \c outputAudio must be identical
* except for the \c channelOrder field.
*
* \param inputAudio The input audio buffer. This audio buffer must be interleaved.
* \param outputAudio The output audio buffer. This audio buffer must be deinterleaved.
*/
IPLAPI IPLvoid iplDeinterleaveAudioBuffer(IPLAudioBuffer inputAudio, IPLAudioBuffer outputAudio);
/** Converts the format of an audio buffer into the format of the output audio buffer. This is primarily useful
* for 360 video and audio authoring workflows. The following format conversions are supported:
*
* - mono to multi-channel speaker-based formats (stereo, quadraphonic, 5.1, 7.1)
* - multi-channel speaker-based (stereo, quadraphonic, 5.1, 7.1) to mono
* - stereo to 5.1 or 7.1
* - Ambisonics to multi-channel speaker-based (mono, stereo, quadraphonic, 5.1, 7.1)
*
* \param inputAudio The input audio buffer.
* \param outputAudio The output audio buffer.
*/
IPLAPI IPLvoid iplConvertAudioBufferFormat(IPLAudioBuffer inputAudio, IPLAudioBuffer outputAudio);
/** Creates an Ambisonics Rotator object. An Ambisonics Rotator object is used to apply an arbitrary rotation to
* audio data encoded in Ambisonics. This is primarily useful in the following situations:
*
* - If you have an Ambisonics audio buffer whose coefficients are defined relative to world space coordinates,
* you can convert them to listener space using an Ambisonics Rotator object. This is necessary when using a
* Convolution Effect object, since its output is defined in world space, and will not change if the listener
* looks around.
*
* - If your final mix is encoded in Ambisonics, and the user is using headphones with head tracking, you can use
* the Ambisonics Rotator object to make the sound field stay "in place" as the user looks around in the real
* world. This is achieved by using the Ambisonics Rotator object to apply the inverse of the user's rotation
* to the final mix.
*
* \param context The Context object used by the audio engine.
* \param order The order of the Ambisonics data to rotate.
* \param rotator [out] Handle to the created Ambisonics Rotator object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateAmbisonicsRotator(IPLhandle context, IPLint32 order, IPLhandle* rotator);
/** Destroys an Ambisonics Rotator object.
*
* \param rotator [in, out] Address of a handle to the Ambisonics Rotator object to destroy.
*/
IPLAPI IPLvoid iplDestroyAmbisonicsRotator(IPLhandle* rotator);
/** Specifies a rotation value. This function must be called before using \c ::iplRotateAmbisonicsAudioBuffer to
* rotate an Ambisonics-encoded audio buffer, or the resulting audio will be incorrect.
*
* \param rotator Handle to an Ambisonics Rotator object.
* \param listenerAhead Unit vector pointing in the direction in which the listener is looking.
* \param listenerUp Unit vector pointing upwards from the listener.
*/
IPLAPI IPLvoid iplSetAmbisonicsRotation(IPLhandle rotator, IPLVector3 listenerAhead, IPLVector3 listenerUp);
/** Rotates an Ambisonics-encoded audio buffer. The \c ::iplSetAmbisonicsRotation function must have been called
* prior to calling this function, or the resulting audio will be incorrect. It is possible to pass the same
* value for \c inputAudio and \c outputAudio. This results in in-place rotation of the Ambisonics data.
*
* \param rotator Handle to an Ambisonics Rotator object.
* \param inputAudio Audio buffer containing the Ambisonics-encoded data that is to be rotated. The
* format of this buffer must be Ambisonics.
* \param outputAudio Audio buffer containing the rotated Ambisonics-encoded data. The format of this
* buffer must be Ambisonics.
*/
IPLAPI IPLvoid iplRotateAmbisonicsAudioBuffer(IPLhandle rotator, IPLAudioBuffer inputAudio,
IPLAudioBuffer outputAudio);
/** \} */
/*****************************************************************************************************************/
/* Binaural Renderer */
/*****************************************************************************************************************/
/** \defgroup binauralrenderer Binaural Renderer
* Functions for managing a Binaural Renderer object. Before creating any effect objects that perform
* spatialization or binaural rendering, you must create a Binaural Renderer object. This object describes global
* settings and audio pipeline parameters that will be used by all spatialization and binaural
* rendering effect objects.
* \{
*/
/** The type of HRTF database to use for binaural rendering. You can either use the built-in HRTF database, or
* supply your own HRTF data at run-time.
*/
typedef enum {
IPL_HRTFDATABASETYPE_DEFAULT, /**< The built-in HRTF database. */
IPL_HRTFDATABASETYPE_SOFA /**< An HRTF database loaded from a SOFA file. SOFA is an AES standard
file format for storing and exchanging acoustic data, including HRTFs.
For more information on the SOFA format, see
https://www.sofaconventions.org/ */
} IPLHrtfDatabaseType;
    /** Parameters used to describe the HRTF database you want to use when creating a Binaural Renderer object.
     */
    typedef struct {
        IPLHrtfDatabaseType type;   /**< Type of HRTF database to use. */
        IPLbyte* hrtfData;          /**< Reserved. Must be NULL. */
        IPLstring sofaFileName;     /**< Name of the SOFA file from which to load HRTF data. Can
                                         be a relative or absolute path. Must be a null-terminated
                                         UTF-8 string. Presumably only used when \c type is
                                         \c ::IPL_HRTFDATABASETYPE_SOFA -- confirm against the
                                         implementation. */
    } IPLHrtfParams;
/** Creates a Binaural Renderer object. This function must be called before creating any Panning Effect objects,
* Object-Based Binaural Effect objects, Virtual Surround Effect objects, or Ambisonics Binaural Effect objects.
* Calling this function for the first time is somewhat expensive; avoid creating Binaural Renderer objects in
* your audio thread if at all possible. **This function is not thread-safe. It cannot be simultaneously called
* from multiple threads.**
*
* \param context The Context object used by the audio engine.
* \param renderingSettings An \c IPLRenderingSettings object describing the audio pipeline's DSP processing
* parameters. These properties must remain constant throughout the lifetime of your
* application.
* \param params Parameters describing the type of HRTF data you wish to use (built-in HRTF data or
* your own custom HRTF data).
* \param renderer [out] Handle to the created Binaural Renderer object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateBinauralRenderer(IPLhandle context, IPLRenderingSettings renderingSettings,
IPLHrtfParams params, IPLhandle* renderer);
/** Destroys a Binaural Renderer object. If any other API objects are still referencing the Binaural Renderer
* object, it will not be destroyed; destruction occurs when the object's reference count reaches zero.
*
* \param renderer [in, out] Address of a handle to the Binaural Renderer object to destroy.
*/
IPLAPI IPLvoid iplDestroyBinauralRenderer(IPLhandle* renderer);
/** \} */
/*****************************************************************************************************************/
/* Panning Effect */
/*****************************************************************************************************************/
/** \defgroup panningeffect Panning Effect
* Functionality for calculating 3D panning and Ambisonics coefficients for a point source. While this is
* useful for rendering point sources on surround speakers, the main advantage of Phonon's Panning Effect is that
* it can **pan a point source and generate Ambisonics data**.
* \{
*/
/** Creates a Panning Effect object. This can be used to render a point source on surround speakers, or using
* Ambisonics.
*
* \param renderer Handle to a Binaural Renderer object.
* \param inputFormat The format of the audio buffers that will be passed as input to this effect. All
* subsequent calls to \c ::iplApplyPanningEffect for this effect object must use
* \c IPLAudioBuffer objects with the same format as specified here. The input format
* must not be Ambisonics.
* \param outputFormat The format of the audio buffers which will be used to retrieve the output from
* this effect. All subsequent calls to \c ::iplApplyPanningEffect for this effect
* object must use \c IPLAudioBuffer objects with the same format as specified here.
* Any valid audio format may be specified as the output format.
* \param effect [out] Handle to the created Panning Effect object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreatePanningEffect(IPLhandle renderer, IPLAudioFormat inputFormat, IPLAudioFormat outputFormat,
IPLhandle* effect);
/** Destroys a Panning Effect object.
*
* \param effect [in, out] Address of a handle to the Panning Effect object to destroy.
*/
IPLAPI IPLvoid iplDestroyPanningEffect(IPLhandle* effect);
/** Applies 3D panning to a buffer of audio data, using the configuration of a Panning Effect object. The input
* audio is treated as emanating from a single point. If the input audio buffer contains more than one channel,
* it will automatically be downmixed to mono.
*
* \param effect Handle to a Panning Effect object.
* \param binauralRenderer Handle to a Binaural Renderer object that should be used to apply the panning
* effect.
* \param inputAudio Audio buffer containing the data to render using 3D panning. The format of this
* buffer must match the \c inputFormat parameter passed to \c ::iplCreatePanningEffect.
* \param direction Unit vector from the listener to the point source, relative to the listener's
* coordinate system.
* \param outputAudio Audio buffer that should contain the rendered audio data. The format of this buffer
* must match the \c outputFormat parameter passed to \c ::iplCreatePanningEffect.
*/
IPLAPI IPLvoid iplApplyPanningEffect(IPLhandle effect, IPLhandle binauralRenderer, IPLAudioBuffer inputAudio, IPLVector3 direction,
IPLAudioBuffer outputAudio);
/** Resets any internal state maintained by a Panning Effect object. This is useful if the Panning Effect object
* is going to be disabled/unused for a few frames; resetting the internal state will prevent an audible glitch
* when the Panning Effect object is re-enabled at a later time.
*
* \param effect Handle to a Panning Effect object.
*/
IPLAPI IPLvoid iplFlushPanningEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Object-Based Binaural Effect */
/*****************************************************************************************************************/
/** \defgroup binauraleffect Object-Based Binaural Effect
* Functionality for accurately spatializing point sources in 3D, using Head-Related Transfer Functions (HRTFs).
* The Phonon API includes a simple set of functions for applying high-performance binaural rendering to point
* source audio data.
* \{
*/
/** Techniques for interpolating HRTF data. This is used when rendering a point source whose position relative to
* the listener is not contained in the measured HRTF data used by Phonon.
*/
typedef enum {
IPL_HRTFINTERPOLATION_NEAREST, /**< Nearest-neighbor filtering, i.e., no interpolation. Selects the
measurement location that is closest to the source's actual location. */
IPL_HRTFINTERPOLATION_BILINEAR /**< Bilinear filtering. Incurs a relatively high CPU overhead as compared to
nearest-neighbor filtering, so use this for sounds where it has a
significant benefit. */
} IPLHrtfInterpolation;
/** Creates an Object-Based Binaural Effect object. This can be used to render a point source using HRTF-based
* binaural rendering.
*
* \param renderer Handle to a Binaural Renderer object.
* \param inputFormat The format of the audio buffers that will be passed as input to this effect. All
* subsequent calls to \c ::iplApplyBinauralEffect for this effect object must use
* \c IPLAudioBuffer objects with the same format as specified here. The input format
* must not be Ambisonics.
* \param outputFormat The format of the audio buffers which will be used to retrieve the output from this
* effect. All subsequent calls to \c ::iplApplyBinauralEffect for this effect object
* must use \c IPLAudioBuffer objects with the same format as specified here. The
* output format must be stereo (2 channels).
* \param effect [out] Handle to the created Object-Based Binaural Effect object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateBinauralEffect(IPLhandle renderer, IPLAudioFormat inputFormat,
IPLAudioFormat outputFormat, IPLhandle* effect);
/** Destroys an Object-Based Binaural Effect object.
*
* \param effect [in, out] Address of a handle to the Object-Based Binaural Effect object to
* destroy.
*/
IPLAPI IPLvoid iplDestroyBinauralEffect(IPLhandle* effect);
/** Applies HRTF-based binaural rendering to a buffer of audio data. The input audio is treated as emanating from
* a single point. If the input audio buffer contains more than one channel, it will automatically be downmixed to
* mono. Using bilinear interpolation (by setting \c interpolation to \c ::IPL_HRTFINTERPOLATION_BILINEAR) can
* incur a relatively high CPU cost. Use it only on sources where nearest-neighbor filtering
* (\c ::IPL_HRTFINTERPOLATION_NEAREST) produces suboptimal results. Typically, bilinear filtering is most useful
* for wide-band noise-like sounds, such as radio static, mechanical noise, fire, etc.
*
* \param effect Handle to an Object-Based Binaural Effect object.
* \param binauralRenderer Handle to a Binaural Renderer object that should be used to apply the binaural
* effect. Each Binaural Renderer corresponds to an HRTF (which may be loaded from
* SOFA files); the value of this parameter determines which HRTF is used to
* spatialize the input audio.
* \param inputAudio Audio buffer containing the data to render using binaural rendering. The format of
* this buffer must match the \c inputFormat parameter passed to
* \c ::iplCreateBinauralEffect.
* \param direction Unit vector from the listener to the point source, relative to the listener's
* coordinate system.
* \param interpolation The interpolation technique to use when rendering a point source at a location
* that is not contained in the measured HRTF data used by Phonon. **If using a custom
* HRTF database, this value must be set to IPL_HRTFINTERPOLATION_BILINEAR.**
* \param outputAudio Audio buffer that should contain the rendered audio data. The format of this
* buffer must match the \c outputFormat parameter passed to
* \c ::iplCreateBinauralEffect.
*/
IPLAPI IPLvoid iplApplyBinauralEffect(IPLhandle effect, IPLhandle binauralRenderer, IPLAudioBuffer inputAudio, IPLVector3 direction,
IPLHrtfInterpolation interpolation, IPLAudioBuffer outputAudio);
/** Applies HRTF-based binaural rendering to a buffer of audio data, and additionally returns the
 * per-ear delays applied by the HRTF. Apart from the two extra output parameters, this function takes the
 * same inputs as \c ::iplApplyBinauralEffect; see that function for details on the input parameters.
 *
 * \param effect            Handle to an Object-Based Binaural Effect object.
 * \param binauralRenderer  Handle to a Binaural Renderer object that should be used to apply the binaural
 *                          effect.
 * \param inputAudio        Audio buffer containing the data to render using binaural rendering. The format
 *                          of this buffer must match the \c inputFormat parameter passed to
 *                          \c ::iplCreateBinauralEffect.
 * \param direction         Unit vector from the listener to the point source, relative to the listener's
 *                          coordinate system.
 * \param interpolation     The interpolation technique to use when rendering a point source at a location
 *                          that is not contained in the measured HRTF data used by Phonon.
 * \param outputAudio       Audio buffer that should contain the rendered audio data. The format of this
 *                          buffer must match the \c outputFormat parameter passed to
 *                          \c ::iplCreateBinauralEffect.
 * \param leftDelay         [out] Delay applied by the HRTF to the left output channel.
 *                          NOTE(review): units are not specified in this header — presumably seconds;
 *                          confirm against the implementation before relying on this value.
 * \param rightDelay        [out] Delay applied by the HRTF to the right output channel. Same units as
 *                          \c leftDelay.
 */
IPLAPI IPLvoid iplApplyBinauralEffectWithParameters(IPLhandle effect, IPLhandle binauralRenderer, IPLAudioBuffer inputAudio,
    IPLVector3 direction, IPLHrtfInterpolation interpolation, IPLAudioBuffer outputAudio, IPLfloat32* leftDelay,
    IPLfloat32* rightDelay);
/** Resets any internal state maintained by an Object-Based Binaural Effect object. This is useful if the
* Object-Based Binaural Effect object is going to be disabled/unused for a few frames; resetting the internal
* state will prevent an audible glitch when the Object-Based Binaural Effect object is re-enabled at a later
* time.
*
* \param effect Handle to an Object-Based Binaural Effect object.
*/
IPLAPI IPLvoid iplFlushBinauralEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Virtual Surround Effect */
/*****************************************************************************************************************/
/** \defgroup virtualsurround Virtual Surround Effect
* Functionality for using Virtual Surround Effect objects. Phonon includes support for _virtual surround_. This
* involves taking multi-channel speaker-based audio data (stereo, quadraphonic, 5.1, or 7.1) and rendering audio
* for each speaker using binaural rendering. In other words, the audio signal for each speaker is rendered as if
* it were emanating from a point in space corresponding to the speaker's position. This allows users to
* experience, say, a 7.1 surround sound mix over regular stereo headphones.
*
* Virtual Surround also works as a fast way to get approximate binaural rendering. All sound sources can be
* panned to some surround format (say 7.1); after they are mixed, the 7.1 surround mix can be rendered using
* virtual surround. This can save CPU cycles, at the cost of spatialization accuracy.
* \{
*/
/** Creates a Virtual Surround Effect object. This can be used to render multichannel surround sound data using
* HRTF-based binaural rendering.
*
* \param renderer Handle to a Binaural Renderer object.
* \param inputFormat The format of the audio buffers that will be passed as input to this effect. All
* subsequent calls to \c ::iplApplyVirtualSurroundEffect for this effect object must
* use \c IPLAudioBuffer objects with the same format as specified here. The input
* format must not be Ambisonics.
* \param outputFormat The format of the audio buffers which will be used to retrieve the output from this
* effect. All subsequent calls to \c ::iplApplyVirtualSurroundEffect for this effect
* object must use \c IPLAudioBuffer objects with the same format as specified here.
* The output format must be stereo (2 channels).
* \param effect [out] Handle to the created Virtual Surround Effect object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateVirtualSurroundEffect(IPLhandle renderer, IPLAudioFormat inputFormat,
IPLAudioFormat outputFormat, IPLhandle* effect);
/** Destroys a Virtual Surround Effect object.
*
* \param effect [in, out] Address of a handle to the Virtual Surround Effect object to destroy.
*/
IPLAPI IPLvoid iplDestroyVirtualSurroundEffect(IPLhandle* effect);
/** Applies HRTF-based binaural rendering to a buffer of multichannel audio data.
*
* \param effect Handle to a Virtual Surround Effect.
* \param binauralRenderer Handle to a Binaural Renderer object that should be used to apply the virtual surround
* effect. Each Binaural Renderer corresponds to an HRTF (which may be loaded from
* SOFA files); the value of this parameter determines which HRTF is used to
* spatialize the input audio.
* \param inputAudio Audio buffer containing the data to render using binaural rendering. The format of
* this buffer must match the \c inputFormat parameter passed to
* \c ::iplCreateVirtualSurroundEffect.
* \param outputAudio Audio buffer that should contain the rendered audio data. The format of this buffer
* must match the \c outputFormat parameter passed to
* \c ::iplCreateVirtualSurroundEffect.
*
* \remark When using a custom HRTF database, calling this function is not supported.
*/
IPLAPI IPLvoid iplApplyVirtualSurroundEffect(IPLhandle effect, IPLhandle binauralRenderer, IPLAudioBuffer inputAudio,
IPLAudioBuffer outputAudio);
/** Resets any internal state maintained by a Virtual Surround Effect object. This is useful if the Virtual
* Surround Effect object is going to be disabled/unused for a few frames; resetting the internal state will
* prevent an audible glitch when the Virtual Surround Effect object is re-enabled at a later time.
*
* \param effect Handle to a Virtual Surround Effect object.
*/
IPLAPI IPLvoid iplFlushVirtualSurroundEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Ambisonics Panning Effect */
/*****************************************************************************************************************/
/** \defgroup ambisonicspanning Ambisonics Panning Effect
* Functionality for rendering Ambisonics data by panning it to standard speaker layouts. Ambisonics is a powerful
* format for encoding 3D sound fields, and exchanging them. Phonon can encode data into Ambisonics using the
* Panning Effect: to spatialize a sound source and create an Ambisonics track, use the Panning Effect with
* \c outputFormat.channelLayoutType set to \c ::IPL_CHANNELLAYOUTTYPE_AMBISONICS.
*
* Phonon can also decode and render Ambisonics data, using panning. This involves approximating the sound field
* as if it were generated by sound coming from each speaker.
*
* Ambisonics also allows 3D audio rendering in VR to be significantly accelerated: instead of applying
* object-based binaural rendering to each source individually, the sources can be encoded into Ambisonics first,
* then mixed, and finally the mix can be rendered using Ambisonics binaural rendering. This saves CPU cycles, at
* the cost of some spatialization accuracy.
* \{
*/
/** Creates an Ambisonics Panning Effect object. This can be used to render higher-order Ambisonics data using
* standard panning algorithms.
*
* \param renderer Handle to a Binaural Renderer object.
* \param inputFormat The format of the audio buffers that will be passed as input to this effect. All
* subsequent calls to \c ::iplApplyAmbisonicsPanningEffect for this effect object must
* use \c IPLAudioBuffer objects with the same format as specified here. The input
* format must be Ambisonics.
* \param outputFormat The format of the audio buffers which will be used to retrieve the output from this
* effect. All subsequent calls to \c ::iplApplyAmbisonicsPanningEffect for this
* effect object must use \c IPLAudioBuffer objects with the same format as specified
* here.
* \param effect [out] Handle to the created Ambisonics Panning Effect object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateAmbisonicsPanningEffect(IPLhandle renderer, IPLAudioFormat inputFormat,
IPLAudioFormat outputFormat, IPLhandle* effect);
/** Destroys an Ambisonics Panning Effect object.
*
* \param effect [in, out] Address of a handle to the Ambisonics Panning Effect object to destroy.
*/
IPLAPI IPLvoid iplDestroyAmbisonicsPanningEffect(IPLhandle* effect);
/** Applies a panning-based rendering algorithm to a buffer of Ambisonics audio data. Ambisonics encoders and decoders
* use many different conventions to store the multiple Ambisonics channels, as well as different normalization
* schemes. Make sure that you correctly specify these settings when creating the Ambisonics Panning Effect
* object, otherwise the rendered audio will be incorrect.
*
* \param effect Handle to an Ambisonics Panning Effect object.
* \param binauralRenderer Handle to a Binaural Renderer object that should be used to apply the ambisonics panning
* effect.
* \param inputAudio Audio buffer containing the data to render. The format of
* this buffer must match the \c inputFormat parameter passed to
* \c ::iplCreateAmbisonicsPanningEffect.
* \param outputAudio Audio buffer that should contain the rendered audio data. The format of this buffer
* must match the \c outputFormat parameter passed to
* \c ::iplCreateAmbisonicsPanningEffect.
*/
IPLAPI IPLvoid iplApplyAmbisonicsPanningEffect(IPLhandle effect, IPLhandle binauralRenderer, IPLAudioBuffer inputAudio,
IPLAudioBuffer outputAudio);
/** Resets any internal state maintained by an Ambisonics Panning Effect object. This is useful if the Ambisonics
* Panning Effect object is going to be disabled/unused for a few frames; resetting the internal state will
* prevent an audible glitch when the Ambisonics Panning Effect object is re-enabled at a later time.
*
* \param effect Handle to an Ambisonics Panning Effect object.
*/
IPLAPI IPLvoid iplFlushAmbisonicsPanningEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Ambisonics Binaural Effect */
/*****************************************************************************************************************/
/** \defgroup ambisonics Ambisonics Binaural Effect
* Functionality for rendering Ambisonics data using HRTF-based binaural rendering. Ambisonics is a powerful
* format for encoding 3D sound fields, and exchanging them. Phonon can encode data into Ambisonics using the
* Panning Effect: to spatialize a sound source and create an Ambisonics track, use the Panning Effect with
* \c outputFormat.channelLayoutType set to \c ::IPL_CHANNELLAYOUTTYPE_AMBISONICS.
*
* Phonon can also decode and render Ambisonics data, using Ambisonics binaural rendering. This involves
* recreating the 3D sound field as perceived by each ear. This is a powerful and intuitive way of listening to
* Ambisonics data. It is extremely useful for rendering audio tracks recorded for 360 video projects.
*
* Ambisonics also allows 3D audio rendering in VR to be significantly accelerated: instead of applying
* object-based binaural rendering to each source individually, the sources can be encoded into Ambisonics first,
* then mixed, and finally the mix can be rendered using Ambisonics binaural rendering. This saves CPU cycles, at
* the cost of some spatialization accuracy.
* \{
*/
/** Creates an Ambisonics Binaural Effect object. This can be used to render higher-order Ambisonics data using
* HRTF-based binaural rendering.
*
* \param renderer Handle to a Binaural Renderer object.
* \param inputFormat The format of the audio buffers that will be passed as input to this effect. All
* subsequent calls to \c ::iplApplyAmbisonicsBinauralEffect for this effect object must
* use \c IPLAudioBuffer objects with the same format as specified here. The input
* format must be Ambisonics.
* \param outputFormat The format of the audio buffers which will be used to retrieve the output from this
* effect. All subsequent calls to \c ::iplApplyAmbisonicsBinauralEffect for this
* effect object must use \c IPLAudioBuffer objects with the same format as specified
* here. The output format must be stereo (2 channels).
* \param effect [out] Handle to the created Ambisonics Binaural Effect object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateAmbisonicsBinauralEffect(IPLhandle renderer, IPLAudioFormat inputFormat,
IPLAudioFormat outputFormat, IPLhandle* effect);
/** Destroys an Ambisonics Binaural Effect object.
*
* \param effect [in, out] Address of a handle to the Ambisonics Binaural Effect object to destroy.
*/
IPLAPI IPLvoid iplDestroyAmbisonicsBinauralEffect(IPLhandle* effect);
/** Applies HRTF-based binaural rendering to a buffer of Ambisonics audio data. Ambisonics encoders and decoders
* use many different conventions to store the multiple Ambisonics channels, as well as different normalization
* schemes. Make sure that you correctly specify these settings when creating the Ambisonics Binaural Effect
* object, otherwise the rendered audio will be incorrect.
*
* \param effect Handle to an Ambisonics Binaural Effect object.
* \param binauralRenderer Handle to a Binaural Renderer object that should be used to apply the ambisonics binaural
* effect. Each Binaural Renderer corresponds to an HRTF (which may be loaded from
* SOFA files); the value of this parameter determines which HRTF is used to
* spatialize the input audio.
* \param inputAudio Audio buffer containing the data to render using binaural rendering. The format of
* this buffer must match the \c inputFormat parameter passed to
* \c ::iplCreateAmbisonicsBinauralEffect.
* \param outputAudio Audio buffer that should contain the rendered audio data. The format of this buffer
* must match the \c outputFormat parameter passed to
* \c ::iplCreateAmbisonicsBinauralEffect.
*
* \remark When using a custom HRTF database, calling this function is not supported.
*/
IPLAPI IPLvoid iplApplyAmbisonicsBinauralEffect(IPLhandle effect, IPLhandle binauralRenderer, IPLAudioBuffer inputAudio,
IPLAudioBuffer outputAudio);
/** Resets any internal state maintained by an Ambisonics Binaural Effect object. This is useful if the Ambisonics
* Binaural Effect object is going to be disabled/unused for a few frames; resetting the internal state will
* prevent an audible glitch when the Ambisonics Binaural Effect object is re-enabled at a later time.
*
* \param effect Handle to an Ambisonics Binaural Effect object.
*/
IPLAPI IPLvoid iplFlushAmbisonicsBinauralEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Environmental Renderer */
/*****************************************************************************************************************/
/** \defgroup envrenderer Environmental Renderer
* Functions for managing an Environmental Renderer object. An Environmental Renderer object is the primary object
* used by the audio engine to apply audio effects that depend on scene geometry and materials. It is created
* and managed by the audio engine, and serves as the primary point of contact between the game engine and the
* audio engine. It acts as a proxy between an Environment object (which is managed by the game engine), and the
* various objects managed by the audio engine.
* \{
*/
/** Callback function that is called when the simulation thread is created.
*/
typedef void (*IPLSimulationThreadCreateCallback)(void);
/** Callback function that is called when the simulation thread is destroyed.
*/
typedef void (*IPLSimulationThreadDestroyCallback)(void);
/** Creates an Environmental Renderer object.
*
* \param context The Context object used by the audio engine.
* \param environment Handle to an Environment object provided by the game engine. It is up to your
* application to pass this handle from the game engine to the audio engine.
* \param renderingSettings An \c IPLRenderingSettings object describing the audio pipeline's DSP processing
* parameters. These properties must remain constant throughout the lifetime of your
* application.
* \param outputFormat The audio format of the output buffers passed to any subsequent call to
* \c ::iplGetMixedEnvironmentalAudio. This format must not be changed once it is set
* during the call to this function.
* \param threadCreateCallback Pointer to a function that will be called when the internal simulation thread
* is created. May be NULL.
* \param threadDestroyCallback Pointer to a function that will be called when the internal simulation thread
* is destroyed. May be NULL.
* \param renderer [out] Handle to the created Environmental Renderer object.
*
* \return Status code indicating whether or not the operation succeeded.
*/
IPLAPI IPLerror iplCreateEnvironmentalRenderer(IPLhandle context, IPLhandle environment,
IPLRenderingSettings renderingSettings, IPLAudioFormat outputFormat,
IPLSimulationThreadCreateCallback threadCreateCallback,
IPLSimulationThreadDestroyCallback threadDestroyCallback, IPLhandle* renderer);
/** Destroys an Environmental Renderer object. If any other API objects are still referencing the Environmental
* Renderer object, the object will not be destroyed; it will only be destroyed once its reference count reaches
* zero.
*
* \param renderer [in, out] Address of a handle to the Environmental Renderer object to destroy.
*/
IPLAPI IPLvoid iplDestroyEnvironmentalRenderer(IPLhandle* renderer);
IPLAPI IPLhandle iplGetEnvironmentForRenderer(IPLhandle renderer);
/** \} */
/*****************************************************************************************************************/
/* Direct Sound */
/*****************************************************************************************************************/
/** \defgroup directsound Direct Sound
* Functions for calculating various properties of direct sound. Direct sound is defined as sound that reaches
* the listener directly from the source, without any reflections from the environment. The Phonon API contains
* functions for calculating various parameters of direct sound.
* \{
*/
/** The algorithm to use when checking for direct path occlusion. Phonon can check whether a direct sound path is
* occluded by scene geometry, and optionally how much of a sound source is occluded.
*/
typedef enum {
    IPL_DIRECTOCCLUSION_RAYCAST,    /**< Performs a rudimentary occlusion test by checking if the ray from the
                                         listener to the source is occluded by any scene geometry. If so, the
                                         sound will be considered to be completely occluded. The Environment
                                         object created by the game engine must have a valid Scene object for
                                         this to work. */
    IPL_DIRECTOCCLUSION_VOLUMETRIC  /**< Performs a slightly more complicated occlusion test: the source is
                                         treated as a sphere, and rays are traced from the listener to various
                                         points in the interior of the sphere. The proportion of rays that are
                                         occluded by scene geometry determines how much of the sound
                                         source is considered occluded. The Environment object created by the
                                         game engine must have a valid Scene object for this to work. */
} IPLDirectOcclusionMethod;
/** The method to use when rendering occluded or partially occluded sound. Phonon can model sound passing through
 *  solid objects, and optionally apply frequency-dependent transmission filters.
 */
typedef enum {
IPL_DIRECTOCCLUSION_NONE, /**< Does not perform any occlusion checks. Sound will be
audible through solid objects. */
IPL_DIRECTOCCLUSION_NOTRANSMISSION, /**< Perform occlusion checks but do not model transmission.
Occluded sound will be completely inaudible. */
IPL_DIRECTOCCLUSION_TRANSMISSIONBYVOLUME, /**< Perform occlusion checks and model transmission; occluded
sound will be scaled by a frequency-independent
attenuation value. This value is calculated based on the
transmission properties of the object occluding the
direct sound path. */
IPL_DIRECTOCCLUSION_TRANSMISSIONBYFREQUENCY, /**< Perform occlusion checks and model transmission; occluded
sound will be rendered with a frequency-dependent
transmission filter. This filter is calculated based on
the transmission properties of the object occluding the
direct sound path. */
} IPLDirectOcclusionMode;
/** Parameters describing a direct sound path. For each frequency band, the attenuation factor applied to the
* direct sound path is:
*
* distanceAttenuation * airAbsorption * (occlusionFactor + (1 - occlusionFactor) * transmissionFactor)
*/
typedef struct {
IPLVector3 direction; /**< Unit vector from the listener to the source. */
IPLfloat32 distanceAttenuation; /**< Scaling factor to apply to direct sound, that arises due to the
spherical attenuation of sound with distance from the source.
Linear scale from 0.0 to 1.0. */
IPLfloat32 airAbsorption[3]; /**< Scaling factors to apply to direct sound, for low, middle, and high
frequencies, that arise due to the scattering of sound waves as they
travel through the air. Linear scale from 0.0 to 1.0. */
IPLfloat32 propagationDelay; /**< Time delay (in seconds) due to propagation from the source to the
listener. */
IPLfloat32 occlusionFactor; /**< Scaling factor to apply to direct sound, that arises due to occlusion
by scene geometry. Linear scale from 0.0 to 1.0. */
IPLfloat32 transmissionFactor[3]; /**< Scaling factors to apply to direct sound, for low, middle, and high
frequencies, that arise due to the transmission of sound waves through
scene geometry. Linear scale from 0.0 to 1.0. */
IPLfloat32 directivityFactor; /**< Scaling factor to apply to direct sound, that arises due to the
directivity pattern of the source. Linear scale from 0.0 to 1.0. */
} IPLDirectSoundPath;
/** Callback function that is called when the directivity pattern needs to be queried at a given direction. This
 * function may be called many times when simulating indirect sound, and should not perform any long, blocking
 * operations.
 *
 * \param direction         Unit vector pointing from the source in the direction in which the directivity
 *                          pattern should be evaluated, relative to the source's orientation.
 * \param userData          User-specified data that was specified via IPLDirectivity.
 *
 * \return Directivity pattern evaluated at the given direction. Typically between 0.0 and 1.0.
 */
typedef float (*IPLDirectivityCallback)(IPLVector3 direction, void* userData);
/** Specifies a directivity pattern. A simple weighted dipole pattern may be specified. Alternatively, a callback
* may be specified to allow user-provided code to be called whenever the directivity pattern needs to be
* evaluated.
*/
typedef struct {
IPLfloat32 dipoleWeight; /**< Controls the blend between a monopole (omnidirectional) and dipole
directivity pattern. 0.0 means pure monopole, 1.0 means pure
dipole. 0.5 results in a cardioid pattern. */
IPLfloat32 dipolePower; /**< Controls the width of the dipole directivity pattern. Higher
values mean sharper, more focused dipoles. */
IPLDirectivityCallback callback; /**< Pointer to a function to call when the directivity pattern needs
to be evaluated. */
void* userData; /**< User-specified data that should be passed to the callback function
when it is called. Use this to pass in any source-specific
data that must be known to the directivity callback function. */
} IPLDirectivity;
/** Specifies information associated with a sound source.
*/
typedef struct {
IPLVector3 position; /**< World-space position of the source. */
IPLVector3 ahead; /**< Unit vector pointing forwards from the source. */
IPLVector3 up; /**< Unit vector pointing upwards from the source. */
IPLVector3 right; /**< Unit vector pointing to the right of the source. */
IPLDirectivity directivity; /**< The source's directivity pattern. */
} IPLSource;
/** Calculates direct sound path parameters for a single source. It is up to the audio engine to perform audio
* processing that uses the information returned by this function.
*
* \param environment Handle to an Environment object.
* \param listenerPosition World-space position of the listener.
* \param listenerAhead Unit vector pointing in the direction in which the listener is looking.
* \param listenerUp Unit vector pointing upwards from the listener.
* \param source Position, orientation, and directivity of the source.
* \param sourceRadius Radius of the sphere defined around the source, for use with
* \c ::IPL_DIRECTOCCLUSION_VOLUMETRIC only.
 * \param occlusionMode       The occlusion mode to use for the direct sound path.
* \param occlusionMethod Algorithm to use for checking for direct path occlusion.
*
* \return Parameters of the direct path from the source to the listener.
*/
IPLAPI IPLDirectSoundPath iplGetDirectSoundPath(IPLhandle environment, IPLVector3 listenerPosition,
IPLVector3 listenerAhead, IPLVector3 listenerUp, IPLSource source, IPLfloat32 sourceRadius,
IPLDirectOcclusionMode occlusionMode, IPLDirectOcclusionMethod occlusionMethod);
/** \} */
/*****************************************************************************************************************/
/* Direct Sound Effect */
/*****************************************************************************************************************/
/** \defgroup directsoundeffect Direct Sound Effect.
 * Functions for managing and using Direct Sound Effect objects. A Direct Sound Effect object is the main object
 * used to apply \c IPLDirectSoundPath parameters to audio data. A Direct Sound Effect only applies
 * direction-independent effects to direct sound.
 * \{
 */
/** Flags that specify which parameters from \c IPLDirectSoundPath should be applied by the Direct Sound Effect.
 */
typedef struct {
    IPLbool applyDistanceAttenuation;           /**< Whether to apply distance attenuation. */
    IPLbool applyAirAbsorption;                 /**< Whether to apply frequency-dependent air absorption. */
    IPLbool applyDirectivity;                   /**< Whether to apply source directivity. */
    IPLDirectOcclusionMode directOcclusionMode; /**< Whether to apply occlusion and transmission. Also
                                                     lets you specify whether to apply frequency-dependent
                                                     or frequency-independent transmission. */
} IPLDirectSoundEffectOptions;
/** Creates a Direct Sound Effect object.
 *
 * \param  renderer         Handle to an Environmental Renderer object.
 * \param  inputFormat      The format of the audio buffers that will be passed as input to this effect. All
 *                          subsequent calls to \c ::iplApplyDirectSoundEffect for this effect object must use
 *                          \c IPLAudioBuffer objects with the same format as specified here.
 * \param  outputFormat     The format of the audio buffers which will be used to retrieve the output from this
 *                          effect. All subsequent calls to \c ::iplApplyDirectSoundEffect for this effect
 *                          object must use \c IPLAudioBuffer objects with the same format as specified here.
 * \param  effect           [out] Handle to the created Direct Sound Effect object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplCreateDirectSoundEffect(IPLhandle renderer, IPLAudioFormat inputFormat,
    IPLAudioFormat outputFormat, IPLhandle* effect);
/** Destroys a Direct Sound Effect object.
 *
 * \param  effect           [in, out] Address of a handle to the Direct Sound Effect object to destroy.
 */
IPLAPI IPLvoid iplDestroyDirectSoundEffect(IPLhandle* effect);
/** Applies various parameters in \c IPLDirectSoundPath to a buffer of audio data.
 *
 * \param  effect           Handle to a Direct Sound Effect object.
 * \param  inputAudio       Audio buffer containing the dry audio data. The format of this buffer must match the
 *                          \c inputFormat parameter passed to \c ::iplCreateDirectSoundEffect.
 * \param  directSoundPath  Parameters of the direct path from the source to the listener.
 * \param  options          Specifies which parameters from \c IPLDirectSoundPath should be processed by
 *                          the Direct Sound Effect.
 * \param  outputAudio      Audio buffer that should contain the wet audio data. The format of this buffer must
 *                          match the \c outputFormat parameter passed to \c ::iplCreateDirectSoundEffect.
 */
IPLAPI IPLvoid iplApplyDirectSoundEffect(IPLhandle effect, IPLAudioBuffer inputAudio,
    IPLDirectSoundPath directSoundPath, IPLDirectSoundEffectOptions options, IPLAudioBuffer outputAudio);
/** Resets any internal state maintained by a Direct Sound Effect object. This is useful if the
 * Direct Sound Effect object is going to be disabled/unused for a few frames; resetting the internal
 * state will prevent an audible glitch when the Direct Sound Effect object is re-enabled at a later
 * time.
 *
 * \param  effect           Handle to a Direct Sound Effect object.
 */
IPLAPI IPLvoid iplFlushDirectSoundEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Convolution Effect */
/*****************************************************************************************************************/
/** \defgroup conveffect Convolution Effect
 * Functions for managing and using Convolution Effect objects. A Convolution Effect object is the main object
 * used to apply physics-based sound propagation effects to audio data. Sound propagation effects are applied as
 * direction-dependent convolution reverb, and the direction dependency is encoded in Ambisonics.
 * \{
 */
/** Defines how a set of baked data should be interpreted.
 */
typedef enum {
    IPL_BAKEDDATATYPE_STATICSOURCE,     /**< Baked sound propagation from a static source to a moving listener. */
    IPL_BAKEDDATATYPE_STATICLISTENER,   /**< Baked sound propagation from a moving source to a static listener. */
    IPL_BAKEDDATATYPE_REVERB            /**< Baked listener-centric reverb. */
} IPLBakedDataType;
/** Identifies a set of baked data. It is the application's responsibility to ensure that each identifier is
 * unique across the lifetime of an Environment object.
 */
typedef struct {
    IPLint32 identifier;    /**< 32-bit signed integer that uniquely identifies this set of baked data. */
    IPLBakedDataType type;  /**< How this set of baked data should be interpreted. */
} IPLBakedDataIdentifier;
/** Creates a Convolution Effect object.
 *
 * \param  renderer         Handle to an Environmental Renderer object.
 * \param  identifier       Unique identifier of the corresponding source, as defined in the baked data
 *                          exported by the game engine. Each Convolution Effect object may have an identifier,
 *                          which is used only if the Environment object provided by the game engine uses baked
 *                          data for sound propagation. If so, the identifier of the Convolution Effect is used
 *                          to look up the appropriate information from the baked data. Multiple Convolution
 *                          Effect objects may be created with the same identifier; in that case they will use
 *                          the same baked data.
 * \param  simulationType   Whether this Convolution Effect object should use baked data or real-time simulation.
 * \param  inputFormat      Format of all audio buffers passed as input to
 *                          \c ::iplSetDryAudioForConvolutionEffect.
 * \param  outputFormat     Format of all output audio buffers passed to \c ::iplGetWetAudioForConvolutionEffect.
 * \param  effect           [out] Handle to the created Convolution Effect object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplCreateConvolutionEffect(IPLhandle renderer, IPLBakedDataIdentifier identifier, IPLSimulationType simulationType,
    IPLAudioFormat inputFormat, IPLAudioFormat outputFormat, IPLhandle* effect);
/** Destroys a Convolution Effect object.
 *
 * \param  effect           [in, out] Address of a handle to the Convolution Effect object to destroy.
 */
IPLAPI IPLvoid iplDestroyConvolutionEffect(IPLhandle* effect);
/** Changes the identifier associated with a Convolution Effect object. This is useful when using a static listener
 * bake, where you may want to teleport the listener between two or more locations for which baked data has
 * been generated.
 *
 * \param  effect           Handle to a Convolution Effect object.
 * \param  identifier       The new identifier of the Convolution Effect object.
 */
IPLAPI IPLvoid iplSetConvolutionEffectIdentifier(IPLhandle effect, IPLBakedDataIdentifier identifier);
/** Specifies a frame of dry audio for a Convolution Effect object. This is the audio data to which sound
 * propagation effects should be applied.
 *
 * \param  effect           Handle to a Convolution Effect object.
 * \param  source           Position, orientation, and directivity of the sound source emitting the dry audio.
 * \param  dryAudio         Audio buffer containing the dry audio data.
 */
IPLAPI IPLvoid iplSetDryAudioForConvolutionEffect(IPLhandle effect, IPLSource source,
    IPLAudioBuffer dryAudio);
/** Retrieves a frame of wet audio from a Convolution Effect object. This is the result of applying sound
 * propagation effects to the dry audio previously specified using \c ::iplSetDryAudioForConvolutionEffect.
 *
 * \param  effect           Handle to a Convolution Effect object.
 * \param  listenerPosition World-space position of the listener.
 * \param  listenerAhead    Unit vector in the direction in which the listener is looking.
 * \param  listenerUp       Unit vector pointing upwards from the listener.
 * \param  wetAudio         Audio buffer which will be populated with the wet audio data.
 */
IPLAPI IPLvoid iplGetWetAudioForConvolutionEffect(IPLhandle effect, IPLVector3 listenerPosition,
    IPLVector3 listenerAhead, IPLVector3 listenerUp, IPLAudioBuffer wetAudio);
/** Retrieves a mixed frame of wet audio. This is the sum of all wet audio data from all Convolution Effect
 * objects that were created using the given Environmental Renderer object. Unless using TrueAudio Next for
 * convolution, this is likely to provide a significant performance boost to the audio thread as compared to
 * calling \c ::iplGetWetAudioForConvolutionEffect for each Convolution Effect separately. On the other hand, doing
 * so makes it impossible to apply additional DSP effects for specific sources before mixing.
 *
 * \param  renderer         Handle to an Environmental Renderer object.
 * \param  listenerPosition World-space position of the listener.
 * \param  listenerAhead    Unit vector in the direction in which the listener is looking.
 * \param  listenerUp       Unit vector pointing upwards from the listener.
 * \param  mixedWetAudio    Audio buffer which will be populated with the wet audio data.
 */
IPLAPI IPLvoid iplGetMixedEnvironmentalAudio(IPLhandle renderer, IPLVector3 listenerPosition,
    IPLVector3 listenerAhead, IPLVector3 listenerUp, IPLAudioBuffer mixedWetAudio);
/** Resets any internal state maintained by a Convolution Effect object. This is useful if the Convolution Effect
 * object is going to be disabled/unused for a few frames; resetting the internal state will prevent an audible
 * glitch when the Convolution Effect object is re-enabled at a later time.
 *
 * \param  effect           Handle to a Convolution Effect object.
 */
IPLAPI IPLvoid iplFlushConvolutionEffect(IPLhandle effect);
/** \} */
/*****************************************************************************************************************/
/* Acoustic Probes */
/*****************************************************************************************************************/
/** \defgroup probes Acoustic Probes
 * Functions for creating and manipulating acoustic probes. Acoustic probes are points at which Phonon samples
 * the acoustics of a scene when baking. The functions in this module allow the game engine to generate probes
 * in specific regions of the scene, store them for baking and run-time use, and visualize them in the game
 * engine's editor.
 *
 * Every probe has a position, and a radius of influence. The baked data corresponding to a probe is only used
 * within its radius of influence. Each probe is associated with a reverb (parametric, convolution, or both),
 * as well as zero or more acoustic responses from various sound sources.
 * \{
 */
/** The algorithm to use when generating a set of probes. Probes are generated by specifying a bounding box for a
 * portion of the scene, and an algorithm for filling the volume of the box with probes. You can generate probes
 * using different algorithms in different portions of a scene. The bounding boxes used for probe generation in
 * different regions may overlap, although this is not typical.
 */
typedef enum {
    IPL_PLACEMENT_CENTROID,     /**< Places a single probe in the center of the box. The radius of the probe is
                                     large enough to fill the interior of the box. */
    IPL_PLACEMENT_OCTREE,       /**< Generates probes throughout the volume of the box. The algorithm is adaptive,
                                     and generates more probes in regions of higher geometric complexity, and
                                     fewer probes around empty space. <b>This option is currently not supported</b>.*/
    IPL_PLACEMENT_UNIFORMFLOOR  /**< Generates probes that are uniformly-spaced, at a fixed height above solid
                                     geometry. A probe will never be generated above another probe unless there is
                                     a solid object between them. The goal is to model floors or terrain, and
                                     generate probes that are a fixed height above the floor or terrain, and
                                     uniformly-spaced along the horizontal plane. This algorithm is not suitable
                                     for scenarios where the listener may fly into a region with no probes;
                                     if this happens, the listener will not be influenced by any of the baked
                                     data. */
} IPLProbePlacement;
/** Parameters that specify how probes should be created by \c ::iplCreateProbeBox. */
typedef struct {
    IPLProbePlacement placement;    /**< The placement algorithm to use for creating probes. */
    IPLfloat32 spacing;             /**< Spacing between probes along the horizontal plane. Only
                                         used if \c placement is \c ::IPL_PLACEMENT_UNIFORMFLOOR. */
    IPLfloat32 heightAboveFloor;    /**< Height of the probes above the closest floor or terrain
                                         surfaces. Only used if \c placement is
                                         \c ::IPL_PLACEMENT_UNIFORMFLOOR. */
    IPLint32 maxOctreeTriangles;    /**< The maximum number of triangles to store in an octree leaf
                                         node. Only used if \c placement is \c ::IPL_PLACEMENT_OCTREE. */
    IPLint32 maxOctreeDepth;        /**< The maximum depth of the octree. Increasing this value increases
                                         density of the generated probes. Only used if \c placement is
                                         \c ::IPL_PLACEMENT_OCTREE. */
} IPLProbePlacementParams;
/** A callback that is called to update the application on the progress of the \c ::iplCreateProbeBox function.
 * You can use this to provide visual feedback to the user, like a progress bar.
 *
 * \param  progress         Fraction of the probe generation process that has been completed, between
 *                          0.0 and 1.0.
 */
typedef void (*IPLProbePlacementProgressCallback)(IPLfloat32 progress);
/** Generates probes within a box. This function should typically be called from the game engine's editor, in
 * response to the user indicating that they want to generate probes in the scene.
 *
 * \param  context                   Handle to the Context object used by the game engine.
 * \param  scene                     Handle to the Scene object.
 * \param  boxLocalToWorldTransform  4x4 local to world transform matrix laid out in column-major format.
 * \param  placementParams           Parameters specifying how probes should be generated.
 * \param  progressCallback          Pointer to a function that reports the percentage of this function's
 *                                   work that has been completed. May be \c NULL.
 * \param  probeBox                  [out] Handle to the created Probe Box object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplCreateProbeBox(IPLhandle context, IPLhandle scene, IPLfloat32* boxLocalToWorldTransform,
    IPLProbePlacementParams placementParams, IPLProbePlacementProgressCallback progressCallback,
    IPLhandle* probeBox);
/** Destroys a Probe Box object.
 *
 * \param  probeBox         [in, out] Address of a handle to the Probe Box object to destroy.
 */
IPLAPI IPLvoid iplDestroyProbeBox(IPLhandle* probeBox);
/** Retrieves spheres describing the positions and influence radii of all probes in the Probe Box object. This
 * function should typically be called from the game engine's editor, and the retrieved spheres should be used
 * for visualization.
 *
 * \param  probeBox         Handle to a Probe Box object.
 * \param  probeSpheres     [out] Array into which information about the probe spheres is returned. It is
 *                          the caller's responsibility to manage memory for this array. The array must be
 *                          large enough to hold all the spheres in the Probe Box object. May be \c NULL, in
 *                          which case no spheres are returned; this is useful when finding out the number of
 *                          probes in the Probe Box object.
 *
 * \return The number of probes in the Probe Box object.
 */
IPLAPI IPLint32 iplGetProbeSpheres(IPLhandle probeBox, IPLSphere* probeSpheres);
/** Serializes a Probe Box object to a byte array. This is typically called by the game engine's editor in order
 * to save the Probe Box object's data to disk.
 *
 * \param  probeBox         Handle to a Probe Box object.
 * \param  data             [out] Byte array into which the Probe Box object will be serialized. It is the
 *                          caller's responsibility to manage memory for this array. The array must be large
 *                          enough to hold all the data in the Probe Box object. May be \c NULL, in which case
 *                          no data is returned; this is useful when finding out the size of the data stored
 *                          in the Probe Box object.
 *
 * \return Size (in bytes) of the serialized data.
 */
IPLAPI IPLint32 iplSaveProbeBox(IPLhandle probeBox, IPLbyte* data);
/** Deserializes a Probe Box object from a byte array. This is typically called by the game engine's editor when
 * loading a Probe Box object from disk.
 *
 * \param  context          Handle to the Context object used by the game engine.
 * \param  data             Byte array containing the serialized representation of the Probe Box object. Must
 *                          not be \c NULL.
 * \param  size             Size (in bytes) of the serialized data.
 * \param  probeBox         [out] Handle to the created Probe Box object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplLoadProbeBox(IPLhandle context, IPLbyte* data, IPLint32 size, IPLhandle* probeBox);
/** Creates a Probe Batch object. A Probe Batch object represents a set of probes that are loaded and unloaded
 * from memory as a unit when the game is played. A Probe Batch may contain probes from multiple Probe Boxes;
 * multiple Probe Batches may contain probes from the same Probe Box. At run-time, Phonon does not use Probe
 * Boxes, it only needs Probe Batches. The typical workflow is as follows:
 *
 * 1.  Using the editor, the designer creates Probe Boxes to sample the scene.
 * 2.  Using the editor, the designer specifies Probe Batches, and decides which probes are part of each Probe
 *     Batch.
 * 3.  The editor saves the Probe Batches along with the rest of the scene data for use at run-time.
 * 4.  At run-time, Phonon uses the Probe Batches to retrieve baked data.
 *
 * \param  context          Handle to the Context object used by the game engine.
 * \param  probeBatch       [out] Handle to the created Probe Batch object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplCreateProbeBatch(IPLhandle context, IPLhandle* probeBatch);
/** Destroys a Probe Batch object.
 *
 * \param  probeBatch       [in, out] Address of a handle to the Probe Batch object to destroy.
 */
IPLAPI IPLvoid iplDestroyProbeBatch(IPLhandle* probeBatch);
/** Adds a specific probe from a Probe Box to a Probe Batch. Once all probes in a Probe Box have been assigned to
 * their respective Probe Batches, you can destroy the Probe Box object; the baked data for the probes will
 * be retained by the Probe Batch.
 *
 * \param  probeBatch       Handle to a Probe Batch object into which the probe should be added.
 * \param  probeBox         Handle to a Probe Box object from which the probe should be added.
 * \param  probeIndex       Index of the probe to add. The index is defined relative to the array of probes
 *                          returned by \c ::iplGetProbeSpheres.
 */
IPLAPI IPLvoid iplAddProbeToBatch(IPLhandle probeBatch, IPLhandle probeBox, IPLint32 probeIndex);
/** Finalizes the set of probes that comprise a Probe Batch. Calling this function builds internal data
 * structures that are used to rapidly determine which probes influence any given point in 3D space. You may
 * not call \c ::iplAddProbeToBatch after calling this function. You must call this function before calling
 * \c ::iplAddProbeBatch to add this Probe Batch object to a Probe Manager object.
 *
 * \param  probeBatch       Handle to a Probe Batch object.
 */
IPLAPI IPLvoid iplFinalizeProbeBatch(IPLhandle probeBatch);
/** Serializes a Probe Batch object to a byte array. This is typically called by the game engine's editor in order
 * to save the Probe Batch object's data to disk.
 *
 * \param  probeBatch       Handle to a Probe Batch object.
 * \param  data             [out] Byte array into which the Probe Batch object will be serialized. It is the
 *                          caller's responsibility to manage memory for this array. The array must be large
 *                          enough to hold all the data in the Probe Batch object. May be \c NULL, in which
 *                          case no data is returned; this is useful when finding out the size of the data
 *                          stored in the Probe Batch object.
 *
 * \return Size (in bytes) of the serialized data.
 */
IPLAPI IPLint32 iplSaveProbeBatch(IPLhandle probeBatch, IPLbyte* data);
/** Deserializes a Probe Batch object from a byte array. This is typically called by the game engine's editor when
 * loading a Probe Batch object from disk. Calling this function implicitly calls \c ::iplFinalizeProbeBatch, so
 * you do not need to call it explicitly.
 *
 * \param  context          Handle to the Context object used by the game engine.
 * \param  data             Byte array containing the serialized representation of the Probe Batch object. Must
 *                          not be \c NULL.
 * \param  size             Size (in bytes) of the serialized data.
 * \param  probeBatch       [out] Handle to the created Probe Batch object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplLoadProbeBatch(IPLhandle context, IPLbyte* data, IPLint32 size, IPLhandle* probeBatch);
/** Creates a Probe Manager object. A Probe Manager object manages a set of Probe Batch objects at runtime.
 * It is typically exported from the game engine to the audio engine via an Environment object. Probe Batch
 * objects can be dynamically added to or removed from a Probe Manager object.
 *
 * \param  context          Handle to the Context object used by the game engine.
 * \param  probeManager     [out] Handle to the created Probe Manager object.
 *
 * \return Status code indicating whether or not the operation succeeded.
 */
IPLAPI IPLerror iplCreateProbeManager(IPLhandle context, IPLhandle* probeManager);
/** Destroys a Probe Manager object.
 *
 * \param  probeManager     [in, out] Address of a handle to the Probe Manager object to destroy.
 */
IPLAPI IPLvoid iplDestroyProbeManager(IPLhandle* probeManager);
/** Adds a Probe Batch to a Probe Manager object. Once this function returns, probes in the Probe Batch will be
 * used to calculate sound propagation effects.
 *
 * \param  probeManager     Handle to a Probe Manager object.
 * \param  probeBatch       Handle to the Probe Batch object to add.
 */
IPLAPI IPLvoid iplAddProbeBatch(IPLhandle probeManager, IPLhandle probeBatch);
/** Removes a Probe Batch from a Probe Manager object. Once this function returns, probes in the Probe Batch will
 * no longer be used to calculate sound propagation effects.
 *
 * \param  probeManager     Handle to a Probe Manager object.
 * \param  probeBatch       Handle to the Probe Batch object to remove.
 */
IPLAPI IPLvoid iplRemoveProbeBatch(IPLhandle probeManager, IPLhandle probeBatch);
/** \} */
/*****************************************************************************************************************/
/* Baking */
/*****************************************************************************************************************/
/** \defgroup baking Baking
 * Functions for baking sound propagation information into acoustic probes. Baking allows detailed acoustic
 * responses to be calculated at design time (just like precomputed lighting), resulting in higher quality
 * sound propagation effects, reduced CPU usage, but increased memory and disk space usage. Phonon lets you bake
 * parametric or convolution reverb at each probe, as well as source-to-listener sound propagation effects from
 * various sources.
 * \{
 */
/** Specifies the kind of acoustic responses to save in the baked data.
 */
typedef struct {
    IPLbool bakeParametric;     /**< Enables the generation of I3DL2-compliant parametric reverb. This is most
                                     suited for calculating reverb in relatively enclosed spaces. It is less
                                     suitable for open spaces, or source-to-listener propagation. It consumes
                                     very little memory per probe. */
    IPLbool bakeConvolution;    /**< Enables the generation of detailed impulse responses for convolution reverb.
                                     This is suited for all kinds of spaces, and for reverb as well as
                                     source-to-listener propagation. However, it consumes significantly more
                                     memory per probe. */
} IPLBakingSettings;
/** A callback that is called to update the application on the progress of the \c ::iplBakeReverb or
 * \c ::iplBakePropagation functions. You can use this to provide visual feedback to the user, like a progress
 * bar.
 *
 * \param  progress         Fraction of the baking process that has been completed, between 0.0 and 1.0.
 */
typedef void (*IPLBakeProgressCallback)(IPLfloat32 progress);
/** Bakes reverb at all probes in a Probe Box. Phonon defines reverb as the indirect sound received at a probe
 * when a source is placed at the probe's location. This is a time-consuming operation, and should typically be
 * called from the game engine's editor. The \c numThreads set on the \c IPLSimulationSettings structure passed
 * when calling \c ::iplCreateEnvironment to create the Environment object are used for multi-threaded baking.
 *
 * \param  environment      Handle to an Environment object.
 * \param  probeBox         Handle to the Probe Box containing the probes for which to bake reverb.
 * \param  bakingSettings   The kind of acoustic responses to bake.
 * \param  progressCallback Pointer to a function that reports the percentage of this function's work that
 *                          has been completed. May be \c NULL.
 */
IPLAPI IPLvoid iplBakeReverb(IPLhandle environment, IPLhandle probeBox, IPLBakingSettings bakingSettings,
    IPLBakeProgressCallback progressCallback);
/** Bakes propagation effects from a specified source to all probes in a Probe Box. Sources are defined in terms
 * of a position and a sphere of influence; all probes in the Probe Box that lie within the sphere of influence
 * are processed by this function. This is a time-consuming operation, and should typically be called from the
 * game engine's editor. The \c numThreads set on the \c IPLSimulationSettings structure passed when calling
 * \c ::iplCreateEnvironment to create the Environment object are used for multi-threaded baking.
 *
 * \param  environment      Handle to an Environment object.
 * \param  probeBox         Handle to the Probe Box containing the probes for which to bake propagation effects
 *                          from the source.
 * \param  sourceInfluence  Sphere defined by the source position (at its center) and its radius of
 *                          influence.
 * \param  sourceIdentifier Identifier of the source. At run-time, a Convolution Effect object can use this
 *                          identifier to look up the correct impulse response information.
 * \param  bakingSettings   The kind of acoustic responses to bake.
 * \param  progressCallback Pointer to a function that reports the percentage of this function's work that
 *                          has been completed. May be \c NULL.
 */
IPLAPI IPLvoid iplBakePropagation(IPLhandle environment, IPLhandle probeBox, IPLSphere sourceInfluence,
    IPLBakedDataIdentifier sourceIdentifier, IPLBakingSettings bakingSettings, IPLBakeProgressCallback progressCallback);
/** Bakes propagation effects from all probes in a Probe Box to a specified listener. Listeners are defined
 * solely by their position; their orientation may freely change at run-time. This is a time-consuming
 * operation, and should typically be called from the game engine's editor. The \c numThreads set on the
 * \c IPLSimulationSettings structure passed when calling \c ::iplCreateEnvironment to create the Environment
 * object are used for multi-threaded baking.
 *
 * \param  environment        Handle to an Environment object.
 * \param  probeBox           Handle to the Probe Box containing the probes for which to bake propagation
 *                            effects to the listener.
 * \param  listenerInfluence  Position and influence radius of the listener.
 * \param  listenerIdentifier Identifier of the listener. At run-time, a Convolution Effect object can use this
 *                            identifier to look up the correct impulse response information.
 * \param  bakingSettings     The kind of acoustic responses to bake.
 * \param  progressCallback   Pointer to a function that reports the percentage of this function's work that
 *                            has been completed. May be \c NULL.
 */
IPLAPI IPLvoid iplBakeStaticListener(IPLhandle environment, IPLhandle probeBox, IPLSphere listenerInfluence,
    IPLBakedDataIdentifier listenerIdentifier, IPLBakingSettings bakingSettings, IPLBakeProgressCallback progressCallback);
/** Cancels any bake operations that may be in progress. Typically, an application will call \c ::iplBakeReverb
 * or \c ::iplBakePropagation in a separate thread from the editor's GUI thread, to keep the GUI responsive.
 * This function can be called from the GUI thread to safely and prematurely terminate execution of any
 * of these functions.
 */
/* NOTE: declared with an explicit (IPLvoid) parameter list. In C, an empty parameter list () declares a
   function with *unspecified* parameters (not a prototype), which disables argument checking at call sites;
   (void) is required to declare a zero-argument prototype (CERT DCL20-C). Backward-compatible: all existing
   callers pass no arguments. */
IPLAPI IPLvoid iplCancelBake(IPLvoid);
/** Deletes all baked data in a Probe Box that is associated with a given source. If no such baked data
 * exists, this function does nothing.
 *
 * \param  probeBox         Handle to a Probe Box object.
 * \param  identifier       Identifier of the source whose baked data is to be deleted.
 */
IPLAPI IPLvoid iplDeleteBakedDataByIdentifier(IPLhandle probeBox, IPLBakedDataIdentifier identifier);
/** Returns the size (in bytes) of the baked data stored in a Probe Box corresponding to a given source.
 * This is useful for displaying statistics in the editor's GUI.
 *
 * \param  probeBox         Handle to a Probe Box object.
 * \param  identifier       Identifier of the source whose baked data size is to be returned.
 *
 * \return Size (in bytes) of the baked data stored in the Probe Box corresponding to the source.
 */
IPLAPI IPLint32 iplGetBakedDataSizeByIdentifier(IPLhandle probeBox, IPLBakedDataIdentifier identifier);
/** \} */
#ifdef __cplusplus
}
#endif
#endif