
/Source/WebCore/platform/audio/HRTFElevation.cpp

https://bitbucket.org/cyanogenmod/android_external_webkit
/*
 * Copyright (C) 2010 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(WEB_AUDIO)

#include "HRTFElevation.h"

#include "AudioBus.h"
#include "AudioFileReader.h"
#include "Biquad.h"
#include "FFTFrame.h"
#include "HRTFPanner.h"

#include <algorithm>
#include <math.h>
#include <wtf/OwnPtr.h>

using namespace std;

namespace WebCore {
const unsigned HRTFElevation::AzimuthSpacing = 15;
const unsigned HRTFElevation::NumberOfRawAzimuths = 360 / AzimuthSpacing;
const unsigned HRTFElevation::InterpolationFactor = 8;
const unsigned HRTFElevation::NumberOfTotalAzimuths = NumberOfRawAzimuths * InterpolationFactor;
// Takes advantage of the symmetry and creates a composite version of the two measured versions. For example, we have both azimuth 30 and -30 degrees
// where the roles of left and right ears are reversed with respect to each other.
bool HRTFElevation::calculateSymmetricKernelsForAzimuthElevation(int azimuth, int elevation, double sampleRate, const String& subjectName,
                                                                 RefPtr<HRTFKernel>& kernelL, RefPtr<HRTFKernel>& kernelR)
{
    RefPtr<HRTFKernel> kernelL1;
    RefPtr<HRTFKernel> kernelR1;
    bool success = calculateKernelsForAzimuthElevation(azimuth, elevation, sampleRate, subjectName, kernelL1, kernelR1);
    if (!success)
        return false;

    // And symmetric version
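    // For example, azimuth 30 mirrors to 330 (i.e. -30 degrees), while azimuth 0 maps to itself.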
    int symmetricAzimuth = !azimuth ? 0 : 360 - azimuth;

    RefPtr<HRTFKernel> kernelL2;
    RefPtr<HRTFKernel> kernelR2;
    success = calculateKernelsForAzimuthElevation(symmetricAzimuth, elevation, sampleRate, subjectName, kernelL2, kernelR2);
    if (!success)
        return false;

    // Notice L/R reversal in symmetric version.
    kernelL = HRTFKernel::createInterpolatedKernel(kernelL1.get(), kernelR2.get(), 0.5);
    kernelR = HRTFKernel::createInterpolatedKernel(kernelR1.get(), kernelL2.get(), 0.5);

    return true;
}
bool HRTFElevation::calculateKernelsForAzimuthElevation(int azimuth, int elevation, double sampleRate, const String& subjectName,
                                                        RefPtr<HRTFKernel>& kernelL, RefPtr<HRTFKernel>& kernelR)
{
    // Valid values for azimuth are 0 -> 345 in 15 degree increments.
    // Valid values for elevation are -45 -> +90 in 15 degree increments.
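    // The (value / 15) * 15 == value checks below rely on integer division to verify that each angle is a multiple of 15.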
    bool isAzimuthGood = azimuth >= 0 && azimuth <= 345 && (azimuth / 15) * 15 == azimuth;
    ASSERT(isAzimuthGood);
    if (!isAzimuthGood)
        return false;

    bool isElevationGood = elevation >= -45 && elevation <= 90 && (elevation / 15) * 15 == elevation;
    ASSERT(isElevationGood);
    if (!isElevationGood)
        return false;

    // Construct the resource name from the subject name, azimuth, and elevation, for example:
    // "IRC_Composite_C_R0195_T015_P000"
    //
    // Note: the passed in subjectName is not a string passed in via JavaScript or the web.
    // It's passed in as an internal ASCII identifier and is an implementation detail.
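    // Negative elevations wrap around modulo 360: for example, elevation -45 becomes 315 and yields the suffix "P315".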
    int positiveElevation = elevation < 0 ? elevation + 360 : elevation;

    String resourceName = String::format("IRC_%s_C_R0195_T%03d_P%03d", subjectName.utf8().data(), azimuth, positiveElevation);

    OwnPtr<AudioBus> impulseResponse(AudioBus::loadPlatformResource(resourceName.utf8().data(), sampleRate));

    ASSERT(impulseResponse.get());
    if (!impulseResponse.get())
        return false;

    size_t responseLength = impulseResponse->length();
    size_t expectedLength = static_cast<size_t>(256 * (sampleRate / 44100.0));
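    // The impulse responses are 256 frames long at 44.1 kHz; the expected length scales proportionally with the sample rate.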
    // Check number of channels and length. For now these are fixed and known.
    bool isBusGood = responseLength == expectedLength && impulseResponse->numberOfChannels() == 2;
    ASSERT(isBusGood);
    if (!isBusGood)
        return false;

    AudioChannel* leftEarImpulseResponse = impulseResponse->channelByType(AudioBus::ChannelLeft);
    AudioChannel* rightEarImpulseResponse = impulseResponse->channelByType(AudioBus::ChannelRight);

    // Note that depending on the fftSize returned by the panner, we may be truncating the impulse response we just loaded in.
    const size_t fftSize = HRTFPanner::fftSizeForSampleRate(sampleRate);

    kernelL = HRTFKernel::create(leftEarImpulseResponse, fftSize, sampleRate, true);
    kernelR = HRTFKernel::create(rightEarImpulseResponse, fftSize, sampleRate, true);

    return true;
}
// The range of elevations for the IRCAM impulse responses varies depending on azimuth, but the minimum elevation appears to always be -45.
//
// Here's how it goes:
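//
// maxElevations[n] is the maximum measured elevation for azimuth n * 15 degrees; the trailing comment on each entry lists that azimuth.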
static int maxElevations[] = {
        //  Azimuth
        //
    90, // 0
    45, // 15
    60, // 30
    45, // 45
    75, // 60
    45, // 75
    60, // 90
    45, // 105
    75, // 120
    45, // 135
    60, // 150
    45, // 165
    75, // 180
    45, // 195
    60, // 210
    45, // 225
    75, // 240
    45, // 255
    60, // 270
    45, // 285
    75, // 300
    45, // 315
    60, // 330
    45  // 345
};
PassOwnPtr<HRTFElevation> HRTFElevation::createForSubject(const String& subjectName, int elevation, double sampleRate)
{
    bool isElevationGood = elevation >= -45 && elevation <= 90 && (elevation / 15) * 15 == elevation;
    ASSERT(isElevationGood);
    if (!isElevationGood)
        return 0;

    OwnPtr<HRTFKernelList> kernelListL = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));
    OwnPtr<HRTFKernelList> kernelListR = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));

    // Load convolution kernels from HRTF files.
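    // The raw kernels land every InterpolationFactor slots; the intermediate slots are filled by the interpolation loop further below.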
    int interpolatedIndex = 0;
    for (unsigned rawIndex = 0; rawIndex < NumberOfRawAzimuths; ++rawIndex) {
        // Don't let elevation exceed maximum for this azimuth.
        int maxElevation = maxElevations[rawIndex];
        int actualElevation = min(elevation, maxElevation);

        bool success = calculateKernelsForAzimuthElevation(rawIndex * AzimuthSpacing, actualElevation, sampleRate, subjectName, kernelListL->at(interpolatedIndex), kernelListR->at(interpolatedIndex));
        if (!success)
            return 0;

        interpolatedIndex += InterpolationFactor;
    }

    // Now go back and interpolate intermediate azimuth values.
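    // For each pair of adjacent raw azimuths (wrapping from the last one back to azimuth 0), fill the InterpolationFactor - 1 intermediate
    // slots with kernels interpolated at x = 1/InterpolationFactor, 2/InterpolationFactor, ...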
    for (unsigned i = 0; i < NumberOfTotalAzimuths; i += InterpolationFactor) {
        int j = (i + InterpolationFactor) % NumberOfTotalAzimuths;

        // Create the interpolated convolution kernels and delays.
        for (unsigned jj = 1; jj < InterpolationFactor; ++jj) {
            double x = double(jj) / double(InterpolationFactor); // interpolate from 0 -> 1

            (*kernelListL)[i + jj] = HRTFKernel::createInterpolatedKernel(kernelListL->at(i).get(), kernelListL->at(j).get(), x);
            (*kernelListR)[i + jj] = HRTFKernel::createInterpolatedKernel(kernelListR->at(i).get(), kernelListR->at(j).get(), x);
        }
    }

    OwnPtr<HRTFElevation> hrtfElevation = adoptPtr(new HRTFElevation(kernelListL.release(), kernelListR.release(), elevation, sampleRate));
    return hrtfElevation.release();
}
PassOwnPtr<HRTFElevation> HRTFElevation::createByInterpolatingSlices(HRTFElevation* hrtfElevation1, HRTFElevation* hrtfElevation2, double x, double sampleRate)
{
    ASSERT(hrtfElevation1 && hrtfElevation2);
    if (!hrtfElevation1 || !hrtfElevation2)
        return 0;

    ASSERT(x >= 0.0 && x < 1.0);

    OwnPtr<HRTFKernelList> kernelListL = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));
    OwnPtr<HRTFKernelList> kernelListR = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));

    HRTFKernelList* kernelListL1 = hrtfElevation1->kernelListL();
    HRTFKernelList* kernelListR1 = hrtfElevation1->kernelListR();
    HRTFKernelList* kernelListL2 = hrtfElevation2->kernelListL();
    HRTFKernelList* kernelListR2 = hrtfElevation2->kernelListR();

    // Interpolate kernels of corresponding azimuths of the two elevations.
    for (unsigned i = 0; i < NumberOfTotalAzimuths; ++i) {
        (*kernelListL)[i] = HRTFKernel::createInterpolatedKernel(kernelListL1->at(i).get(), kernelListL2->at(i).get(), x);
        (*kernelListR)[i] = HRTFKernel::createInterpolatedKernel(kernelListR1->at(i).get(), kernelListR2->at(i).get(), x);
    }

    // Interpolate elevation angle.
    double angle = (1.0 - x) * hrtfElevation1->elevationAngle() + x * hrtfElevation2->elevationAngle();

    OwnPtr<HRTFElevation> hrtfElevation = adoptPtr(new HRTFElevation(kernelListL.release(), kernelListR.release(), static_cast<int>(angle), sampleRate));
    return hrtfElevation.release();
}
void HRTFElevation::getKernelsFromAzimuth(double azimuthBlend, unsigned azimuthIndex, HRTFKernel* &kernelL, HRTFKernel* &kernelR, double& frameDelayL, double& frameDelayR)
{
    bool checkAzimuthBlend = azimuthBlend >= 0.0 && azimuthBlend < 1.0;
    ASSERT(checkAzimuthBlend);
    if (!checkAzimuthBlend)
        azimuthBlend = 0.0;

    unsigned numKernels = m_kernelListL->size();

    bool isIndexGood = azimuthIndex < numKernels;
    ASSERT(isIndexGood);
    if (!isIndexGood) {
        kernelL = 0;
        kernelR = 0;
        return;
    }

    // Return the left and right kernels.
    kernelL = m_kernelListL->at(azimuthIndex).get();
    kernelR = m_kernelListR->at(azimuthIndex).get();

    frameDelayL = m_kernelListL->at(azimuthIndex)->frameDelay();
    frameDelayR = m_kernelListR->at(azimuthIndex)->frameDelay();
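    // Also look at the neighboring azimuth; the modulo wraps the last azimuth back around to azimuth 0.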
    int azimuthIndex2 = (azimuthIndex + 1) % numKernels;
    double frameDelay2L = m_kernelListL->at(azimuthIndex2)->frameDelay();
    double frameDelay2R = m_kernelListR->at(azimuthIndex2)->frameDelay();

    // Linearly interpolate delays.
    frameDelayL = (1.0 - azimuthBlend) * frameDelayL + azimuthBlend * frameDelay2L;
    frameDelayR = (1.0 - azimuthBlend) * frameDelayR + azimuthBlend * frameDelay2R;
}

} // namespace WebCore

#endif // ENABLE(WEB_AUDIO)