// audioEngine.js
  1. import { Observable } from "../Misc/observable.js";
  2. import { Logger } from "../Misc/logger.js";
  3. import { AbstractEngine } from "../Engines/abstractEngine.js";
  4. import { IsWindowObjectExist } from "../Misc/domManagement.js";
  5. // Sets the default audio engine to Babylon.js
  6. AbstractEngine.AudioEngineFactory = (hostElement, audioContext, audioDestination) => {
  7. return new AudioEngine(hostElement, audioContext, audioDestination);
  8. };
  9. /**
  10. * This represents the default audio engine used in babylon.
  11. * It is responsible to play, synchronize and analyse sounds throughout the application.
  12. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic
  13. */
  14. export class AudioEngine {
  15. /**
  16. * Gets the current AudioContext if available.
  17. */
  18. get audioContext() {
  19. if (!this._audioContextInitialized) {
  20. this._initializeAudioContext();
  21. }
  22. return this._audioContext;
  23. }
  24. /**
  25. * Instantiates a new audio engine.
  26. *
  27. * There should be only one per page as some browsers restrict the number
  28. * of audio contexts you can create.
  29. * @param hostElement defines the host element where to display the mute icon if necessary
  30. * @param audioContext defines the audio context to be used by the audio engine
  31. * @param audioDestination defines the audio destination node to be used by audio engine
  32. */
  33. constructor(hostElement = null, audioContext = null, audioDestination = null) {
  34. this._audioContext = null;
  35. this._audioContextInitialized = false;
  36. this._muteButton = null;
  37. this._audioDestination = null;
  38. /**
  39. * Gets whether the current host supports Web Audio and thus could create AudioContexts.
  40. */
  41. this.canUseWebAudio = false;
  42. /**
  43. * Defines if Babylon should emit a warning if WebAudio is not supported.
  44. * @ignoreNaming
  45. */
  46. // eslint-disable-next-line @typescript-eslint/naming-convention
  47. this.WarnedWebAudioUnsupported = false;
  48. /**
  49. * Gets whether or not mp3 are supported by your browser.
  50. */
  51. this.isMP3supported = false;
  52. /**
  53. * Gets whether or not ogg are supported by your browser.
  54. */
  55. this.isOGGsupported = false;
  56. /**
  57. * Gets whether audio has been unlocked on the device.
  58. * Some Browsers have strong restrictions about Audio and won't autoplay unless
  59. * a user interaction has happened.
  60. */
  61. this.unlocked = false;
  62. /**
  63. * Defines if the audio engine relies on a custom unlocked button.
  64. * In this case, the embedded button will not be displayed.
  65. */
  66. this.useCustomUnlockedButton = false;
  67. /**
  68. * Event raised when audio has been unlocked on the browser.
  69. */
  70. this.onAudioUnlockedObservable = new Observable();
  71. /**
  72. * Event raised when audio has been locked on the browser.
  73. */
  74. this.onAudioLockedObservable = new Observable();
  75. this._tryToRun = false;
  76. this._onResize = () => {
  77. this._moveButtonToTopLeft();
  78. };
  79. if (!IsWindowObjectExist()) {
  80. return;
  81. }
  82. if (typeof window.AudioContext !== "undefined") {
  83. this.canUseWebAudio = true;
  84. }
  85. const audioElem = document.createElement("audio");
  86. this._hostElement = hostElement;
  87. this._audioContext = audioContext;
  88. this._audioDestination = audioDestination;
  89. try {
  90. if (audioElem &&
  91. !!audioElem.canPlayType &&
  92. (audioElem.canPlayType('audio/mpeg; codecs="mp3"').replace(/^no$/, "") || audioElem.canPlayType("audio/mp3").replace(/^no$/, ""))) {
  93. this.isMP3supported = true;
  94. }
  95. }
  96. catch (e) {
  97. // protect error during capability check.
  98. }
  99. try {
  100. if (audioElem && !!audioElem.canPlayType && audioElem.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/, "")) {
  101. this.isOGGsupported = true;
  102. }
  103. }
  104. catch (e) {
  105. // protect error during capability check.
  106. }
  107. }
  108. /**
  109. * Flags the audio engine in Locked state.
  110. * This happens due to new browser policies preventing audio to autoplay.
  111. */
  112. lock() {
  113. this._triggerSuspendedState();
  114. }
  115. /**
  116. * Unlocks the audio engine once a user action has been done on the dom.
  117. * This is helpful to resume play once browser policies have been satisfied.
  118. */
  119. unlock() {
  120. if (this._audioContext?.state === "running") {
  121. this._hideMuteButton();
  122. if (!this.unlocked) {
  123. // Notify users that the audio stack is unlocked/unmuted
  124. this.unlocked = true;
  125. this.onAudioUnlockedObservable.notifyObservers(this);
  126. }
  127. return;
  128. }
  129. // On iOS, if the audio context resume request was sent from an event other than a `click` event, then
  130. // the resume promise will never resolve and the only way to get the audio context unstuck is to
  131. // suspend it and make another resume request.
  132. if (this._tryToRun) {
  133. this._audioContext?.suspend().then(() => {
  134. this._tryToRun = false;
  135. this._triggerRunningState();
  136. });
  137. }
  138. else {
  139. this._triggerRunningState();
  140. }
  141. }
  142. _resumeAudioContext() {
  143. if (this._audioContext?.resume) {
  144. return this._audioContext.resume();
  145. }
  146. return Promise.resolve();
  147. }
  148. _initializeAudioContext() {
  149. try {
  150. if (this.canUseWebAudio) {
  151. if (!this._audioContext) {
  152. this._audioContext = new AudioContext();
  153. }
  154. // create a global volume gain node
  155. this.masterGain = this._audioContext.createGain();
  156. this.masterGain.gain.value = 1;
  157. if (!this._audioDestination) {
  158. this._audioDestination = this._audioContext.destination;
  159. }
  160. this.masterGain.connect(this._audioDestination);
  161. this._audioContextInitialized = true;
  162. if (this._audioContext.state === "running") {
  163. // Do not wait for the promise to unlock.
  164. this._triggerRunningState();
  165. }
  166. }
  167. }
  168. catch (e) {
  169. this.canUseWebAudio = false;
  170. Logger.Error("Web Audio: " + e.message);
  171. }
  172. }
  173. _triggerRunningState() {
  174. if (this._tryToRun) {
  175. return;
  176. }
  177. this._tryToRun = true;
  178. this._resumeAudioContext()
  179. .then(() => {
  180. this._tryToRun = false;
  181. if (this._muteButton) {
  182. this._hideMuteButton();
  183. }
  184. // Notify users that the audio stack is unlocked/unmuted
  185. this.unlocked = true;
  186. this.onAudioUnlockedObservable.notifyObservers(this);
  187. })
  188. .catch(() => {
  189. this._tryToRun = false;
  190. this.unlocked = false;
  191. });
  192. }
  193. _triggerSuspendedState() {
  194. this.unlocked = false;
  195. this.onAudioLockedObservable.notifyObservers(this);
  196. this._displayMuteButton();
  197. }
  198. _displayMuteButton() {
  199. if (this.useCustomUnlockedButton || this._muteButton) {
  200. return;
  201. }
  202. this._muteButton = document.createElement("BUTTON");
  203. this._muteButton.className = "babylonUnmuteIcon";
  204. this._muteButton.id = "babylonUnmuteIconBtn";
  205. this._muteButton.title = "Unmute";
  206. const imageUrl = !window.SVGSVGElement
  207. ? "https://cdn.babylonjs.com/Assets/audio.png"
  208. : "data:image/svg+xml;charset=UTF-8,%3Csvg%20version%3D%221.1%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20width%3D%2239%22%20height%3D%2232%22%20viewBox%3D%220%200%2039%2032%22%3E%3Cpath%20fill%3D%22white%22%20d%3D%22M9.625%2018.938l-0.031%200.016h-4.953q-0.016%200-0.031-0.016v-12.453q0-0.016%200.031-0.016h4.953q0.031%200%200.031%200.016v12.453zM12.125%207.688l8.719-8.703v27.453l-8.719-8.719-0.016-0.047v-9.938zM23.359%207.875l1.406-1.406%204.219%204.203%204.203-4.203%201.422%201.406-4.219%204.219%204.219%204.203-1.484%201.359-4.141-4.156-4.219%204.219-1.406-1.422%204.219-4.203z%22%3E%3C%2Fpath%3E%3C%2Fsvg%3E";
  209. const css = ".babylonUnmuteIcon { position: absolute; left: 20px; top: 20px; height: 40px; width: 60px; background-color: rgba(51,51,51,0.7); background-image: url(" +
  210. imageUrl +
  211. "); background-size: 80%; background-repeat:no-repeat; background-position: center; background-position-y: 4px; border: none; outline: none; transition: transform 0.125s ease-out; cursor: pointer; z-index: 9999; } .babylonUnmuteIcon:hover { transform: scale(1.05) } .babylonUnmuteIcon:active { background-color: rgba(51,51,51,1) }";
  212. const style = document.createElement("style");
  213. style.appendChild(document.createTextNode(css));
  214. document.getElementsByTagName("head")[0].appendChild(style);
  215. document.body.appendChild(this._muteButton);
  216. this._moveButtonToTopLeft();
  217. this._muteButton.addEventListener("touchend", () => {
  218. this._triggerRunningState();
  219. }, true);
  220. this._muteButton.addEventListener("click", () => {
  221. this.unlock();
  222. }, true);
  223. window.addEventListener("resize", this._onResize);
  224. }
  225. _moveButtonToTopLeft() {
  226. if (this._hostElement && this._muteButton) {
  227. this._muteButton.style.top = this._hostElement.offsetTop + 20 + "px";
  228. this._muteButton.style.left = this._hostElement.offsetLeft + 20 + "px";
  229. }
  230. }
  231. _hideMuteButton() {
  232. if (this._muteButton) {
  233. document.body.removeChild(this._muteButton);
  234. this._muteButton = null;
  235. }
  236. }
  237. /**
  238. * Destroy and release the resources associated with the audio context.
  239. */
  240. dispose() {
  241. if (this.canUseWebAudio && this._audioContextInitialized) {
  242. if (this._connectedAnalyser && this._audioContext) {
  243. this._connectedAnalyser.stopDebugCanvas();
  244. this._connectedAnalyser.dispose();
  245. this.masterGain.disconnect();
  246. this.masterGain.connect(this._audioContext.destination);
  247. this._connectedAnalyser = null;
  248. }
  249. this.masterGain.gain.value = 1;
  250. }
  251. this.WarnedWebAudioUnsupported = false;
  252. this._hideMuteButton();
  253. window.removeEventListener("resize", this._onResize);
  254. this.onAudioUnlockedObservable.clear();
  255. this.onAudioLockedObservable.clear();
  256. }
  257. /**
  258. * Gets the global volume sets on the master gain.
  259. * @returns the global volume if set or -1 otherwise
  260. */
  261. getGlobalVolume() {
  262. if (this.canUseWebAudio && this._audioContextInitialized) {
  263. return this.masterGain.gain.value;
  264. }
  265. else {
  266. return -1;
  267. }
  268. }
  269. /**
  270. * Sets the global volume of your experience (sets on the master gain).
  271. * @param newVolume Defines the new global volume of the application
  272. */
  273. setGlobalVolume(newVolume) {
  274. if (this.canUseWebAudio && this._audioContextInitialized) {
  275. this.masterGain.gain.value = newVolume;
  276. }
  277. }
  278. /**
  279. * Connect the audio engine to an audio analyser allowing some amazing
  280. * synchronization between the sounds/music and your visualization (VuMeter for instance).
  281. * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#using-the-analyser
  282. * @param analyser The analyser to connect to the engine
  283. */
  284. connectToAnalyser(analyser) {
  285. if (this._connectedAnalyser) {
  286. this._connectedAnalyser.stopDebugCanvas();
  287. }
  288. if (this.canUseWebAudio && this._audioContextInitialized && this._audioContext) {
  289. this._connectedAnalyser = analyser;
  290. this.masterGain.disconnect();
  291. this._connectedAnalyser.connectAudioNodes(this.masterGain, this._audioContext.destination);
  292. }
  293. }
  294. }
  295. //# sourceMappingURL=audioEngine.js.map