Diffstat (limited to 'files/de/web/api/audiocontext')
-rw-r--r--  files/de/web/api/audiocontext/decodeaudiodata/index.html  218
-rw-r--r--  files/de/web/api/audiocontext/index.html                  232
2 files changed, 450 insertions, 0 deletions
diff --git a/files/de/web/api/audiocontext/decodeaudiodata/index.html b/files/de/web/api/audiocontext/decodeaudiodata/index.html
new file mode 100644
index 0000000000..32cfda28eb
--- /dev/null
+++ b/files/de/web/api/audiocontext/decodeaudiodata/index.html
@@ -0,0 +1,218 @@
+---
+title: AudioContext.decodeAudioData()
+slug: Web/API/AudioContext/decodeAudioData
+translation_of: Web/API/BaseAudioContext/decodeAudioData
+---
+<p>{{ APIRef("Web Audio API") }}</p>
+
+<div>
+<p>The <code>decodeAudioData()</code> method of the {{ domxref("AudioContext") }} interface is used to asynchronously decode audio data contained in an {{ domxref("ArrayBuffer") }}. In the usual case, the ArrayBuffer is populated from the <code>response</code> attribute of an {{domxref("XMLHttpRequest")}} after setting its <code>responseType</code> to <code>arraybuffer</code>. The decoded AudioBuffer is resampled to the sample rate of the AudioContext and then passed to a callback or promise.</p>
+</div>
+
+<p>This is the preferred method of creating an audio source for the Web Audio API from an audio track.</p>
+
+<h2 id="Syntax">Syntax</h2>
+
+<p>Older callback syntax:</p>
+
+<pre class="syntaxbox">audioCtx.decodeAudioData(audioData, function(decodedData) {
+ // use the decoded data here
+});</pre>
+
+<p>Newer promise-based syntax:</p>
+
+<pre class="syntaxbox">audioCtx.decodeAudioData(audioData).then(function(decodedData) {
+ // use the decoded data here
+});</pre>
+
+<h2 id="Beispiel">Beispiel</h2>
+
+<p>In this section we will first cover the older callback-based syntax and then the newer promise-based syntax.</p>
+
+<h3 id="Ältere_Callbacksyntax">Ältere Callbacksyntax</h3>
+
+<p>In this example, the <code>getData()</code> function uses XHR to load an audio track, setting the <code>responseType</code> of the request so that an ArrayBuffer is returned as the response, which is then stored in the <code>audioData</code> variable. This buffer is passed to the <code>decodeAudioData()</code> function; the success callback takes the successfully decoded PCM data, creates an {{ domxref("AudioBufferSourceNode") }} using {{ domxref("AudioContext.createBufferSource()") }}, connects the source to the {{domxref("AudioContext.destination") }}, and sets it to loop.</p>
+
+<p>The buttons in the example simply call <code>getData()</code> to load the track and start it playing, and <code>stop()</code> to stop it playing. When the source's <code>stop()</code> method is called, the source is cleared.</p>
+
+<div class="note">
+<p><strong>Note</strong>: You can <a href="http://mdn.github.io/decode-audio-data/">run the example live</a> (or <a href="https://github.com/mdn/decode-audio-data">view the source</a>.)</p>
+</div>
+
+<pre class="brush: js">// Variablen definieren
+
+var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+var source;
+
+var pre = document.querySelector('pre');
+var myScript = document.querySelector('script');
+var play = document.querySelector('.play');
+var stop = document.querySelector('.stop');
+
+// use XHR to load an audio track, and
+// decodeAudioData to decode it and copy it into a buffer.
+// Then we put the buffer into the source.
+
+function getData() {
+ source = audioCtx.createBufferSource();
+ var request = new XMLHttpRequest();
+
+ request.open('GET', 'viper.ogg', true);
+
+ request.responseType = 'arraybuffer';
+
+
+ request.onload = function() {
+ var audioData = request.response;
+
+ audioCtx.decodeAudioData(audioData, function(buffer) {
+ source.buffer = buffer;
+
+ source.connect(audioCtx.destination);
+ source.loop = true;
+ },
+
+ function(e){"Error with decoding audio data" + e.err});
+
+ }
+
+ request.send();
+}
+
+// wire up buttons to start and stop playback
+
+play.onclick = function() {
+  getData();
+  source.start(0);
+  play.setAttribute('disabled', 'disabled');
+}
+
+stop.onclick = function() {
+  source.stop(0);
+  play.removeAttribute('disabled');
+}
+
+
+// dump the script to the page
+
+pre.innerHTML = myScript.innerHTML;</pre>
+
+<h3 id="Neue_Promise_basierte_Syntax">Neue Promise basierte Syntax</h3>
+
+<pre class="brush: js">ctx.decodeAudioData(compressedBuffer).then(function(decodedData) {
+ // use the decoded PCM data here
+});</pre>
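+
+<p>As a fuller sketch of the promise form, the snippet below uses <code>fetch()</code> to obtain the compressed audio before decoding and playing it; the file name <code>track.ogg</code> and the looping playback are illustrative assumptions, not part of the original example.</p>
+
+<pre class="brush: js">// fetch an audio file, decode it, and play it in a loop
+// ('track.ogg' is a placeholder file name)
+var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+
+fetch('track.ogg')
+  .then(function(response) { return response.arrayBuffer(); })
+  .then(function(compressedBuffer) {
+    return audioCtx.decodeAudioData(compressedBuffer);
+  })
+  .then(function(decodedData) {
+    var source = audioCtx.createBufferSource();
+    source.buffer = decodedData;
+    source.connect(audioCtx.destination);
+    source.loop = true;
+    source.start(0);
+  })
+  .catch(function(e) {
+    console.error('Error with decoding audio data', e);
+  });</pre>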
+
+<h2 id="Parameter">Parameter</h2>
+
+<dl>
+ <dt>ArrayBuffer</dt>
+ <dd>An ArrayBuffer containing the audio data to be decoded, usually populated from an {{domxref("XMLHttpRequest")}} after setting its <code>responseType</code> to <code>arraybuffer</code>.</dd>
+ <dt>DecodeSuccessCallback</dt>
+ <dd>A callback function that is invoked when decoding finishes successfully. Its single argument is an AudioBuffer containing the decoded audio data. Usually this buffer is handed to an {{domxref("AudioBufferSourceNode")}}, from which it can be played and manipulated.</dd>
+ <dt>DecodeErrorCallback</dt>
+ <dd>An optional callback that is invoked if an error occurs while the audio data is being decoded.</dd>
+</dl>
+
+<h2 id="Rückgabewerte">Rückgabewerte</h2>
+
+<p>Ein {{domxref("AudioBuffer") }} der die dekodierten Audiodaten enthällt.</p>
+
+<h2 id="Spezifikationen">Spezifikationen</h2>
+
+<table class="standard-table">
+ <tbody>
+ <tr>
+ <th scope="col">Spezifikation</th>
+ <th scope="col">Status</th>
+ <th scope="col">Kommentar</th>
+ </tr>
+ <tr>
+ <td>{{SpecName('Web Audio API', '#widl-AudioContext-decodeAudioData-Promise-AudioBuffer--ArrayBuffer-audioData-DecodeSuccessCallback-successCallback-DecodeErrorCallback-errorCallback', 'decodeAudioData()')}}</td>
+ <td>{{Spec2('Web Audio API')}}</td>
+ <td> </td>
+ </tr>
+ </tbody>
+</table>
+
+<h2 id="Browserunterstützung">Browserunterstützung</h2>
+
+<div>{{CompatibilityTable}}</div>
+
+<div id="compat-desktop">
+<table class="compat-table">
+ <tbody>
+ <tr>
+ <th>Feature</th>
+ <th>Chrome</th>
+ <th>Firefox (Gecko)</th>
+ <th>Internet Explorer</th>
+ <th>Opera</th>
+ <th>Safari (WebKit)</th>
+ </tr>
+ <tr>
+ <td>Basic support</td>
+ <td>{{CompatChrome(10.0)}}{{property_prefix("webkit")}}</td>
+ <td>{{CompatGeckoDesktop(25.0)}} </td>
+ <td>{{CompatNo}}</td>
+ <td>15.0{{property_prefix("webkit")}}<br>
+ 22 (unprefixed)</td>
+ <td>6.0{{property_prefix("webkit")}}</td>
+ </tr>
+ <tr>
+ <td>Promise-based syntax</td>
+ <td>{{CompatChrome(49.0)}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatNo}}</td>
+ </tr>
+ </tbody>
+</table>
+</div>
+
+<div id="compat-mobile">
+<table class="compat-table">
+ <tbody>
+ <tr>
+ <th>Feature</th>
+ <th>Android</th>
+ <th>Android Webview</th>
+ <th>Firefox Mobile (Gecko)</th>
+ <th>Firefox OS</th>
+ <th>IE Mobile</th>
+ <th>Opera Mobile</th>
+ <th>Safari Mobile</th>
+ <th>Chrome for Android</th>
+ </tr>
+ <tr>
+ <td>Basic support</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>26.0</td>
+ <td>1.2</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatChrome(33.0)}}</td>
+ </tr>
+ <tr>
+ <td>Promise-based syntax</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatChrome(49.0)}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatUnknown}}</td>
+ <td>{{CompatChrome(49.0)}}</td>
+ </tr>
+ </tbody>
+</table>
+</div>
+
+<h2 id="Siehe_auch">Siehe auch</h2>
+
+<ul>
+ <li><a href="/en-US/docs/Web_Audio_API/Using_Web_Audio_API">Using the Web Audio API</a></li>
+</ul>
diff --git a/files/de/web/api/audiocontext/index.html b/files/de/web/api/audiocontext/index.html
new file mode 100644
index 0000000000..cc2c2db92e
--- /dev/null
+++ b/files/de/web/api/audiocontext/index.html
@@ -0,0 +1,232 @@
+---
+title: AudioContext
+slug: Web/API/AudioContext
+translation_of: Web/API/AudioContext
+---
+<p>{{APIRef("Web Audio API")}}</p>
+
+<div>
+<p>The <code>AudioContext</code> interface represents an audio-processing graph built from audio modules linked together, each represented by an {{domxref("AudioNode")}}. An audio context controls both the creation of the nodes it contains and the execution of the audio processing or decoding. You need to create an AudioContext before you do anything else, as everything happens inside a context.</p>
+</div>
+
+<p>An <code>AudioContext</code> can be a target of events, therefore it implements the {{domxref("EventTarget")}} interface.</p>
+
+<h2 id="Eigenschaften">Eigenschaften</h2>
+
+<dl>
+ <dt>{{domxref("AudioContext.currentTime")}} {{readonlyInline}}</dt>
+ <dd>Returns a double representing an ever-increasing hardware time in seconds used for scheduling. It starts at <code>0</code>.</dd>
+ <dt>{{domxref("AudioContext.destination")}} {{readonlyInline}}</dt>
+ <dd>Returns an {{domxref("AudioDestinationNode")}} representing the final destination of all audio in the context. It can be thought of as the audio-rendering device.</dd>
+ <dt>{{domxref("AudioContext.listener")}} {{readonlyInline}}</dt>
+ <dd>Returns the {{domxref("AudioListener")}} object, used for 3D spatialization.</dd>
+ <dt>{{domxref("AudioContext.sampleRate")}} {{readonlyInline}}</dt>
+ <dd>Returns a float representing the sample rate (in samples per second) used by all nodes in this context. The sample-rate of an {{domxref("AudioContext")}} cannot be changed.</dd>
+ <dt>{{domxref("AudioContext.state")}} {{readonlyInline}}</dt>
+ <dd>Returns the current state of the <code>AudioContext</code>.</dd>
+ <dt>{{domxref("AudioContext.mozAudioChannelType")}} {{ non-standard_inline() }} {{readonlyInline}}</dt>
+ <dd>Used to return the audio channel that the sound playing in an {{domxref("AudioContext")}} will play in, on a Firefox OS device.</dd>
+</dl>
+
+<h3 id="Event_handlers">Event handlers</h3>
+
+<dl>
+ <dt>{{domxref("AudioContext.onstatechange")}}</dt>
+ <dd>An event handler that runs when an event of type {{event("statechange")}} has fired. This occurs when the <code>AudioContext</code>'s state changes, due to the calling of one of the state change methods ({{domxref("AudioContext.suspend")}}, {{domxref("AudioContext.resume")}}, or {{domxref("AudioContext.close")}}); see the sketch after this list.</dd>
+</dl>
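+
+<p>A minimal sketch of an <code>onstatechange</code> handler, assuming an <code>audioCtx</code> created as in the examples below:</p>
+
+<pre class="brush: js">// log each state transition, e.g. after suspend(), resume(), or close()
+audioCtx.onstatechange = function() {
+  console.log('AudioContext state is now: ' + audioCtx.state);
+};</pre>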
+
+<h2 id="Methoden">Methoden</h2>
+
+<p><em>Also implements methods from the interface </em>{{domxref("EventTarget")}}.</p>
+
+<dl>
+ <dt>{{domxref("AudioContext.close()")}}</dt>
+ <dd>Closes the audio context, releasing any system audio resources that it uses.</dd>
+ <dt>{{domxref("AudioContext.createBuffer()")}}</dt>
+ <dd>Creates a new, empty {{ domxref("AudioBuffer") }} object, which can then be populated by data and played via an {{ domxref("AudioBufferSourceNode") }}.</dd>
+ <dt>{{domxref("AudioContext.createBufferSource()")}}</dt>
+ <dd>Creates an {{domxref("AudioBufferSourceNode")}}, which can be used to play and manipulate audio data contained within an {{ domxref("AudioBuffer") }} object. {{ domxref("AudioBuffer") }}s are created using {{domxref("AudioContext.createBuffer")}} or returned by {{domxref("AudioContext.decodeAudioData")}} when it successfully decodes an audio track.</dd>
+ <dt>{{domxref("AudioContext.createMediaElementSource()")}}</dt>
+ <dd>Creates a {{domxref("MediaElementAudioSourceNode")}} associated with an {{domxref("HTMLMediaElement")}}. This can be used to play and manipulate audio from {{HTMLElement("video")}} or {{HTMLElement("audio")}} elements.</dd>
+ <dt>{{domxref("AudioContext.createMediaStreamSource()")}}</dt>
+ <dd>Creates a {{domxref("MediaStreamAudioSourceNode")}} associated with a {{domxref("MediaStream")}} representing an audio stream which may come from the local computer microphone or other sources.</dd>
+ <dt>{{domxref("AudioContext.createMediaStreamDestination()")}}</dt>
+ <dd>Creates a {{domxref("MediaStreamAudioDestinationNode")}} associated with a {{domxref("MediaStream")}} representing an audio stream which may be stored in a local file or sent to another computer.</dd>
+ <dt>{{domxref("AudioContext.createScriptProcessor()")}}</dt>
+ <dd>Creates a {{domxref("ScriptProcessorNode")}}, which can be used for direct audio processing via JavaScript.</dd>
+ <dt>{{domxref("AudioContext.createStereoPanner()")}}</dt>
+ <dd>Creates a {{domxref("StereoPannerNode")}}, which can be used to apply stereo panning to an audio source.</dd>
+ <dt>{{domxref("AudioContext.createAnalyser()")}}</dt>
+ <dd>Creates an {{domxref("AnalyserNode")}}, which can be used to expose audio time and frequency data and for example to create data visualisations.</dd>
+ <dt>{{domxref("AudioContext.createBiquadFilter()")}}</dt>
+ <dd>Creates a {{domxref("BiquadFilterNode")}}, which represents a second order filter configurable as several different common filter types: high-pass, low-pass, band-pass, etc.</dd>
+ <dt>{{domxref("AudioContext.createChannelMerger()")}}</dt>
+ <dd>Creates a {{domxref("ChannelMergerNode")}}, which is used to combine channels from multiple audio streams into a single audio stream.</dd>
+ <dt>{{domxref("AudioContext.createChannelSplitter()")}}</dt>
+ <dd>Creates a {{domxref("ChannelSplitterNode")}}, which is used to access the individual channels of an audio stream and process them separately.</dd>
+ <dt>{{domxref("AudioContext.createConvolver()")}}</dt>
+ <dd>Creates a {{domxref("ConvolverNode")}}, which can be used to apply convolution effects to your audio graph, for example a reverberation effect.</dd>
+ <dt>{{domxref("AudioContext.createDelay()")}}</dt>
+ <dd>Creates a {{domxref("DelayNode")}}, which is used to delay the incoming audio signal by a certain amount. This node is also useful to create feedback loops in a Web Audio API graph.</dd>
+ <dt>{{domxref("AudioContext.createDynamicsCompressor()")}}</dt>
+ <dd>Creates a {{domxref("DynamicsCompressorNode")}}, which can be used to apply acoustic compression to an audio signal.</dd>
+ <dt>{{domxref("AudioContext.createGain()")}}</dt>
+ <dd>Creates a {{domxref("GainNode")}}, which can be used to control the overall volume of the audio graph.</dd>
+ <dt>{{domxref("AudioContext.createOscillator()")}}</dt>
+ <dd>Creates an {{domxref("OscillatorNode")}}, a source representing a periodic waveform. It basically generates a tone.</dd>
+ <dt>{{domxref("AudioContext.createPanner()")}}</dt>
+ <dd>Creates a {{domxref("PannerNode")}}, which is used to spatialise an incoming audio stream in 3D space.</dd>
+ <dt>{{domxref("AudioContext.createPeriodicWave()")}}</dt>
+ <dd>Creates a {{domxref("PeriodicWave")}}, used to define a periodic waveform that can be used to determine the output of an {{ domxref("OscillatorNode") }}.</dd>
+ <dt>{{domxref("AudioContext.createWaveShaper()")}}</dt>
+ <dd>Creates a {{domxref("WaveShaperNode")}}, which is used to implement non-linear distortion effects.</dd>
+ <dt>{{domxref("AudioContext.createAudioWorker()")}}</dt>
+ <dd>Creates an {{domxref("AudioWorkerNode")}}, which can interact with a web worker thread to generate, process, or analyse audio directly. This was added to the spec on August 29 2014, and is not implemented in any browser yet.</dd>
+ <dt>{{domxref("AudioContext.decodeAudioData()")}}</dt>
+ <dd>Asynchronously decodes audio file data contained in an {{domxref("ArrayBuffer")}}. In this case, the ArrayBuffer is usually loaded from an {{domxref("XMLHttpRequest")}}'s <code>response</code> attribute after setting the <code>responseType</code> to <code>arraybuffer</code>. This method only works on complete files, not fragments of audio files.</dd>
+ <dt>{{domxref("AudioContext.resume()")}}</dt>
+ <dd>Resumes the progression of time in an audio context that has previously been suspended.</dd>
+ <dt>{{domxref("AudioContext.suspend()")}}</dt>
+ <dd>Suspends the progression of time in the audio context, temporarily halting audio hardware access and reducing CPU/battery usage in the process.</dd>
+</dl>
+
+<h2 id="Obsolete_Methoden">Obsolete Methoden</h2>
+
+<dl>
+ <dt>{{domxref("AudioContext.createJavaScriptNode()")}}</dt>
+ <dd>Creates a {{domxref("JavaScriptNode")}}, used for direct audio processing via JavaScript. This method is obsolete, and has been replaced by {{domxref("AudioContext.createScriptProcessor()")}}.</dd>
+ <dt>{{domxref("AudioContext.createWaveTable()")}}</dt>
+ <dd>Creates a {{domxref("WaveTableNode")}}, used to define a periodic waveform. This method is obsolete, and has been replaced by {{domxref("AudioContext.createPeriodicWave()")}}.</dd>
+</dl>
+
+<h2 id="Beispiele">Beispiele</h2>
+
+<p>Basic audio context declaration:</p>
+
+<pre class="brush: js">var audioCtx = new AudioContext();</pre>
+
+<p>Cross-browser variant:</p>
+
+<pre class="brush: js">var AudioContext = window.AudioContext || window.webkitAudioContext;
+var audioCtx = new AudioContext();
+
+var oscillatorNode = audioCtx.createOscillator();
+var gainNode = audioCtx.createGain();
+var finish = audioCtx.destination;
+// etc.</pre>
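+
+<p>Building on the nodes created above, a minimal sketch of wiring them into a graph and producing sound (the low gain value is just an illustrative choice):</p>
+
+<pre class="brush: js">// connect oscillator -> gain -> destination and play a quiet tone
+oscillatorNode.connect(gainNode);
+gainNode.connect(finish);
+
+gainNode.gain.value = 0.1; // keep the volume low
+oscillatorNode.start(0);</pre>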
+
+<h2 id="Spezifikationen">Spezifikationen</h2>
+
+<table class="standard-table">
+ <tbody>
+ <tr>
+ <th scope="col">Specification</th>
+ <th scope="col">Status</th>
+ <th scope="col">Comment</th>
+ </tr>
+ <tr>
+ <td>{{SpecName('Web Audio API', '#the-audiocontext-interface', 'AudioContext')}}</td>
+ <td>{{Spec2('Web Audio API')}}</td>
+ <td> </td>
+ </tr>
+ </tbody>
+</table>
+
+<h2 id="Browserkompatibilität">Browserkompatibilität</h2>
+
+<div>{{CompatibilityTable}}</div>
+
+<div id="compat-desktop">
+<table class="compat-table">
+ <tbody>
+ <tr>
+ <th>Feature</th>
+ <th>Chrome</th>
+ <th>Firefox (Gecko)</th>
+ <th>Internet Explorer</th>
+ <th>Opera</th>
+ <th>Safari (WebKit)</th>
+ </tr>
+ <tr>
+ <td>Basic support</td>
+ <td>{{CompatChrome(10.0)}}{{property_prefix("webkit")}}<br>
+ 35</td>
+ <td>{{CompatGeckoDesktop(25.0)}} </td>
+ <td>{{CompatNo}}</td>
+ <td>15.0{{property_prefix("webkit")}}<br>
+ 22</td>
+ <td>6.0{{property_prefix("webkit")}}</td>
+ </tr>
+ <tr>
+ <td><code>createStereoPanner()</code></td>
+ <td>{{CompatChrome(42.0)}}</td>
+ <td>{{CompatGeckoDesktop(37.0)}} </td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ </tr>
+ <tr>
+ <td><code>onstatechange</code>, <code>state</code>, <code>suspend()</code>, <code>resume()</code></td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatGeckoDesktop(40.0)}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ </tr>
+ </tbody>
+</table>
+</div>
+
+<div id="compat-mobile">
+<table class="compat-table">
+ <tbody>
+ <tr>
+ <th>Feature</th>
+ <th>Android</th>
+ <th>Firefox Mobile (Gecko)</th>
+ <th>Firefox OS</th>
+ <th>IE Mobile</th>
+ <th>Opera Mobile</th>
+ <th>Safari Mobile</th>
+ <th>Chrome for Android</th>
+ </tr>
+ <tr>
+ <td>Basic support</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatGeckoMobile(37.0)}}</td>
+ <td>2.2</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ </tr>
+ <tr>
+ <td><code>createStereoPanner()</code></td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ </tr>
+ <tr>
+ <td><code>onstatechange</code>, <code>state</code>, <code>suspend()</code>, <code>resume()</code></td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatNo}}</td>
+ <td>{{CompatVersionUnknown}}</td>
+ </tr>
+ </tbody>
+</table>
+</div>
+
+<h2 id="Siehe_auch">Siehe auch</h2>
+
+<ul>
+ <li><a href="/en-US/docs/Web_Audio_API/Using_Web_Audio_API">Using the Web Audio API</a></li>
+ <li>{{domxref("OfflineAudioContext")}}</li>
+</ul>