---
title: AnalyserNode
slug: Web/API/AnalyserNode
translation_of: Web/API/AnalyserNode
---
<p>{{APIRef("Web Audio API")}}</p>

<p>The <strong><code>AnalyserNode</code></strong> interface represents a node able to provide real-time frequency and time-domain analysis information. It is an {{domxref("AudioNode")}} that passes the audio stream unchanged from input to output, but lets you take the generated data, process it, and create audio visualisations.</p>

<p>An <code>AnalyserNode</code> has exactly one input and one output. The node works even if the output is not connected.</p>
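<p>For example, a minimal routing sketch (assuming an existing {{domxref("AudioContext")}} named <code>audioCtx</code> and a source node named <code>source</code>, both hypothetical here) could look like this; the final connection is optional, since the analyser still produces data without a connected output:</p>

<pre class="brush: js">// Assumed setup: audioCtx and source already exist
var analyser = audioCtx.createAnalyser();

// Route the audio through the analyser unchanged
source.connect(analyser);

// Optional: the analyser works even if this line is omitted
analyser.connect(audioCtx.destination);
</pre>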

<p><img alt="Without modifying the audio stream, the node allows to get the frequency and time-domain data associated to it, using a FFT." src="https://mdn.mozillademos.org/files/12970/fttaudiodata_en.svg" style="height: 206px; width: 693px;"></p>

<table class="properties">
 <tbody>
  <tr>
   <th scope="row">Количество входов</th>
   <td><code>1</code></td>
  </tr>
  <tr>
   <th scope="row">Количество выводов</th>
   <td><code>1</code> (but may be left unconnected)</td>
  </tr>
  <tr>
   <th scope="row">Channel count mode</th>
   <td><code>"explicit"</code></td>
  </tr>
  <tr>
   <th scope="row">Количество каналов</th>
   <td><code>1</code></td>
  </tr>
  <tr>
   <th scope="row">Channel interpretation</th>
   <td><code>"speakers"</code></td>
  </tr>
 </tbody>
</table>

<h2 id="Наследование">Наследование</h2>

<p>Интерфейс унаследован от  следующих родительских интерфейсов:</p>

<p>{{InheritanceDiagram}}</p>

<h2 id="Constructor">Constructor</h2>

<dl>
 <dt>{{domxref("AnalyserNode.AnalyserNode", "AnalyserNode()")}}</dt>
 <dd>Creates a new instance of an <code>AnalyserNode</code> object.</dd>
</dl>
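<p>As a sketch (assuming an existing {{domxref("AudioContext")}} named <code>audioCtx</code>), the constructor can also take an options object to set the analysis parameters up front:</p>

<pre class="brush: js">var analyser = new AnalyserNode(audioCtx, {
  fftSize: 2048,
  minDecibels: -90,
  maxDecibels: -10,
  smoothingTimeConstant: 0.85
});
</pre>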

<h2 id="Properties">Properties</h2>

<p><em>Inherits properties from its parent, </em><em>{{domxref("AudioNode")}}</em>.</p>

<dl>
 <dt>{{domxref("AnalyserNode.fftSize")}}</dt>
 <dd>Is an unsigned long value representing the size of the FFT (<a href="http://en.wikipedia.org/wiki/Fast_Fourier_transform" title="/en-US/docs/">Fast Fourier Transform</a>) to be used to determine the frequency domain.</dd>
 <dt>{{domxref("AnalyserNode.frequencyBinCount")}} {{readonlyInline}}</dt>
 <dd>Is an unsigned long value half that of the FFT size. This generally equates to the number of data values you will have to play with for the visualization.</dd>
 <dt>{{domxref("AnalyserNode.minDecibels")}}</dt>
 <dd>Is a double value representing the minimum power value in the scaling range for the FFT analysis data, for conversion to unsigned byte values — basically, this specifies the minimum value for the range of results when using <code>getByteFrequencyData()</code>.</dd>
 <dt>{{domxref("AnalyserNode.maxDecibels")}}</dt>
 <dd>Is a double value representing the maximum power value in the scaling range for the FFT analysis data, for conversion to unsigned byte values — basically, this specifies the maximum value for the range of results when using <code>getByteFrequencyData()</code>.</dd>
 <dt>{{domxref("AnalyserNode.smoothingTimeConstant")}}</dt>
 <dd>Is a double value representing the averaging constant with the last analysis frame — basically, it makes the transition between values over time smoother.</dd>
</dl>
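<p>As a rough sketch of how these properties relate (assuming an <code>AnalyserNode</code> named <code>analyser</code>), <code>frequencyBinCount</code> is always half of <code>fftSize</code>, and the decibel range bounds the values returned by <code>getByteFrequencyData()</code>:</p>

<pre class="brush: js">analyser.fftSize = 256;                  // must be a power of 2
console.log(analyser.frequencyBinCount); // 128, i.e. half of fftSize

analyser.minDecibels = -90;              // powers at or below this map to 0
analyser.maxDecibels = -10;              // powers at or above this map to 255
analyser.smoothingTimeConstant = 0.85;   // 0 = no averaging, 1 = heavy smoothing
</pre>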

<h2 id="Methods">Methods</h2>

<p><em>Inherits methods from its parent, </em><em>{{domxref("AudioNode")}}</em>.</p>

<dl>
 <dt>{{domxref("AnalyserNode.getFloatFrequencyData()")}}</dt>
 <dd>Copies the current frequency data into a {{domxref("Float32Array")}} array passed into it.</dd>
 <dt>{{domxref("AnalyserNode.getByteFrequencyData()")}}</dt>
 <dd>Copies the current frequency data into a {{domxref("Uint8Array")}} (unsigned byte array) passed into it.</dd>
 <dt>{{domxref("AnalyserNode.getFloatTimeDomainData()")}}</dt>
 <dd>Copies the current waveform, or time-domain, data into a {{domxref("Float32Array")}} array passed into it.</dd>
 <dt>{{domxref("AnalyserNode.getByteTimeDomainData()")}}</dt>
 <dd>Copies the current waveform, or time-domain, data into a {{domxref("Uint8Array")}} (unsigned byte array) passed into it.</dd>
</dl>
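<p>A short usage sketch (assuming an <code>AnalyserNode</code> named <code>analyser</code>): the frequency methods fill an array of <code>frequencyBinCount</code> elements, while the time-domain methods fill up to <code>fftSize</code> elements:</p>

<pre class="brush: js">var freqByte  = new Uint8Array(analyser.frequencyBinCount);
var freqFloat = new Float32Array(analyser.frequencyBinCount);
var timeByte  = new Uint8Array(analyser.fftSize);
var timeFloat = new Float32Array(analyser.fftSize);

analyser.getByteFrequencyData(freqByte);    // values in the range 0–255 per bin
analyser.getFloatFrequencyData(freqFloat);  // decibel values per bin
analyser.getByteTimeDomainData(timeByte);   // waveform samples, 128 means silence
analyser.getFloatTimeDomainData(timeFloat); // waveform samples in the range -1..1
</pre>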

<h2 id="Examples">Examples</h2>

<div class="note">
<p><strong>Note</strong>: See the guide <a href="/en-US/docs/Web/API/Web_Audio_API/Visualizations_with_Web_Audio_API">Visualizations with Web Audio API</a> for more information on creating audio visualizations.</p>
</div>

<h3 id="Basic_usage">Basic usage</h3>

<p>The following example shows basic usage of an {{domxref("AudioContext")}} to create an <code>AnalyserNode</code>, then {{domxref("window.requestAnimationFrame()","requestAnimationFrame")}} and {{htmlelement("canvas")}} to collect time domain data repeatedly and draw an "oscilloscope style" output of the current audio input. For more complete applied examples/information, check out our <a href="https://mdn.github.io/voice-change-o-matic/">Voice-change-O-matic</a> demo (see <a href="https://github.com/mdn/voice-change-o-matic/blob/gh-pages/scripts/app.js#L128-L205">app.js lines 128–205</a> for relevant code).</p>

<pre class="brush: js notranslate">var audioCtx = new(window.AudioContext || window.webkitAudioContext)();
var analyser = audioCtx.createAnalyser();

// ...

analyser.fftSize = 2048;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
analyser.getByteTimeDomainData(dataArray);

// Get a canvas defined with ID "oscilloscope"
var canvas = document.getElementById("oscilloscope");
var canvasCtx = canvas.getContext("2d");

// draw an oscilloscope of the current audio source

function draw() {

  requestAnimationFrame(draw);

  analyser.getByteTimeDomainData(dataArray);

  canvasCtx.fillStyle = "rgb(200, 200, 200)";
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = "rgb(0, 0, 0)";

  canvasCtx.beginPath();

  var sliceWidth = canvas.width * 1.0 / bufferLength;
  var x = 0;

  for (var i = 0; i &lt; bufferLength; i++) {

    var v = dataArray[i] / 128.0;
    var y = v * canvas.height / 2;

    if (i === 0) {
      canvasCtx.moveTo(x, y);
    } else {
      canvasCtx.lineTo(x, y);
    }

    x += sliceWidth;
  }

  canvasCtx.lineTo(canvas.width, canvas.height / 2);
  canvasCtx.stroke();
}

draw();
</pre>
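<p>The same approach can be adapted to frequency data. The following is a sketch only (reusing the <code>analyser</code>, <code>canvas</code> and <code>canvasCtx</code> variables from the example above) that draws simple frequency bars with <code>getByteFrequencyData()</code>:</p>

<pre class="brush: js">analyser.fftSize = 256;
var barData = new Uint8Array(analyser.frequencyBinCount);

function drawBars() {
  requestAnimationFrame(drawBars);

  analyser.getByteFrequencyData(barData);

  canvasCtx.fillStyle = "rgb(0, 0, 0)";
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

  var barWidth = canvas.width / barData.length;

  for (var i = 0; i &lt; barData.length; i++) {
    var barHeight = (barData[i] / 255) * canvas.height;
    canvasCtx.fillStyle = "rgb(" + (barData[i] + 100) + ", 50, 50)";
    canvasCtx.fillRect(i * barWidth, canvas.height - barHeight, barWidth, barHeight);
  }
}

drawBars();
</pre>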

<h2 id="Specifications">Specifications</h2>

<table class="standard-table">
 <tbody>
  <tr>
   <th scope="col">Specification</th>
   <th scope="col">Status</th>
   <th scope="col">Comment</th>
  </tr>
  <tr>
   <td>{{SpecName('Web Audio API', '#the-analysernode-interface', 'AnalyserNode')}}</td>
   <td>{{Spec2('Web Audio API')}}</td>
   <td></td>
  </tr>
 </tbody>
</table>

<h2 id="Browser_compatibility">Browser compatibility</h2>

<p>{{Compat("api.AnalyserNode")}}</p>

<h2 id="See_also">See also</h2>

<ul>
 <li><a href="/en-US/docs/Web_Audio_API/Using_Web_Audio_API">Using the Web Audio API</a></li>
</ul>