---
title: AudioContext.createDelay()
slug: Web/API/BaseAudioContext/createDelay
translation_of: Web/API/BaseAudioContext/createDelay
original_slug: Web/API/AudioContext/createDelay
---
<p>{{ APIRef("Web Audio API") }}</p>

<div>
<p><code>createDelay()</code> is a method of the {{ domxref("AudioContext") }} interface that delays the incoming audio signal by a set amount of time. For example, you could speak into a microphone and have your words played back through the speakers a few seconds later.</p>
</div>

<h2 id="语法">语法</h2>

<pre class="brush: js">var audioCtx = new AudioContext();
var synthDelay = audioCtx.createDelay(<em>maxDelayTime</em>);</pre>

<h3 id="参数">参数</h3>

<dl>
 <dt><em>maxDelayTime</em></dt>
 <dd>The maximum amount of delay time the node is allowed to apply, in seconds.</dd>
</dl>

<h3 id="返回">返回</h3>

<p>A {{domxref("DelayNode")}}. If no parameter is passed to <code>createDelay()</code>, the default {{domxref("DelayNode.delayTime")}} is 0 seconds.</p>
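<p>The snippet below is a minimal sketch (the variable names are only illustrative) showing that the returned node applies no delay until you set one, and that its {{domxref("DelayNode.delayTime")}} parameter is capped at the <code>maxDelayTime</code> you passed in:</p>

<pre class="brush: js">var audioCtx = new AudioContext();

// Allow delays of up to 5 seconds on this node
var delayNode = audioCtx.createDelay(5.0);

console.log(delayNode.delayTime.value);    // 0 (no delay until you set one)
console.log(delayNode.delayTime.maxValue); // 5 (limited by maxDelayTime)

// Delay the signal passing through this node by 2 seconds
delayNode.delayTime.value = 2.0;</pre>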

<h2 id="示例">示例</h2>

<p>First, a simple example: sound picked up by the microphone is played back through the speakers with a 3-second delay.</p>

<pre class="brush: js">window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;

var audioContext;
var synthDelay;

try { // Set up the audio context and the delay node
  audioContext = new window.AudioContext();
  synthDelay = audioContext.createDelay(5.0);
} catch (e) {
  alert("Your browser does not support the Web Audio API");
}

var error = function (error) { alert("An error occurred"); };

// Get access to the microphone
if (navigator.getUserMedia) { // standard API
  navigator.getUserMedia({ "audio": true }, function (stream) {
    micto(stream); // do the actual work
  }, error);
} else if (navigator.webkitGetUserMedia) { // WebKit API
  navigator.webkitGetUserMedia({ audio: true, video: false }, function (stream) {
    micto(stream); // do the actual work
  }, error);
} else if (navigator.mozGetUserMedia) { // Firefox API
  navigator.mozGetUserMedia({ "audio": true }, function (stream) {
    micto(stream); // do the actual work
  }, error);
} else if (navigator.msGetUserMedia) { // IE API
  navigator.msGetUserMedia({ "audio": true }, function (stream) {
    micto(stream); // do the actual work
  }, error);
} else {
  alert("Your browser does not support this API");
}

function micto(stream) {
  synthDelay.delayTime.value = 3.0; // delay by 3 seconds

  var source = audioContext.createMediaStreamSource(stream);

  source.connect(synthDelay);
  synthDelay.connect(audioContext.destination);
}
</pre>
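<p>The prefixed <code>getUserMedia()</code> calls above are legacy; in current browsers the same routing can be set up with the promise-based <code>navigator.mediaDevices.getUserMedia()</code>. Here is a brief sketch of that approach (assuming the user grants microphone access):</p>

<pre class="brush: js">const audioContext = new (window.AudioContext || window.webkitAudioContext)();
const synthDelay = audioContext.createDelay(5.0);

navigator.mediaDevices.getUserMedia({ audio: true })
  .then(function (stream) {
    synthDelay.delayTime.value = 3.0; // delay the microphone input by 3 seconds

    const source = audioContext.createMediaStreamSource(stream);
    source.connect(synthDelay);
    synthDelay.connect(audioContext.destination);
  })
  .catch(function (err) {
    console.error('Could not access the microphone:', err);
  });</pre>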

<p>The second example, below, is taken from the English version of this page.</p>

<p>We have created a simple example that allows you to play three different samples on a constant loop — see <a href="http://chrisdavidmills.github.io/create-delay/">create-delay</a> (you can also <a href="https://github.com/chrisdavidmills/create-delay">view the source code</a>). If you just press the play buttons, the loops will start immediately; if you slide the sliders up to the right, then press the play buttons, a delay will be introduced, so the looping sounds don't start playing for a short amount of time.</p>

<pre class="brush: js;highlight[4,15,16,21,22]">var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioCtx = new AudioContext();

var synthDelay = audioCtx.createDelay(5.0);

  ...

var synthSource;

playSynth.onclick = function() {
  synthSource = audioCtx.createBufferSource();
  synthSource.buffer = buffers[2];
  synthSource.loop = true;
  synthSource.start();
  synthSource.connect(synthDelay);
  synthDelay.connect(destination);
  this.setAttribute('disabled', 'disabled');
}

stopSynth.onclick = function() {
  synthSource.disconnect(synthDelay);
  synthDelay.disconnect(destination);
  synthSource.stop();
  playSynth.removeAttribute('disabled');
}

...

var delay1;
rangeSynth.oninput = function() {
  delay1 = rangeSynth.value;
  synthDelay.delayTime.value = delay1;
}
</pre>
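<p>Because {{domxref("DelayNode.delayTime")}} is an {{domxref("AudioParam")}}, the delay does not have to be set directly as above; it can also be scheduled. The following is a short sketch of that idea, reusing <code>audioCtx</code> and <code>synthDelay</code> from the example:</p>

<pre class="brush: js">var now = audioCtx.currentTime;

// Jump to a 1 second delay immediately...
synthDelay.delayTime.setValueAtTime(1.0, now);

// ...then ramp up to 4 seconds of delay over the next 10 seconds.
// Values are limited by the maxDelayTime passed to createDelay() (5.0 here).
synthDelay.delayTime.linearRampToValueAtTime(4.0, now + 10);</pre>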

<h2 id="Specifications">Specifications</h2>

<table class="standard-table">
 <tbody>
  <tr>
   <th scope="col">Specification</th>
   <th scope="col">Status</th>
   <th scope="col">Comment</th>
  </tr>
  <tr>
   <td>{{SpecName('Web Audio API', '#widl-AudioContext-createDelay-DelayNode-double-maxDelayTime', 'createDelay()')}}</td>
   <td>{{Spec2('Web Audio API')}}</td>
   <td> </td>
  </tr>
 </tbody>
</table>

<h2 id="Browser_compatibility">Browser compatibility</h2>

{{Compat("api.BaseAudioContext.createDelay")}}

<h2 id="See_also">See also</h2>

<ul>
 <li><a href="/en-US/docs/Web_Audio_API/Using_Web_Audio_API">Using the Web Audio API</a></li>
</ul>