1 |
|
---|
// Debug flag for DSP-related logging (0 = off).
var _dspdebug = 0;

// Chroma feature transform; set in freqPlayerLoadedAudioMetadata()
// from activeWorkflow.chromaCpt.
var chromaTransform;
// Plotter that renders frequency/chroma visuals onto the page canvases.
var frequencyPlotter;

// Number of audio frames concatenated before a visual line is plotted.
var concatLimit;
// Running count of frames accumulated toward concatLimit.
var concatCount;
// Mono sample buffer sized to hold concatLimit frames.
var concatSignal;

// JsMad-based audio processor; created once audio metadata is loaded.
var jsMadProcessing = null;
---|
// Called when the 'rta-audio-element' audio element has loaded its metadata.
// Determines the audio parameters (Web Audio vs Firefox Audio Data API),
// wires up the Web Audio processing graph, and constructs the global
// workflow, JsMad processor, chroma transform, and frequency plotter.
function freqPlayerLoadedAudioMetadata() {
    console.log("started freqPlayerLoadedAudioMetadata()");

    var audio = document.getElementById('rta-audio-element');
    var channels;
    var samplerate;
    var frameBufferLength;
    var duration;

    if (webAudioContext != null) {

        var webAudioSource = webAudioContext.createMediaElementSource(audio);

        // NOTE(review): createJavaScriptNode is the legacy pre-createScriptProcessor
        // Web Audio API name — confirm target browsers still support it.
        var webAudioJSProcessor = webAudioContext.createJavaScriptNode(1024,2,2);
        webAudioJSProcessor.onaudioprocess = freqPlayerAudioAvailable;

        // Connect the processing graph: source -> jsProcessor -> destination
        // (no analyser node is actually inserted here)
        webAudioSource.connect(webAudioJSProcessor);
        webAudioJSProcessor.connect(webAudioContext.destination);

        // For now, fake audio values for browsers such as Chrome, so later code works
        channels = 2;
        samplerate = 44100;
        frameBufferLength = 2048;
        duration = audio.duration;
    } else if (typeof audio.mozChannels !== "undefined") {
        // Firefox audio API
        channels = audio.mozChannels;
        samplerate = audio.mozSampleRate;
        frameBufferLength = audio.mozFrameBufferLength;
        duration = audio.duration;

    } else {
        throw new Error('AudioContext not supported.');
    }

    // Samples per channel in one frame buffer.
    var numSamples = frameBufferLength/channels;

    var info_mess = "frequency-player: { ";
    info_mess += "num channels: " + channels;
    info_mess += ", sample rate: " + samplerate;
    info_mess += ", frame buffer length: " + frameBufferLength;
    info_mess += ", num samples per frame: " + numSamples;
    info_mess += ", duration: " + duration + "(secs)";
    info_mess += " }";
    console.info(info_mess);

    // Accumulate 'concatLimit' frames before each plotted visual line.
    concatLimit = 8;
    concatCount = 0;
    concatSignal = new Float32Array(concatLimit * numSamples); // mono

    var totalNumSamples = duration * samplerate;
    var totalNumFrames = Math.floor(totalNumSamples / (numSamples*concatLimit));

    var frame_mess = "processed frames: { ";
    frame_mess += "concatLimit: " + concatLimit;
    frame_mess += ", total num frames = " + totalNumFrames;
    frame_mess += " }";
    console.info(frame_mess);

    var canvas = document.getElementById('freq-plot');
    var label_canvas = document.getElementById('freq-plot-labels');

    // Currently selected radio-button options for visualization/colour mapping.
    var transformMode = $('input:radio[name=visualization]:checked').val();
    var colorMappingMode = $('input:radio[name=colorMapping]:checked').val();

    // ****
    //chromaTransform = new ChromaTransform(transformMode, frameBufferLength*concatLimit, channels,samplerate,totalNumFrames);
    // NOTE(review): 'activeWorkflow' has no 'var' and so becomes a global; it is
    // also referenced from freqPlayerAudioAvailable(), which suggests this is
    // intentional — confirm before "fixing" it.
    activeWorkflow = new ActiveWorkflow(transformMode, frameBufferLength, concatLimit,
        channels,samplerate,totalNumFrames);

    jsMadProcessing = new JsMadProcessing("mysong",activeWorkflow);

    chromaTransform = activeWorkflow.chromaCpt;

    frequencyPlotter = new FrequencyPlotter(audio,canvas,label_canvas,activeWorkflow,colorMappingMode,totalNumFrames);

    //console.log("finished loadedMetadata()");
}
---|
92 |
|
---|
// Playback state label; starts as "stopped".
var audioMode = "stopped";
// When true, the next call to plotVisualLine() is told to refresh the plot;
// it is cleared after the first plotted line (see freqPlayerAudioAvailable).
var needsRefresh = true;
---|
95 |
|
---|
/**
 * Radio-button change handler: switches the visualization transform mode.
 * Delegates to the global frequencyPlotter, which sets the mode and displays
 * the matching labels for the plot.
 * @param {Element} elem - the radio input whose value names the new mode
 */
function setTransformMode(elem)
{
    // Set mode + display labels for the plot.
    frequencyPlotter.setTransformMode($(elem).val());
}
---|
102 |
|
---|
/**
 * Radio-button change handler: switches the colour-mapping mode used by the
 * global frequencyPlotter.
 * @param {Element} elem - the radio input whose value names the new mapping
 */
function setColorMappingMode(elem)
{
    frequencyPlotter.setColorMappingMode($(elem).val());
}
---|
109 |
|
---|
/**
 * Finishes the self-similarity computation: hides the '#processing' progress
 * element (when the page has one) and plots the computed matrix.
 *
 * Fixes: removed the redundant '(cond) ? true : false' ternary, and dropped
 * the unused 'audio'/'duration'/'currentTime' locals that were only
 * referenced by commented-out code.
 *
 * @param {number} fracAvailable - fraction (0..1) of the track processed so far
 */
function completedSelfSimilarityMatrixComputation(fracAvailable)
{
    var $progressElem = $('#processing');

    // Only pages that include the '#processing' element show progress UI.
    if ($progressElem.length > 0) {
        $progressElem.css('display','none');
    }

    // (An earlier variant plotted against audio.currentTime/audio.duration:)
    //frequencyPlotter.plotSelfSimilarityMatrix(chromaTransform.csm,currentTime/duration);

    frequencyPlotter.plotSelfSimilarityMatrix(chromaTransform.csm,fracAvailable);
}
---|
128 |
|
---|
/**
 * Polls chromaTransform.progress every 200ms, updating the optional
 * '#processingText' element, until the self-similarity matrix computation
 * reaches 100%, then hands off to completedSelfSimilarityMatrixComputation().
 *
 * Fix: removed the redundant '(cond) ? true : false' ternary.
 *
 * @param {number} fracAvailable - fraction (0..1) of the track processed so far
 */
function monitorSelfSimilarityMatrixComputation(fracAvailable)
{
    var $progressElem = $('#processing');
    var $progressTextElem = $('#processingText');

    // Only pages that include the '#processing' element show progress UI.
    var displayProgress = $progressElem.length > 0;

    var progress = chromaTransform.progress;

    if (progress < 100) {
        if (displayProgress) {
            $progressTextElem.html("Processing: " + progress + "%");
        }
        // Re-poll shortly; the computation advances asynchronously.
        setTimeout(function(){monitorSelfSimilarityMatrixComputation(fracAvailable)},200);
    }
    else {
        completedSelfSimilarityMatrixComputation(fracAvailable);
    }
}
---|
150 |
|
---|
/**
 * Plots the chromagram self-similarity matrix.  Uses a previously stored
 * matrix image when available; otherwise starts the (asynchronous)
 * computation on chromaTransform and monitors it until finished.
 *
 * Fix: removed the redundant '(cond) ? true : false' ternary.
 */
function plotSimilarityMatrix()
{
    if (frequencyPlotter.hasStoredSelfSimilarityMatrix()) {
        console.log("Using stored self-similarity matrix image");
        frequencyPlotter.plotStoredSelfSimilarityMatrix();
        return;
    }

    var $progressElem = $('#processing');
    var displayProgress = $progressElem.length > 0;
    if (displayProgress) {
        $progressElem.css('display','block');
        var $progressTitleElem = $('#processingTitle');
        $progressTitleElem.html("Computing Chromagram Self-similarity Matrix");
    }

    // How many concatenated frames JsMad has decoded so far, versus the total
    // the track will produce — used to report the available fraction.
    var jsMadProcessedLen = jsMadProcessing.playBufferStore.length/concatLimit;
    var totalNumFrames = frequencyPlotter.totalNumFrames;

    //console.log("*** playBufferStore len = " + jsMadProcessedLen);
    //console.log("*** total num frames = " + frequencyPlotter.totalNumFrames);

    var fracAvailable = Math.min(jsMadProcessedLen/totalNumFrames,1.0);

    var cfs = chromaTransform.getChromaFeaturesSequence();
    chromaTransform.computeSelfSimilarityMatrix(cfs); // starts the progress calculation, but need to be monitored until finished

    monitorSelfSimilarityMatrixComputation(fracAvailable);
}
---|
182 |
|
---|
/**
 * Checkbox handler: toggles between the self-similarity matrix view and the
 * normal visual plot (intended for use while playback is paused).
 * @param {Element} elem - the checkbox input; its state is re-read from the
 *     DOM by name rather than inspected directly
 */
function plotSimilarityMatrixWhenPaused(elem)
{
    // toglePlayMode reflects what graphic the button is showing => displaying "play" means it is paused (or hasn't been played yet)
    //if (togglePlayMode.match("play")) {

    // the following could be done by more directly studying 'elem'
    var wantMatrix = $('input:checkbox[name=similarityMatrix]').prop("checked");

    if (!wantMatrix) {
        frequencyPlotter.replotVisual();
        return;
    }

    plotSimilarityMatrix();
    //}
}
---|
198 |
|
---|
199 |
|
---|
/**
 * Checkbox handler: toggles JsMad decoding between faster-than-realtime and
 * throttled batch processing.
 * @param {Element} elem - the checkbox input; its state is re-read from the
 *     DOM by name rather than inspected directly
 */
function acceleratedProcessingCheckbox(elem)
{
    // the following could be done by more directly studying 'elem'
    var accelerated = $('input:checkbox[name=acceleratedProcessingInput]').prop("checked");

    if (accelerated) {
        console.log("frequency-player: set JsMad Processing to be faster than realtime");
        jsMadProcessing.setSpeedupParams(100,0); // e.g. if Firefox
        return;
    }

    console.log("frequency-player: set JsMad Processing process in batches of 10, with a 1 sec delay");
    jsMadProcessing.setSpeedupParams(10,1000);
}
---|
213 |
|
---|
214 |
|
---|
/**
 * Draws a simple bar spectrum of the current FFT magnitudes onto the
 * 'freq-plot' canvas.
 *
 * Fix: 'magnitude' was previously assigned without 'var', creating an
 * implicit global; it is now a proper local.
 *
 * @param {*} fbSpectrum - unused; the spectrum is read from chromaTransform.fft
 */
function plotFrequencySpectrum(fbSpectrum)
{
    var fft = chromaTransform.fft;

    var canvas = document.getElementById('freq-plot');
    var ctx = canvas.getContext('2d');

    // Clear the canvas before drawing spectrum
    ctx.clearRect(0,0, canvas.width, canvas.height);

    for (var i = 0; i < fft.spectrum.length; i++ ) {
        // multiply spectrum by a zoom value
        var magnitude = fft.spectrum[i] * 4000;

        // Draw rectangle bars for each frequency bin; the negative height
        // makes fillRect draw upward from the canvas bottom edge.
        ctx.fillRect(i * 4, canvas.height, 3, -magnitude);
    }
}
---|
233 |
|
---|
234 |
|
---|
// True until the first audio-available callback runs; used so the
// soundManager "mysong" playback is muted exactly once.
var _fp_first_aa = true;
---|
236 |
|
---|
237 |
|
---|
/**
 * Audio callback, wired up either as a Web Audio onaudioprocess handler or as
 * a Firefox audio-available handler.  If an embedded Meandre workflow has
 * processed data queued, that data is played instead of the raw input;
 * otherwise the input is (optionally) run through the graphic equalizer,
 * played, and periodically plotted as a visual line.
 *
 * Fixes relative to the original:
 *  - 'audio' was read in the Meandre branch before its hoisted 'var'
 *    declaration further down, so 'audio.paused' threw on undefined;
 *  - the paused Meandre/Web-Audio block iterated over the (still undefined)
 *    'stereo_frame' and assigned the array itself into the output; it now
 *    writes silence to both channels;
 *  - in the mono Firefox branch, 'mono_len' was used before it was assigned;
 *    the assignment now precedes the first use.
 *
 * @param {Event} event - AudioProcessingEvent (Web Audio) or Firefox Audio
 *     Data API event carrying 'frameBuffer'
 */
function freqPlayerAudioAvailable(event) {
    //console.log("audioAvailable");

    // Fix: declare 'audio' up front so the Meandre branch below can safely
    // test 'audio.paused' (previously this read a hoisted, undefined var).
    var audio = document.getElementById('rta-audio-element');

    if (togglePlayMode.match("pause")) { // (counter-intuitively) means it is playing
        if (playTrackerMode.match("playing")) {
            // means it is in dualPlay() mode
            return; // don't want the RTA audio playing while dual-playing is doing its panning thing
        }
    }

    if (typeof selectedMeandreWorkflow !== "undefined") {
        // Have a meandre workflow embedded in the page
        if (selectedMeandreWorkflow.meandreDataOutput.length>0) {
            // play some processed data

            console.log("*** freqPlayerAudioAvailable, Meandre workflow setting 'mysong' volume to 0");

            soundManager.setVolume("mysong",0);
            pannedPlayback.volume = 1.0;

            // Head of the queue of processed (mono) frames from the workflow.
            var frame = selectedMeandreWorkflow.meandreDataOutput[0];

            if (webAudioContext!=null) {
                // Working with Web audio
                console.log("*** processing meandre output as Web audio");

                var inputLen = frame.length;

                var outputArrayL = event.outputBuffer.getChannelData(0);
                var outputArrayR = event.outputBuffer.getChannelData(1);

                if (audio.paused) {
                    // Not processing/playing any new audio => output silence.
                    // Fix: the original looped over the undefined 'stereo_frame'
                    // here and assigned the whole array into outputArrayL.
                    for (var i=0; i<outputArrayL.length; i++) {
                        outputArrayL[i] = 0;
                    }
                    for (var i=0; i<outputArrayR.length; i++) {
                        outputArrayR[i] = 0;
                    }
                    return;
                }

                // And again the fake values to keep the rest of the code happy
                // **** Are these actually used in this block??
                channels = 2;
                frameBufferLength = channels * inputLen;
                samplerate = 44100;

                // play the result (same mono frame into both channels)
                for (var i=0; i<outputArrayL.length; i++) {
                    outputArrayL[i] = frame[i];
                }
                for (var i=0; i<outputArrayR.length; i++) {
                    outputArrayR[i] = frame[i];
                }

                if (togglePlayMode == "play") {
                    // displaying play button (which counter-intuatively means it must be paused)
                    return;
                }

                //console.log("***### popping head of meandreDataOutput queue");
                selectedMeandreWorkflow.meandreDataOutput.shift();
            }
            else {
                // Firefox Audio Data API: duplicate the mono frame into an
                // interleaved stereo buffer and write it out.
                var stereo_frame = new Float32Array(frame.length*2);
                for (var i=0; i<frame.length; i++) {
                    stereo_frame[2*i] = frame[i];
                    stereo_frame[2*i+1] = frame[i];
                }

                // Start playing the audio data returned from the Meandre workflow
                var written = pannedPlayback.mozWriteAudio(stereo_frame);

                if (written == 0) {
                    // audio has backed up => give it a chance to clear
                    return;
                }

                if (togglePlayMode == "play") {
                    // displaying play button (which counter-intuatively means it must be paused)
                    return;
                }

                if (written != stereo_frame.length) {
                    // Partial write: keep the unplayed tail (written/2 mono
                    // samples were consumed, since the output was stereo).
                    selectedMeandreWorkflow.meandreDataOutput[0] = frame.subarray(written/2);
                }
                else {
                    //console.log("***### popping head of meandreDataOutput queue");
                    selectedMeandreWorkflow.meandreDataOutput.shift();
                }
            }

            return; // Data output from Meandre workflow has been played => Prevent any subsequent audio from being played
        }
    }

    var channels;
    var samplerate;
    var frameBufferLength;

    // Interleaved stereo sample buffer for the visualization code below.
    var fb = null;

    if (webAudioContext!=null) {
        // Working with Web audio

        var inputArrayL = event.inputBuffer.getChannelData(0);
        var inputArrayR = event.inputBuffer.getChannelData(1);

        var inputLen = inputArrayL.length;

        var outputArrayL = event.outputBuffer.getChannelData(0);
        var outputArrayR = event.outputBuffer.getChannelData(1);

        if (audio.paused) {
            // not processing/playing any new audio => output silence

            for (var i=0; i<outputArrayL.length; i++) {
                outputArrayL[i] = 0;
            }
            for (var i=0; i<outputArrayR.length; i++) {
                outputArrayR[i] = 0;
            }
            return;
        }

        // Interleave L/R into a single stereo buffer for the plotting code.
        fb = new Float32Array(2*inputLen);

        for (var i=0; i<inputLen; i++) {
            fb[2*i] = inputArrayL[i];
            fb[(2*i)+1] = inputArrayR[i];
        }

        // And again the fake values to keep the rest of the code happy
        channels = 2;
        frameBufferLength = channels * inputLen;
        samplerate = 44100;

        if (_fp_first_aa) {
            console.log("*** !!!!! freqPlayerAudioAvailable, web audio context setting 'mysong' volume to 0");
            soundManager.setVolume("mysong",0.0);
            _fp_first_aa = false;
        }

        // Apply Graphics Equalizer (when present), and then play the result

        if ((typeof grapheqL !== "undefined") && (typeof grapheqR !== "undefined")) {
            outputArrayL.set(grapheqL.process(inputArrayL));
            outputArrayR.set(grapheqR.process(inputArrayR));
        }
        else {
            outputArrayL.set(inputArrayL);
            outputArrayR.set(inputArrayR);
        }

    }
    else {
        // Working with Firefox Audio API
        channels = audio.mozChannels;
        samplerate = audio.mozSampleRate;
        frameBufferLength = audio.mozFrameBufferLength;

        fb = event.frameBuffer;

        // Apply Graphics Equalizer, and then play the result

        if (channels==2) {
            // De-interleave, equalize each channel, and re-interleave in place.
            var mono_len = fb.length/2;
            var leftSignal = new Float32Array(mono_len);
            var rightSignal = new Float32Array(mono_len);

            for (var i=0; i<mono_len; i++) {
                leftSignal[i] = fb[2*i];
                rightSignal[i] = fb[(2*i)+1];
            }

            if ((typeof grapheqL !== "undefined") && (typeof grapheqR !== "undefined")) {
                leftSignal = grapheqL.process(leftSignal);
                rightSignal = grapheqR.process(rightSignal);
            }

            for (var i=0; i<mono_len; i++) {
                fb[2*i] = leftSignal[i];
                fb[(2*i)+1] = rightSignal[i];
            }
        }
        else {
            // mono input, but presumable stero out

            // untested code // *****

            // Fix: 'mono_len' must be assigned before it is used to size
            // fbLeft/fbRight (the original declared it only after the
            // if/else below, so those arrays were sized from 'undefined').
            var mono_len = fb.length;

            var fbLeft;
            var fbRight;

            if ((typeof grapheqL !== "undefined") && (typeof grapheqR !== "undefined")) {
                fbLeft = grapheqL.process(fb);
                fbRight = grapheqR.process(fb);
            }
            else {
                fbLeft = new Float32Array(mono_len);
                fbRight = new Float32Array(mono_len);
                for (var i=0; i<mono_len; i++) {
                    fbLeft[i] = fb[i];
                    fbRight[i] = fb[i];
                }
            }

            // Duplicate the mono signal into an interleaved stereo buffer.
            fb = new Float32Array(mono_len * 2);

            for (var i=0; i<mono_len; i++) {
                fb[2*i] = fbLeft[i];
                fb[(2*i)+1] = fbRight[i];
            }
        }

        if (_fp_first_aa) {
            console.log("*** freqPlayerAudioAvailable, Firefox audio API setting 'mysong' volume to 0");
            soundManager.setVolume("mysong",0.0);
            _fp_first_aa = false;
        }

        var written = pannedPlayback.mozWriteAudio(fb); // could equally make it baseSong ??
        if (written != fb.length) {
            console.warn("Did not write out as many samples for output as were in the input");
            console.warn("input size = " + fb.length + ", output size = " + written);
        }
    }

    var numSamples = fb.length/channels;

    if (fb.length != frameBufferLength) {
        console.error("fb length = " + fb.length);
        console.error("frame buffer length = " + frameBufferLength);
        console.error("Expected these values to be the same");
    }

    concatCount++;

    if (concatCount==concatLimit) {
        // Enough frames accumulated => plot the next visual line.
        frequencyPlotter.plotVisualLine(needsRefresh,audio.currentTime);

        needsRefresh = false;

        concatCount=0;
    }

    // Uncomment the following line to have the audio data being decoded by
    // the 'rta-audio-element' audio element pushed in to the Javascript DSP workflow
    //
    // *** Note: The following has been superceeded by the combination of
    // JsMad and embeeded Meandre as a way of processing audio, which can operate
    // at faster then realtime
    // The related Meandre block of code is at the start of this method

    //activeWorkflow.pumpDataHead("rawInput",fb);

}
---|
534 |
|
---|