source: gs3-extensions/web-audio/trunk/js-dsp/frequency-player.js@ 28401

Last change on this file since 28401 was 28401, checked in by davidb, 11 years ago

if-statement guards added for when the graphics equalizer is not in use

File size: 12.9 KB
Line 
1
// Debug flag for the js-dsp layer (0 = off).
var _dspdebug = 0;

// Chroma transform and plotter objects, created in freqPlayerLoadedAudioMetadata().
var chromaTransform;
var frequencyPlotter;

// Frame-concatenation state: audio callbacks are counted up to concatLimit
// before a visual line is plotted; concatSignal holds the concatenated mono signal.
var concatLimit;
var concatCount;
var concatSignal;

// jsmad-based decode/processing pipeline; null until audio metadata has loaded.
var jsMadProcessing = null;
12
/**
 * 'loadedmetadata' handler for the #rta-audio-element <audio> element.
 *
 * Determines the audio parameters (channels, sample rate, frame buffer
 * length, duration) either from the Web Audio API (with faked channel /
 * rate values for browsers such as Chrome) or from the legacy Firefox
 * Audio Data API, then constructs the DSP workflow objects
 * (ActiveWorkflow, JsMadProcessing, FrequencyPlotter) used by the rest
 * of this file.
 *
 * Throws Error if neither audio API is available.
 */
function freqPlayerLoadedAudioMetadata() {
    console.log("started freqPlayerLoadedAudioMetadata()");

    var audio = document.getElementById('rta-audio-element');
    var channels;
    var samplerate;
    var frameBufferLength;
    var duration;

    if (webAudioContext != null) {

        var webAudioSource = webAudioContext.createMediaElementSource(audio);

        // createJavaScriptNode() was renamed createScriptProcessor() in the
        // Web Audio spec; prefer the modern name, fall back to the old one.
        var createProcessor = webAudioContext.createScriptProcessor
            || webAudioContext.createJavaScriptNode;
        var webAudioJSProcessor = createProcessor.call(webAudioContext, 1024, 2, 2);
        webAudioJSProcessor.onaudioprocess = freqPlayerAudioAvailable;

        // Connect the processing graph: source -> jsProcessor -> destination
        webAudioSource.connect(webAudioJSProcessor);
        webAudioJSProcessor.connect(webAudioContext.destination);

        // For now, fake audio values for browsers such as Chrome, so later code works
        channels = 2;
        samplerate = 44100;
        frameBufferLength = 2048;
        duration = audio.duration;
    } else if (typeof audio.mozChannels !== "undefined") {
        // Firefox Audio Data API exposes the real values directly
        channels = audio.mozChannels;
        samplerate = audio.mozSampleRate;
        frameBufferLength = audio.mozFrameBufferLength;
        duration = audio.duration;

    } else {
        throw new Error('AudioContext not supported.');
    }

    // Samples per frame, per channel
    var numSamples = frameBufferLength / channels;

    var info_mess = "frequency-player: { ";
    info_mess += "num channels: " + channels;
    info_mess += ", sample rate: " + samplerate;
    info_mess += ", frame buffer length: " + frameBufferLength;
    info_mess += ", num samples per frame: " + numSamples;
    info_mess += ", duration: " + duration + "(secs)";
    info_mess += " }";
    console.info(info_mess);

    // Accumulate 8 frames of mono signal before each visual update
    concatLimit = 8;
    concatCount = 0;
    concatSignal = new Float32Array(concatLimit * numSamples); // mono

    var totalNumSamples = duration * samplerate;
    var totalNumFrames = Math.floor(totalNumSamples / (numSamples * concatLimit));

    var frame_mess = "processed frames: { ";
    frame_mess += "concatLimit: " + concatLimit;
    frame_mess += ", total num frames = " + totalNumFrames;
    frame_mess += " }";
    console.info(frame_mess);

    var canvas = document.getElementById('freq-plot');
    var label_canvas = document.getElementById('freq-plot-labels');

    // Current UI selections for visualization type and colour mapping
    var transformMode = $('input:radio[name=visualization]:checked').val();
    var colorMappingMode = $('input:radio[name=colorMapping]:checked').val();

    // Build the DSP workflow (note: activeWorkflow is intentionally global,
    // it is also used by freqPlayerAudioAvailable)
    activeWorkflow = new ActiveWorkflow(transformMode, frameBufferLength, concatLimit,
                                        channels, samplerate, totalNumFrames);

    jsMadProcessing = new JsMadProcessing("mysong", activeWorkflow);

    chromaTransform = activeWorkflow.chromaCpt;

    frequencyPlotter = new FrequencyPlotter(audio, canvas, label_canvas, activeWorkflow,
                                            colorMappingMode, totalNumFrames);
}
92
// Playback state shared with the UI handlers.
var audioMode = "stopped";
// When true, the next plotted visual line forces a full redraw.
var needsRefresh = true;
95
/**
 * Radio-button change handler: switch the visualization transform.
 * @param elem - the selected radio input element
 */
function setTransformMode(elem)
{
    // set mode + display labels for plot
    frequencyPlotter.setTransformMode($(elem).val());
}
102
/**
 * Radio-button change handler: switch the colour-mapping mode of the plot.
 * @param elem - the selected radio input element
 */
function setColorMappingMode(elem)
{
    // hand the chosen mode straight to the plotter
    frequencyPlotter.setColorMappingMode($(elem).val());
}
109
/**
 * Called when the asynchronous self-similarity matrix computation reaches
 * 100%: hides the progress indicator (if present on the page) and plots
 * the matrix.
 * @param fracAvailable - fraction (0..1) of the track decoded so far,
 *        passed through to the plotter to scale the plotted portion
 */
function completedSelfSimilarityMatrixComputation(fracAvailable)
{
    // The progress element is only present when the page includes it
    var $progressElem = $('#processing');
    if ($progressElem.length > 0) {
        $progressElem.css('display', 'none');
    }

    frequencyPlotter.plotSelfSimilarityMatrix(chromaTransform.csm, fracAvailable);
}
128
/**
 * Polls chromaTransform.progress every 200ms, updating the on-page
 * progress text (if present), until the self-similarity matrix
 * computation reports 100%, then triggers the completion handler.
 * @param fracAvailable - fraction (0..1) of the track decoded so far,
 *        forwarded to completedSelfSimilarityMatrixComputation()
 */
function monitorSelfSimilarityMatrixComputation(fracAvailable)
{
    var $progressTextElem = $('#processingText');
    var displayProgress = $('#processing').length > 0;

    var progress = chromaTransform.progress;

    if (progress < 100) {
        if (displayProgress) {
            $progressTextElem.html("Processing: " + progress + "%");
        }
        // Still computing: poll again shortly
        setTimeout(function(){ monitorSelfSimilarityMatrixComputation(fracAvailable); }, 200);
    }
    else {
        completedSelfSimilarityMatrixComputation(fracAvailable);
    }
}
150
/**
 * Starts the chromagram self-similarity matrix computation and shows a
 * progress indicator (when the page provides one).  The computation runs
 * asynchronously; monitorSelfSimilarityMatrixComputation() polls it and
 * plots the result when done.
 */
function plotSimilarityMatrix()
{
    // Show the progress panel if the page includes one
    var $progressElem = $('#processing');
    if ($progressElem.length > 0) {
        $progressElem.css('display', 'block');
        $('#processingTitle').html("Computing Chromagram Self-similarity Matrix");
    }

    // Fraction of the track jsmad has decoded so far (capped at 1.0)
    var jsMadProcessedLen = jsMadProcessing.playBufferStore.length / concatLimit;
    var totalNumFrames = frequencyPlotter.totalNumFrames;
    var fracAvailable = Math.min(jsMadProcessedLen / totalNumFrames, 1.0);

    // Starts the progress calculation, but needs to be monitored until finished
    var cfs = chromaTransform.getChromaFeaturesSequence();
    chromaTransform.computeSelfSimilarityMatrix(cfs);

    monitorSelfSimilarityMatrixComputation(fracAvailable);
}
175
/**
 * Checkbox handler: while paused, either plot the self-similarity matrix
 * or restore the normal visualization, depending on the checkbox state.
 * @param elem - the checkbox element (currently unused; the state is read
 *        from the named checkbox directly)
 */
function plotSimilarityMatrixWhenPaused(elem)
{
    // The following could be done by more directly studying 'elem'
    var showSimilarityMatrix = $('input:checkbox[name=similarityMatrix]').prop("checked");

    if (!showSimilarityMatrix) {
        // Checkbox cleared => go back to the regular visual
        frequencyPlotter.replotVisual();
        return;
    }

    plotSimilarityMatrix();
}
191
/**
 * Draws the current FFT magnitude spectrum as vertical bars on the
 * #freq-plot canvas.  Reads the spectrum from chromaTransform.fft.
 * @param fbSpectrum - unused (spectrum is taken from chromaTransform)
 */
function plotFrequencySpectrum(fbSpectrum)
{
    var fft = chromaTransform.fft;

    var canvas = document.getElementById('freq-plot');
    var ctx = canvas.getContext('2d');

    // Clear the canvas before drawing spectrum
    ctx.clearRect(0, 0, canvas.width, canvas.height);

    for (var i = 0; i < fft.spectrum.length; i++) {
        // Multiply spectrum by a zoom value.
        // ('var' added: previously leaked as an implicit global)
        var magnitude = fft.spectrum[i] * 4000;

        // Draw rectangle bars for each frequency bin (negative height
        // draws upward from the canvas bottom)
        ctx.fillRect(i * 4, canvas.height, 3, -magnitude);
    }
}
210
211
// True until freqPlayerAudioAvailable() has run once; used so the
// soundManager 'mysong' volume is muted exactly once when processing starts.
var _fp_first_aa = true;
213
214
/**
 * Per-buffer audio callback, used both as the Web Audio onaudioprocess
 * handler and as the Firefox Audio Data API 'MozAudioAvailable' handler.
 *
 * Responsibilities:
 *  - skip processing while dual-play panning is active;
 *  - apply the graphics equalizer (when in use) and route the processed
 *    audio to the output (Web Audio graph, or mozWriteAudio);
 *  - count frames and trigger a visual plot every concatLimit frames;
 *  - when a Meandre workflow is embedded in the page, play back its
 *    processed output instead.
 *
 * Bug fixes relative to the previous revision:
 *  - mono Firefox branch used 'mono_len' before it was assigned (var
 *    hoisting made it undefined, yielding zero-length Float32Arrays);
 *  - the Web Audio branch now has the same "graphics equalizer not in
 *    use" guards as the Firefox branch, passing audio through unmodified
 *    instead of throwing when grapheqL/grapheqR are undefined.
 */
function freqPlayerAudioAvailable(event) {

    if (togglePlayMode.match("pause")) { // (counter-intuitively) means it is playing
        if (playTrackerMode.match("playing")) {
            // means it is in dualPlay() mode
            return; // don't want the RTA audio playing while dual-playing is doing its panning thing
        }
    }

    var audio = document.getElementById('rta-audio-element');

    var channels;
    var samplerate;
    var frameBufferLength;

    var fb = null; // interleaved stereo sample buffer for this callback

    if (webAudioContext != null) {
        // Working with Web Audio

        var inputArrayL = event.inputBuffer.getChannelData(0);
        var inputArrayR = event.inputBuffer.getChannelData(1);

        var inputLen = inputArrayL.length;

        var outputArrayL = event.outputBuffer.getChannelData(0);
        var outputArrayR = event.outputBuffer.getChannelData(1);

        if (audio.paused) {
            // not processing/playing any new audio => emit silence
            for (var i = 0; i < outputArrayL.length; i++) {
                outputArrayL[i] = 0;
            }
            for (var i = 0; i < outputArrayR.length; i++) {
                outputArrayR[i] = 0;
            }
            return;
        }

        // Interleave L/R into a single stereo buffer for the DSP code below
        fb = new Float32Array(2 * inputLen);
        for (var i = 0; i < inputLen; i++) {
            fb[2*i] = inputArrayL[i];
            fb[(2*i)+1] = inputArrayR[i];
        }

        // And again the fake values to keep the rest of the code happy
        channels = 2;
        frameBufferLength = channels * inputLen;
        samplerate = 44100;

        // Mute the soundManager copy of the track (once) so only the
        // processed audio is heard
        if (_fp_first_aa) {
            console.log("*** !!!!! freqPlayerAudioAvailable, web audio context setting 'mysong' volume to 0");
            soundManager.setVolume("mysong",0.0);
            _fp_first_aa = false;
        }

        // Apply Graphics Equalizer (when in use), and then play the result
        if ((typeof grapheqL !== "undefined") && (typeof grapheqR !== "undefined")) {
            outputArrayL.set(grapheqL.process(inputArrayL));
            outputArrayR.set(grapheqR.process(inputArrayR));
        }
        else {
            // Equalizer not in use => pass the audio through unmodified
            outputArrayL.set(inputArrayL);
            outputArrayR.set(inputArrayR);
        }
    }
    else {
        // Working with Firefox Audio API
        channels = audio.mozChannels;
        samplerate = audio.mozSampleRate;
        frameBufferLength = audio.mozFrameBufferLength;

        fb = event.frameBuffer;

        if (channels==2) {
            // De-interleave, equalize each channel (when in use), re-interleave
            var mono_len = fb.length/2;
            var leftSignal = new Float32Array(mono_len);
            var rightSignal = new Float32Array(mono_len);

            for (var i=0; i<mono_len; i++) {
                leftSignal[i] = fb[2*i];
                rightSignal[i] = fb[(2*i)+1];
            }

            if ((typeof grapheqL !== "undefined") && (typeof grapheqR !== "undefined")) {
                leftSignal = grapheqL.process(leftSignal);
                rightSignal = grapheqR.process(rightSignal);
            }

            for (var i=0; i<mono_len; i++) {
                fb[2*i] = leftSignal[i];
                fb[(2*i)+1] = rightSignal[i];
            }
        }
        else {
            // mono input, but presumably stereo out

            // untested code // *****

            // (bug fix: this assignment previously appeared after the first
            // uses of mono_len below, which saw 'undefined' due to hoisting)
            var mono_len = fb.length;

            var fbLeft;
            var fbRight;

            if ((typeof grapheqL !== "undefined") && (typeof grapheqR !== "undefined")) {
                fbLeft = grapheqL.process(fb);
                fbRight = grapheqR.process(fb);
            }
            else {
                fbLeft = new Float32Array(mono_len);
                fbRight = new Float32Array(mono_len);
                for (var i=0; i<mono_len; i++) {
                    fbLeft[i] = fb[i];
                    fbRight[i] = fb[i];
                }
            }

            // Duplicate the mono signal into an interleaved stereo buffer
            fb = new Float32Array(mono_len * 2);

            for (var i=0; i<mono_len; i++) {
                fb[2*i] = fbLeft[i];
                fb[(2*i)+1] = fbRight[i];
            }
        }

        // Mute the soundManager copy of the track (once), then play the
        // processed result via the Audio Data API

        if (_fp_first_aa) {
            console.log("*** freqPlayerAudioAvailable, Firefox audio API setting 'mysong' volume to 0");
            soundManager.setVolume("mysong",0.0);
            _fp_first_aa = false;
        }

        var written = pannedPlayback.mozWriteAudio(fb); // could equally make it baseSong ??
        if (written != fb.length) {
            console.warn("Did not write out as many samples for output as were in the input");
            console.warn("input size = " + fb.length + ", output size = " + written);
        }
    }

    var numSamples = fb.length/channels;

    // Sanity check: the buffer should match the advertised frame length
    if (fb.length != frameBufferLength) {
        console.error("fb length = " + fb.length);
        console.error("frame buffer length = " + frameBufferLength);
        console.error("Expected these values to be the same");
    }

    // Plot one visual line every 'concatLimit' audio frames
    concatCount++;

    if (concatCount==concatLimit) {

        frequencyPlotter.plotVisualLine(needsRefresh,audio.currentTime);

        needsRefresh = false;

        concatCount=0;
    }

    // Uncomment the following line to have the audio data being decoded by
    // the 'rta-audio-element' audio element pushed in to the Javascript DSP workflow
    //activeWorkflow.pumpDataHead("rawInput",fb);

    if (typeof selectedMeandreWorkflow !== "undefined") {
        // Have a Meandre workflow embedded in the page
        if (selectedMeandreWorkflow.meandreDataOutput.length>0) {
            // play some processed data

            console.log("*** freqPlayerAudioAvailable, Meandre workflow setting 'mysong' volume to 0");

            soundManager.setVolume("mysong",0);
            pannedPlayback.volume = 1.0;

            var frame = selectedMeandreWorkflow.meandreDataOutput[0];

            // Duplicate the mono workflow output into an interleaved stereo frame
            var stereo_frame = new Float32Array(frame.length*2);
            for (var i=0; i<frame.length; i++) {
                stereo_frame[2*i] = frame[i];
                stereo_frame[2*i+1] = frame[i];
            }

            // Start playing the audio data returned from the Meandre workflow
            var written = pannedPlayback.mozWriteAudio(stereo_frame);

            if (written == 0) {
                // audio has backed up => give it a chance to clear
                return;
            }

            if (togglePlayMode == "play") {
                // displaying play button (which counter-intuitively means it must be paused)
                return;
            }

            if (written != stereo_frame.length) {
                // stereo_frame has 2 samples per mono sample, so written/2
                // mono samples were consumed; keep the remainder queued
                selectedMeandreWorkflow.meandreDataOutput[0] = frame.subarray(written/2);
            }
            else {
                // whole frame written: pop the head of the queue
                selectedMeandreWorkflow.meandreDataOutput.shift();
            }
        }

    }
}
434
Note: See TracBrowser for help on using the repository browser.