source: gs3-extensions/web-audio/trunk/diy-audio-player/script/similarity-matrix-play.js @ 28548

Last change on this file since 28548 was 28548, checked in by davidb, 10 years ago

Changes after developing the demo for the SMAM-2013 keynote talk

File size: 21.2 KB

// Audio objects
var baseSong;
var leftSong;
var rightSong;

var pannedPlayback = new Audio();
//var pannedSink = new Sink();

var leftBuffers = [];
var rightBuffers = [];

var leftStartTimeSecs = 0.0;
var rightStartTimeSecs = 0.0;
var leftStartTimeOffset = 0;
var rightStartTimeOffset = 0;

var playTrackerMode = "stopped";

var startPlayX;
var startPlayY;

var pannedPlayWidth;
var pannedPlayHeight;

var prevPlayedX = -1;
var prevPlayedY = -1;

//var prevPlayedOffset = -1;

var preparingDualAudio = 0;

var _diyImages = "ext/diy-audio-player/images";

// Reset the panned-playback state: output Audio element, queued buffers,
// start times and tracker position
function resetAudioBuffers()
{
    //console.log("Panned audio playback buffers initialized/reset");

    pannedPlayback = new Audio();
    if (webAudioContext != null) {
        console.log("*** Web Audio filler for resetAudioBuffers()");
    }
    else {
        pannedPlayback.mozSetup(2, baseSong.mozSampleRate);
    }

    //pannedSink = new Sink(baseSong.mozSampleRate,2);
    //pannedSink.writeMode = "sink";

    leftBuffers = [];
    rightBuffers = [];

    leftStartTimeSecs = 0.0;
    rightStartTimeSecs = 0.0;
    leftStartTimeOffset = 0;
    rightStartTimeOffset = 0;

    prevPlayedX = -1;
    prevPlayedY = -1;

    // prevPlayedOffset = -1;
}


// Click handler for the self-similarity image: maps the clicked (x,y) to two
// start times (one per axis) and begins dual playback from those positions
function dualPlay(evt)
{
    //if ($('#playtracker').css('visibility') == 'hidden') { return };

    var mouse_x = evt.pageX;
    var mouse_y = evt.pageY;

    // display busy/loading cursor
    var playTrackerBusy = document.getElementById("playtrackerBusy");
    playTrackerBusy.setAttribute("style","visibility: visible;");
    preparingDualAudio = 1;

    var selfSimImg = document.getElementById("selfSimImgId");
    var imgWidth = selfSimImg.getAttribute("width");
    var imgHeight = selfSimImg.getAttribute("height") * yScaleSimImg;

    // console.log("**** image w x h = " + imgWidth + "," + imgHeight);

    var img_x = $("#selfSimImgId").offset().left;
    var img_y = $("#selfSimImgId").offset().top;

    var x_org = mouse_x - img_x;
    var y_org = mouse_y - img_y;
    //var adjusted_y_org = imgHeight - y_org;
    var adjusted_x_org = x_org - FrequencyPlotter.fudgeOffset;
    var adjusted_y_org = y_org; // *****

    //console.log("img (x y) = (" + img_x + "," + img_y + ")");
    //console.log("mouse (x y) = (" + mouse_x + "," + mouse_y + ")");
    //console.log("(x y) = (" + x_org + "," + adjusted_y_org + ")");

    startPlayX = x_org;
    startPlayY = y_org;

    //pannedPlayWidth = imgWidth - x_org;
    var fudgeWidth = imgWidth - (FrequencyPlotter.fudgeOffset + FrequencyPlotter.fudgeTrim);
    pannedPlayWidth = fudgeWidth - adjusted_x_org;
    //pannedPlayHeight = y_org;
    pannedPlayHeight = imgHeight - y_org; // **** adjusted

    resetAudioBuffers();

    //var baseSongSrc="sites/localsite/collect/" + collect +"/import/"+source; // ****
    // improvement: ensure the plugin generates a .ogg file, and then use that
    var baseSongSrc = gs.collectionMetadata["httpPath"]+"/index/assoc/"+gs.documentMetadata["assocfilepath"]+"/doc.ogg";

    //console.log("duration = " + duration);

    //leftStartTimeSecs = duration * (x_org / imgWidth);
    leftStartTimeSecs = duration * (adjusted_x_org / fudgeWidth);
    rightStartTimeSecs = duration * (adjusted_y_org / imgHeight);

    var sampleMult = baseSong.mozSampleRate * baseSong.mozChannels;
    leftStartTimeOffset = leftStartTimeSecs * sampleMult;
    rightStartTimeOffset = rightStartTimeSecs * sampleMult;

    //leftSong.src = baseSongSrc + "#t=" + leftStartTimeSecs + ",";
    //rightSong.src = baseSongSrc + "#t=" + rightStartTimeSecs + ",";

    leftSong.currentTime = leftStartTimeSecs;
    leftSong.play();

    rightSong.currentTime = rightStartTimeSecs;
    rightSong.play();

    // Drives the visualization
    baseSong.currentTime = leftStartTimeSecs;
    baseSong.volume = 0.0;
    baseSong.play();

    // console.log("play tracker mode = " + playTrackerMode);
    if (togglePlayMode.match("pause")) {
        // i.e. currently displaying the pause button
        // stop
        if (playTrackerMode.match("stopped")) {
            // => playing through the main play bar
            soundManager.stopAll();
            togglePlayVisual(document.getElementById("mainPlayButton"));
        }
    }

    console.log("*** DualPlay:: muting 'mysong' volume to 0");
    soundManager.setVolume("mysong",0.0);
    var soundManagerFrameOffset = Math.round(1000.0*leftStartTimeSecs/250.0); // **** better to develop a function for this computation; see the sketch after this function
    playFromOffset("mysong",soundManagerFrameOffset);

/*
    if (!playTrackerMode.match("playing")) {
        togglePlayVisual(document.getElementById("mainPlayButton"));
    }
*/
}
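
// A minimal sketch of the helper suggested by the "better to develop a
// function" note in dualPlay() above.  The 250 ms frame duration is assumed
// from the inline expression there; the function name is hypothetical and
// nothing currently calls it.
function secsToSoundManagerFrameOffset(timeSecs, frameDurationMsecs)
{
    frameDurationMsecs = frameDurationMsecs || 250.0;
    return Math.round((timeSecs * 1000.0) / frameDurationMsecs);
}
// e.g. playFromOffset("mysong", secsToSoundManagerFrameOffset(leftStartTimeSecs));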


// Show the play tracker when the mouse enters the self-similarity image
// (only while the main player shows 'play' and the matrix is enabled)
function playTrackerOn(evt)
{
    if (togglePlayMode.match("play") && $('input:checkbox[name=similarityMatrix]').prop("checked")) {

        var playTracker = document.getElementById("playtracker");
        playTrackerMove(evt);
        playTracker.setAttribute("style","visibility: visible");
    }
}

// Hide the play tracker
function playTrackerOff(evt)
{
    var playTracker = document.getElementById("playtracker");
    playTracker.setAttribute("style","visibility: hidden");
}

// Update the tracker's L/R time labels from an (x,y) position on the image
function updateLeftAndRightTimes(cx,cy)
{
    var selfSimImg = document.getElementById("selfSimImgId");
    var imgWidth = selfSimImg.getAttribute("width");
    var imgHeight = selfSimImg.getAttribute("height") * yScaleSimImg;

    //var mysong = document.getElementById("mysong");
    //var barWidth = mysong.getElementWidth();

    var fudgeWidth = imgWidth - (FrequencyPlotter.fudgeOffset + FrequencyPlotter.fudgeTrim);

    // update L & R times on tracker
    var currPlayedXInSecs = (cx/fudgeWidth) * duration; // **** adjusted
    var currPlayedYInSecs = (cy/imgHeight) * duration;

    // Deliberately crossed to fit the visual playTracker circle

    var leftTime = document.getElementById("leftTime");
    var leftText = "R: " + currPlayedXInSecs.toFixed(0) + " secs";
    // (used to do 1 decimal place)

    var leftTextNode = leftTime.childNodes[0];
    leftTextNode.nodeValue = leftText;

    var rightTime = document.getElementById("rightTime");
    var rightText = "L: " + currPlayedYInSecs.toFixed(0) + " secs";

    var rightTextNode = rightTime.childNodes[0];
    rightTextNode.nodeValue = rightText;

    // console.log("left text = " + leftText);
    // console.log("right text = " + rightText);
}


// Move the play tracker to follow the mouse (only while not actively playing)
function playTrackerMove(evt)
{
    if (!playTrackerMode.match("playing")) {
        var mouse_x = evt.pageX;
        var mouse_y = evt.pageY;

        var img_x = $("#selfSimImgId").offset().left;
        var img_y = $("#selfSimImgId").offset().top;

        var x_org = mouse_x - img_x;
        var y_org = mouse_y - img_y;

        var adjusted_x_org = x_org - FrequencyPlotter.fudgeOffset;
        var adjusted_y_org = y_org;

        updateLeftAndRightTimes(adjusted_x_org,adjusted_y_org);

        var playTracker = document.getElementById("playtracker");
        var transform = "translate("+x_org+","+y_org+")";
        playTracker.setAttribute("transform",transform);
    }
}

// Create the SVG self-similarity image, wire up its mouse handlers, and size
// the enclosing SVG element
function svgInitSimilarityPlay()
{
    var mysong = document.getElementById("mysong");
    var barWidth = mysong.getElementWidth();

    //var baseSongSrc="sites/localsite/collect/" + collect +"/import/"+source;
    // improvement: ensure the plugin generates a .ogg file, and then use that
    var baseSongSrc = gs.collectionMetadata["httpPath"]+"/index/assoc/"+gs.documentMetadata["assocfilepath"]+"/doc.ogg";

    baseSong = document.getElementById("rta-audio-element");
    //baseSong = new Audio();
    //baseSong.src = baseSongSrc;
    //baseSong.control = "control";

    //baseSong.addEventListener('MozAudioAvailable', audioAvailable, false);
    //baseSong.addEventListener('loadedmetadata', loadedMetadata, false);

/*
    // match the pannedPlayback sample rate to 'baseSong' once it is set up
    baseSong.addEventListener('loadedmetadata', initPannedPlaybackSampleRate, false);

    //leftSong = new Audio();
    leftSong = document.getElementById("leftsong");
    leftSong.addEventListener('MozAudioAvailable', audioAvailableLeft, false);
    leftSong.addEventListener('loadedmetadata', loadedMetadataLeft, false);


    //rightSong = new Audio();
    rightSong = document.getElementById("rightsong");
    rightSong.addEventListener('MozAudioAvailable', audioAvailableRight, false);
    rightSong.addEventListener('loadedmetadata', loadedMetadataRight, false);

*/

    //graphicalEq = new GraphicalEq(sampleRate);
    //graphicalEq.recalculateFilters();

    // Set up the self-similarity image

    //var selfSimImgSrc="sites/localsite/collect/" + collect +"/bordered/line-interlaced/"+self_sim_png;
    var selfSimImgSrc = "sites/" + site + "/collect/" + collect + "/images/blank-775x816.png";
    //console.log("self sim img = " + selfSimImgSrc);

    var simPlayLine = document.getElementById("simPlayLineGroup");

    simPlayLine.setAttributeNS(SVG_NS,"transform","scale(1.0,"+yScaleSimImg+")");

    var selfSimImg = document.createElementNS(SVG_NS,"image");
    selfSimImg.setAttributeNS(XLINK_NS, "xlink:href", selfSimImgSrc);

    var selfSimImgWidth = barWidth;
    var selfSimImgHeight = barWidth * (815.0 / 776.0);

    selfSimImg.setAttribute("x",0);
    selfSimImg.setAttribute("y",0);
    selfSimImg.setAttribute("id","selfSimImgId");
    selfSimImg.setAttribute("width",barWidth);
    selfSimImg.setAttribute("height",selfSimImgHeight);
    selfSimImg.setAttribute("onclick","dualPlay(evt)");
    selfSimImg.setAttribute("onmousemove","playTrackerMove(evt)");
    selfSimImg.setAttribute("onmouseover","playTrackerOn(evt)");
    //selfSimImg.setAttribute("onmouseout","playTrackerOff(evt)");

    simPlayLine.appendChild(selfSimImg);

    var svgSimilarityPlay = document.getElementById("svgSimilarityPlay");

    //console.log("self sim height = " + selfSimImgHeight + " y scale = " + yScaleSimImg);
    svgSimilarityPlay.setAttribute("height",0 + selfSimImgHeight * yScaleSimImg); // used to be 30 +

    // Firefox doesn't seem to like this being initialized statically in the
    // generated HTML page, so add it dynamically here
    var playTrackerBusy = document.getElementById("playtrackerBusy");
    playTrackerBusy.setAttributeNS(XLINK_NS,"xlink:href",
                                   _diyImages + "/wait30trans.gif");
}



// Not called anymore // ****

function loadedMetadata() {
    // Mute baseSong audio.
    baseSong.volume = 0;

    console.error("This should not be called anymore");
}

function initPannedPlaybackSampleRate() {

    // Set up pannedPlayback for the left and right song streams to be mixed and routed through

    if (webAudioContext != null) {
        console.log("*** Web Audio filler for initPannedPlaybackSampleRate()");
    }
    else {
        console.info("similarity-matrix-play initPannedPlaybackSampleRate(): Setting up pannedPlayback with .mozSetup(2,"+baseSong.mozSampleRate+")");
        pannedPlayback.mozSetup(2, baseSong.mozSampleRate);
    }
}

function loadedMetadataLeft() {

    //console.log("*** similarity-matrix-play: loadedMetadataLeft()");

    // Mute leftSong audio.
    leftSong.volume = 0;

    if (webAudioContext != null) {

        var webAudioSource = webAudioContext.createMediaElementSource(leftSong);

        var webAudioJSProcessor = webAudioContext.createJavaScriptNode(1024,2,2);
        webAudioJSProcessor.onaudioprocess = audioAvailableLeft;

        // Connect the processing graph: source -> jsProcessor -> destination
        webAudioSource.connect(webAudioJSProcessor);
        webAudioJSProcessor.connect(webAudioContext.destination);
    }


    //leftSong.currentTime = leftStartTimeSecs;
    //leftSong.play();
    //console.log("left start time = " + leftStartTimeSecs);
}


function loadedMetadataRight() {

    //console.log("*** similarity-matrix-play: loadedMetadataRight()");

    // Mute rightSong audio.
    rightSong.volume = 0;

    if (webAudioContext != null) {

        var webAudioSource = webAudioContext.createMediaElementSource(rightSong);

        var webAudioJSProcessor = webAudioContext.createJavaScriptNode(1024,2,2);
        webAudioJSProcessor.onaudioprocess = audioAvailableRight;

        // Connect the processing graph: source -> jsProcessor -> destination
        webAudioSource.connect(webAudioJSProcessor);
        webAudioJSProcessor.connect(webAudioContext.destination);
    }


    //rightSong.currentTime = rightStartTimeSecs;
    //rightSong.play();
    //console.log("right start time = " + rightStartTimeSecs);

}
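
// A minimal sketch (not wired in anywhere) of how the duplicated Web Audio
// set-up in loadedMetadataLeft()/loadedMetadataRight() could be factored into
// one helper.  It assumes the same 'webAudioContext' global used above, and
// prefers the standard createScriptProcessor() when the browser provides it,
// falling back to the older createJavaScriptNode() call used in this file.
// The function name is hypothetical.
function connectSongProcessor(songElem, onAudioProcess)
{
    var source = webAudioContext.createMediaElementSource(songElem);

    var processor = webAudioContext.createScriptProcessor
        ? webAudioContext.createScriptProcessor(1024,2,2)
        : webAudioContext.createJavaScriptNode(1024,2,2);
    processor.onaudioprocess = onAudioProcess;

    // source -> processor -> destination
    source.connect(processor);
    processor.connect(webAudioContext.destination);

    return processor;
}
// e.g. connectSongProcessor(leftSong, audioAvailableLeft);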

// No longer used: originally the MozAudioAvailable handler for baseSong
function audioAvailable(event) {
    // Write the current framebuffer
    //alert("audioAvailable() should no longer be called");
    console.error("audioAvailable() should no longer be called");

    var frameBuffer = event.frameBuffer; // frameBuffer is Float32Array
    //writeAudio(frameBuffer);
}

// One-shot flag: force 'mysong' and the two song streams to zero volume the
// first time dual audio is processed
var smf = true;

function audioAvailableLeft(event)
{
    // Write the current framebuffer
    if (webAudioContext != null) {

        if (leftSong.paused) {
            // not processing/playing any new audio
            return;
        }

        if (smf) {
            console.log("*** audioAvailableLeft() forcing mysong to 0 vol");
            soundManager.setVolume("mysong",0.0);
            leftSong.volume = 0.0;
            rightSong.volume = 0.0;
            smf = false;
        }

        var inputArrayL = event.inputBuffer.getChannelData(0);
        var inputArrayR = event.inputBuffer.getChannelData(1);

        var inputLen = inputArrayL.length;

        var outputArrayL = event.outputBuffer.getChannelData(0);
        var outputArrayR = event.outputBuffer.getChannelData(1);

        // down-mix the left song to mono and pan it hard left
        for (var i=0; i<inputLen; i++) {
            outputArrayL[i] = (inputArrayL[i] + inputArrayR[i])/2.0;
            outputArrayR[i] = 0.0;
        }
    }
    else {

        if (smf) {
            console.log("*** audioAvailableLeft() forcing mysong to 0 vol");
            soundManager.setVolume("mysong",0.0);
            leftSong.volume = 0.0;
            rightSong.volume = 0.0;
            smf = false;
        }

        var frameBuffer = event.frameBuffer;
        writeAudio(frameBuffer,"left");
    }
}

function audioAvailableRight(event)
{
    // Write the current framebuffer

    if (webAudioContext != null) {

        if (rightSong.paused) {
            // not processing/playing any new audio
            return;
        }

        var inputArrayL = event.inputBuffer.getChannelData(0);
        var inputArrayR = event.inputBuffer.getChannelData(1);

        var inputLen = inputArrayL.length;

        var outputArrayL = event.outputBuffer.getChannelData(0);
        var outputArrayR = event.outputBuffer.getChannelData(1);

        // down-mix the right song to mono and pan it hard right
        for (var i=0; i<inputLen; i++) {
            outputArrayL[i] = 0.0;
            outputArrayR[i] = (inputArrayL[i] + inputArrayR[i])/2.0;
        }
    }

    else {
        var frameBuffer = event.frameBuffer;
        writeAudio(frameBuffer,"right");
    }
}




// Return the portion of 'audioBuffer' that falls after the sample offset
// 'startTime', given how many samples ('currentFullBufferSize') arrived
// before this buffer
function activeAudioBuffer(audioBuffer,currentFullBufferSize,startTime)
{
    var buffer;
    if (currentFullBufferSize < startTime) {
        // active audio starts at an offset within this segment of audioBuffer

        var position = startTime - currentFullBufferSize;
        buffer = audioBuffer.subarray(position);
    }
    else {
        buffer = audioBuffer;
    }
    return buffer;
}
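
// Illustrative usage only: activeAudioBuffer() is currently referenced just
// from the commented-out calls in writeAudio() below, along the lines of
//
//     var leftBuffer = activeAudioBuffer(audioBuffer, totalFullBufferSize, leftStartTimeOffset);
//     leftBuffers.push({buffer: leftBuffer, position: 0});
//
// where 'totalFullBufferSize' would be a running count of samples received so
// far for that channel (no such counter is kept in the current code).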

// Copy sample 's' of 'buffer' into slot 'd' of the interleaved stereo
// 'panned_buffer', silencing the adjacent (right) slot
function panLeftEffect(buffer,s,panned_buffer,d)
{
    panned_buffer[d] = buffer[s];
    if ((d+1) < panned_buffer.length) {
        panned_buffer[d+1] = 0;
    }
}

// Silence slot 'd' of the interleaved stereo 'panned_buffer' and copy sample
// 's' of 'buffer' into the adjacent (right) slot
function panRightEffect(buffer,s,panned_buffer,d)
{
    panned_buffer[d] = 0;
    if ((d+1) < panned_buffer.length) {
        panned_buffer[d+1] = buffer[s];
    }
}

// Apply 'panEffect' to 'buffer' from 'position' onwards, producing an
// interleaved stereo Float32Array
function panAudioBuffersHead(buffer,position,panEffect)
{
    var remaining_len = buffer.length - position;
    var panned_buffer;
    if (baseSong.mozChannels == 1) {
        panned_buffer = new Float32Array(remaining_len * 2);

        var d = 0;
        for (var s=position; s < buffer.length; s++) {
            panEffect(buffer,s,panned_buffer,d);
            d += 2;
        }
    }
    else {
        // assume stereo
        panned_buffer = new Float32Array(remaining_len);

        var d = 0;
        for (var s=position; s < buffer.length; s+=2) {
            panEffect(buffer,s,panned_buffer,d);

            d += 2;
        }
    }

    return panned_buffer;
}

// Merge the two hard-panned stereo buffers into a single interleaved stereo
// buffer, swapping channels as it goes
function mergeLeftAndRightPan(leftPannedBuffer,rightPannedBuffer)
{
    // left and right buffers guaranteed to be stereo samples

    // Deliberately crossed to fit the visual playTracker circle

    var leftLen = leftPannedBuffer.length;
    var rightLen = rightPannedBuffer.length;
    var maxLen = Math.max(leftLen,rightLen);

    var pannedBuffer = new Float32Array(maxLen);

    for (var i=0; i < maxLen; i+=2) {

        if (i < leftLen-1) {
            // i+1 is actually the right position
            pannedBuffer[i+1] = leftPannedBuffer[i];
        }
        else {
            pannedBuffer[i+1] = 0;
        }

        if (i < rightLen-1) {
            // actually the left position
            pannedBuffer[i] = rightPannedBuffer[i+1];
        }
        else {
            pannedBuffer[i] = 0;
        }
    }

    return pannedBuffer;
}
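
// A worked example (assumed values, not executed anywhere) of the crossed
// merge above: with interleaved stereo inputs
//   leftPannedBuffer  = [0.5, 0.0, 0.7, 0.0]   (left song, hard-panned left)
//   rightPannedBuffer = [0.0, 0.3, 0.0, 0.9]   (right song, hard-panned right)
// mergeLeftAndRightPan() returns [0.3, 0.5, 0.9, 0.7]: the left song's samples
// land in the right-channel slots and vice versa, matching the crossed L/R
// labelling in updateLeftAndRightTimes().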

// Queue an incoming frame buffer for the given channel ("left" or "right"),
// then, while both queues have data, write merged hard-panned audio to
// pannedPlayback and move the play tracker to the current playback position
function writeAudio(audioBuffer,channel)
{
    var mysong = document.getElementById("mysong");
    var barWidth = mysong.getElementWidth();

    var selfSimImg = document.getElementById("selfSimImgId");
    var imgWidth = selfSimImg.getAttribute("width");
    var imgHeight = selfSimImg.getAttribute("height") * yScaleSimImg;

    var fudgeWidth = imgWidth - (FrequencyPlotter.fudgeOffset + FrequencyPlotter.fudgeTrim);

    // audioBuffer is Float32Array
    var audioBufferLen = audioBuffer.length;

    if (channel.match("left")) {
        //var leftBuffer = activeAudioBuffer(audioBuffer,totalFullBufferSize,leftStartTimeOffset);
        leftBuffers.push({buffer: audioBuffer, position: 0});
    }

    if (channel.match("right")) {
        //var rightBuffer = activeAudioBuffer(audioBuffer,totalFullBufferSize,rightStartTimeOffset);
        rightBuffers.push({buffer: audioBuffer, position: 0});
    }


    // If there's buffered data in both left and right buffers,
    // then it's time to play something

    var currPlayedX = null;
    var currPlayedY = null;

    while ((leftBuffers.length > 0) && (rightBuffers.length > 0)) {

        if (preparingDualAudio) {
            var playTrackerBusy = document.getElementById("playtrackerBusy");
            playTrackerBusy.setAttribute("style","visibility: hidden;");
            preparingDualAudio = 0;
        }


        var leftBuffer = leftBuffers[0].buffer;
        var leftBufferPos = leftBuffers[0].position;
        var leftPannedBuffer = panAudioBuffersHead(leftBuffer,leftBufferPos,panLeftEffect);

        var rightBuffer = rightBuffers[0].buffer;
        var rightBufferPos = rightBuffers[0].position;
        var rightPannedBuffer = panAudioBuffersHead(rightBuffer,rightBufferPos,panRightEffect);

        var pannedBuffer = mergeLeftAndRightPan(leftPannedBuffer,rightPannedBuffer);

        // Send something to be played
        var written = pannedPlayback.mozWriteAudio(pannedBuffer);
        //pannedSink.writeBufferSync(pannedBuffer);


        //var written = pannedPlayback.mozWriteAudio(rightPannedBuffer);
        //var written = pannedPlayback.mozWriteAudio(leftPannedBuffer);

        // get the current play position, and plot it on selfSimImg
        var currentPlayedOffset = pannedPlayback.mozCurrentSampleOffset();
        //var currentPlayedOffset = pannedSink.getSyncWriteOffset();

        var sampleRate = pannedPlayback.mozSampleRate;
        var numChannels = pannedPlayback.mozChannels;
        //var sampleRate = pannedSink.sampleRate;
        //var numChannels = pannedSink.channelCount;

        var pannedDurationLeft = duration - leftStartTimeSecs;
        var pannedDurationRight = duration - rightStartTimeSecs;

        var totalNumSamplesLeft = sampleRate * numChannels * pannedDurationLeft;
        var totalNumSamplesRight = sampleRate * numChannels * pannedDurationRight;

        var currentPlayedPercLeft = currentPlayedOffset/totalNumSamplesLeft;
        var currentPlayedPercRight = currentPlayedOffset/totalNumSamplesRight;


        var currPlayedXPrecise = startPlayX + (currentPlayedPercLeft*pannedPlayWidth);
        //var currPlayedYPrecise = startPlayY - (currentPlayedPercRight*pannedPlayHeight);
        var currPlayedYPrecise = startPlayY + (currentPlayedPercRight*pannedPlayHeight); // **** adjusted

        currPlayedX = Math.round(currPlayedXPrecise);
        currPlayedY = Math.round(currPlayedYPrecise);


        if ((currPlayedX != prevPlayedX) || (currPlayedY != prevPlayedY)) {

            updateLeftAndRightTimes(currPlayedXPrecise,currPlayedYPrecise);

            var playTracker = document.getElementById("playtracker");
            var transform = "translate("+currPlayedX+","+currPlayedY+")";
            playTracker.setAttribute("transform",transform);

            if ((prevPlayedX == -1) && (prevPlayedY == -1)) {
                // notice the first time this happens
                playTracker.setAttribute("style","visibility: visible");
                playTrackerMode = "playing";
            }

            prevPlayedX = currPlayedX;
            prevPlayedY = currPlayedY;
        }

        // console.log("currently playing = " + currentPlayedSecs);


        // If all the data wasn't written, keep the remainder in the buffers:
        var doBreak = false;

        if (leftBufferPos + written < leftBuffer.length) {
            leftBuffers[0].position = leftBufferPos + written;
            doBreak = true;
        }
        else {
            leftBuffers.shift();
        }

        if (rightBufferPos + written < rightBuffer.length) {
            rightBuffers[0].position = rightBufferPos + written;
            doBreak = true;
        }
        else {
            rightBuffers.shift();
        }

        if (doBreak) {
            break;
        }
    }

    var currentPlayedOffset = pannedPlayback.mozCurrentSampleOffset();
    //var currentPlayedOffset = pannedSink.getSyncWriteOffset();

    //if (currentPlayedOffset == prevPlayedOffset) {
    // Nothing more is playing

    //var exceededX = ((currPlayedX != null) && (currPlayedX>barWidth));
    var exceededX = ((currPlayedX != null) && (currPlayedX>fudgeWidth));
    //var exceededY = ((currPlayedY != null) && (currPlayedY<0));
    var exceededY = ((currPlayedY != null) && (currPlayedY>imgHeight)); // **** adjusted

    if ((exceededX || exceededY) && (!playTrackerMode.match("stopped"))) {

        // console.log("detected stop condition");

        // reached the edge of the play area
        playTrackerMode = "stopped";
        togglePlayVisual(document.getElementById("mainPlayButton"));
        playTrackerOff(null);

        console.log("*** setting 'mysong' volume back to 100%");
        soundManager.setVolume("mysong",100);

        //prevPlayedOffset = -1;
    }
    else {
        //prevPlayedOffset = currentPlayedOffset;
    }
}