forked from IDMIL/DigitalAudioWorkbench
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathwidget_delta.js
More file actions
989 lines (926 loc) · 43.4 KB
/
widget_delta.js
File metadata and controls
989 lines (926 loc) · 43.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
// Maximum quantization bit depth offered by the bitDepth setting/slider.
const BIT_DEPTH_MAX = 16;
// Sample rate (Hz) used for all Web Audio playback buffers below.
const WEBAUDIO_MAX_SAMPLERATE = 96000;
// Panel/slider grid column count — presumably the layout default; not read in this chunk (TODO confirm).
const NUM_COLUMNS = 2;
// Capacity of the harmonicFreqs/harmonicAmps Float32Arrays in `settings`.
const MAX_HARMONICS = 100;
function new_widget(panels, sliders, buttons, elem_id, elem_id2, margin_size, width_factor=1.0, height_factor=1.0) {sketch = p => {
//These define the different pages, which edit the text and panels/sliders whenever the next/prev buttons are pressed
/*
TO ADD A NEW PAGE:
1. Add a new case to the switch statement. If adding a page between two existing pages, this unfortunately requires updating the case number for all subsequent pages.
2. Set contentWrap.elt.innerHTML to the content displayed at the top of the page. Please use HTML script similar to existing pages (eg. header at the top, regular line breaks...)
3. To add a new panel, call updatePanel(panels, "name_of_your_panel", true);
To remove a panel, call the same function with false instead of true.
Similarly, with sliders, you can call updateSlider(sliders, "propName_of_your_slider", true);
To show or hide various buttons, call yourButton.show() or yourButton.hide() respectively.
To set the parameter of the simulation, call settings.propName = your_new_value; , replacing the propName with the desired setting.
Example: settings.fundFreq = 440; sets the fundamental frequency to 440 Hz
Note this only updates the panels/audio simulations, not the sliders
!! Whenever adding a new element, please remove that element from the preceding page! Otherwise, it will still be present when the user goes backwards.
4. Call break; at the end of the case statement!
Basic elements:
panels:
List of panels used in the widget: refer by name
Available panels:
"Input Signal Time Domain"
"Input Signal Time Domain with Delta Modulation"
"Reconstructed Signal Time Domain"
"Reconstructed Signal Time Domain using Delta Modulation"
"Input Signal FFT"
"Reconstructed Signal FFT"
"Reconstructed Signal using Delta Modulation FFT"
"Sampling Signal Time Domain"
"Sampling Signal Frequency Domain"
"Sampled Signal Time Domain"
"Sampled Signal FFT"
"Quantization Noise Time Domain"
"Quantization Noise FFT"
"Input with Sampled Signal Time Domain"
"Input (solid), Sampled (lollipop), Reconstructed (dotted), Time Domain"
sliders:
List of sliders used in the widget: refer by propName
Available sliders:
"fundFreq"
"numHarm"
"downsamplingFactor" (sampling rate)
"downsamplingFactorDelta"
"deltaStep"
"dither"
"bitDepth"
"amplitude"
"antialiasing"
"phase"
"ampZoom"
"timeZoom"
"freqZoom"
buttons:
The buttons that appear in the widget
Available buttons:
originalButton (plays original sound)
reconstructedButton (plays reconstructed sound)
reconstructedDeltaButton (plays reconstructed sound using delta modulation)
quantNoiseButton (plays quantization noise)
quantNoiseDeltaButton (plays quantization noise using delta modulation)
adaptiveSwitchButton (switches between adaptive and non-adaptive delta modulation)
settings:
The simulation parameters currently loaded. Same as the slider's propName.
*/
/**
 * Switches the tutorial to page `pageNum`: replaces the explanatory HTML at
 * the top of the widget and shows/hides the panels, sliders, and playback
 * buttons that page needs. Each case only toggles the elements that differ
 * from its neighbouring pages, so pages must be visited in order via the
 * next/prev buttons for the visible state to stay consistent.
 * Template-literal HTML below is runtime content — do not reformat it.
 */
function updatePage(pageNum) {
  switch(pageNum) {
    //Chapter 1: Waveform Building
    case 0:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 1: Waveform Building
</H1>
<hr>
Let's start with the basics. A sound is created by a <i>variation of pressure</i> through the air (or some other medium).<br>
The characteristics of this sound depend on the characteristics of the variation. <br>
If the variation of pressure (i.e. the <i>amplitude</i>) is very large, the sound will be loud. <br>
If the pressure changes rapidly, we say that the sound signal has a high <i>frequency</i>, and the pitch will be high.<br><br>
In order to visualize a sound, we usually plot its amplitude as a function of time. The simplest such sound is a <i>sine wave</i>, which looks like the function in the panel below.<br>
A sine wave only has one frequency at a certain amplitude, and can be written as: (amplitude)*sin(frequency*time)<br>
Try playing around with the frequency slider below and press the "Play original" button to see what it sounds like.<br>
<b>Careful not to hurt your ears!</b>`;
      updatePanel(panels, "Input Signal Frequency Domain", false);
      break;
    case 1:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 1: Waveform Building
</H1>
<hr>
In the right panel, a vertical line at 440 Hz represents the sine wave frequency component.<br>
Remember that a sine (or a cosine) wave has only one frequency component.<br>
In other words, it represents a <i>simple harmonic motion</i> such as the motion of an ideal pendulum or a tuning fork.`;
      // Pin the input to 440 Hz so the text's "vertical line at 440 Hz" matches the panel.
      settings.fundFreq = 440;
      updatePanel(panels, "Input Signal Frequency Domain", true);
      updateSlider(sliders, "numHarm", false);
      break;
    case 2:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 1: Waveform Building
</H1>
<hr>
However, in the real world, sounds aren't just composed of a single frequency component. Usually, on top of the main frequency, sounds will also have a multitude of smaller<br> frequency components called <i>harmonics</i>, situated at integer multiples of the original frequency.<br>
You may now add harmonics to the generated waveforms. You may choose to have only even or odd-integer harmonics, as well as different harmonic schemes.<br>
Try playing around with the parameters. Can you build a waveform with:
<ul>
<li>Square Waves?</li>
<li>Triangular Waves?</li>
<li>Sawtooth Waves?</li>
</ul>
What do each of these sound like?
`;
      updateSlider(sliders, "numHarm", true);
      // Pre-hide the Chapter 2 elements so stepping backwards from page 3 leaves a clean state.
      updatePanel(panels, "Sampling Signal Time Domain", false);
      updateSlider(sliders, "downsamplingFactor", false);
      updatePanel(panels, "Input with Sampled Signal Time Domain", false);
      break;
    //Chapter 2: Sampling a Waveform in the Time Domain
    case 3:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
We're now interested in what happens when trying to record a signal in the real world. For sound, an input signal would be some kind of continuous signal, whether analogue<br>
or acoustic and would be captured either directly or by a microphone. In this case, we have a sinusoidal waveform.<br>
Before the continuous signal can be converted into a set of 0's and 1's, it must be sampled. A simple one-dimensional sampling system would be represented by: y[n] = x(nT<sub>s</sub>)<br>
This means that we simply measure the amplitude of the signal every T<sub>s</sub> seconds.
The bottom right panel represents said sampling method (the <i>impulse train</i>) that will poll the input x at time [n].<br>
The bottom left panel shows the resulting samples with amplitudes corresponding to the polled input signal.
`;
      settings.numHarm = 1;
      updateSlider(sliders, "numHarm", false);
      updateSlider(sliders, "downsamplingFactor", true);
      updatePanel(panels, "Input with Sampled Signal Time Domain", true);
      updatePanel(panels, "Sampling Signal Time Domain", true);
      reconstructedButton.hide();
      break;
    case 4:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
After having measured the amplitude at each point shown, we will end up with a sequence of numbers representing our sound. Once converted to binary, this will be our sound file.<br>
From there, we can reconstruct what we think the input sound is. You may now listen to the reconstruction using the button below the page.
`;
      reconstructedButton.show();
      break;
    case 5:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
Try putting the sample rate to its minimum value. What do you see happening in the polled input signal?
`;
      break;
    case 6:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
From now on, we will also show you the frequency domain of the reconstructed waveform. This is what we think the input signal's frequency is, based off the information we measure.<br>
Now, set the input signal frequency to 150 Hz. You may do this using the textboxes and "Update" buttons. How many samples do you get in each period?
`;
      updatePanel(panels, "Sampling Signal Time Domain", false);
      updatePanel(panels, "Reconstructed Signal FFT", true);
      break;
    case 7:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
Now, what happens when you increase the input frequency?<br>
In particular, try making it so that the input frequency is exactly half the sampling frequency.<br>
What happens to the location of the samples? What should the resulting waveform sound like?
`;
      break;
    case 8:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
You should've seen that when the sampling frequency is exactly twice that of the input frequency, <i>no variation</i> is detected by the sampling process. This would mean our recording has no sound!<br>
This is called the "<i>Nyquist frequency</i>", represented by the edge of the gray area in the frequency domain. We will now explore methods to deal with sounds whose frequency approaches the Nyquist.<br>
To start with, you may now control the phase of the input signal relative to the samples. What do you notice when you shift the phase of the input by a little bit?<br>
`;
      updateSlider(sliders, "phase", true);
      break;
    case 9:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
You may have seen that by shifting the phase, we are able to gain more information about the input signal than was previously available.<br>
Now, setting the phase back to zero, try decreasing the input frequency slightly below Nyquist.<br>
What do you notice? Can you tell what the resulting frequency would be? What happens if the input is slightly above Nyquist?<br>
`;
      // Reset the phase for the user (the text asks them to, but the slider is hidden from here on).
      settings.phase = 0;
      updateSlider(sliders, "phase", false);
      updateSlider(sliders, "numHarm", false);
      break;
    case 10:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
Near the Nyquist frequency, we see that the input frequency gets "duplicated" on either side of the Nyquist.<br>
This is an example of "<i>signal folding</i>", which happens due to the samples getting chosen at inconvenient spots in the input signal.<br>
As an extreme case, with the sampling rate at 3000 Hz, what happens when the input signal is at 2900? What happens if the input signal is way higher than 3000 Hz?<br>
`;
      break;
    case 11:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
When the input signal is only 100 Hz below the sampling frequency, the reconstruction thinks that we're measuring a 100 Hz signal! This is, once again, an example of signal folding.<br>
In general, the reconstructed signal is unable to distinguish sounds above the Nyquist frequency. This means the frequency range of our recordings is limited by the Nyquist.<br>
Now, set the input signal frequency to 750 and the number of odd 1/x harmonics to 2.<br>
With the sampling rate at 3000 Hz, do you notice something in how the input signal is being sampled?
`;
      updateSlider(sliders, "numHarm", true);
      settings.phase = 0;
      break;
    case 12:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
You may have noticed that the samples fell on the same spots whether or not you had 1 or 2 harmonics. This is because the second harmonic fell exactly on the Nyquist frequency (1500 Hz).<br>
Thus both the measurement of the fundamental frequency and its harmonics can be affected by our sampling process.<br>
This is something that must be taken into account as many sounds contain frequencies above the sampling range and this must be filtered out to prevent ghosting.
`;
      break;
    case 13:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 2: Sampling a Waveform in the Time Domain
</H1>
<hr>
The hearing range of the average human goes from 20 Hz to around 20,000 Hz.<br>
Using the information presented so far, can you explain why most recording devices use sampling rates of 48,000 Hz?
`;
      // Restore the full Chapter 1/2 element set so stepping backwards from page 14 is consistent.
      updateSlider(sliders, "fundFreq", true);
      updateSlider(sliders, "numHarm", true);
      updateSlider(sliders, "downsamplingFactor", true);
      updateSlider(sliders, "phase", true);
      updatePanel(panels, "Input Signal Frequency Domain", true);
      updatePanel(panels, "Input with Sampled Signal Time Domain", true);
      updatePanel(panels, "Reconstructed Signal FFT", true);
      updatePanel(panels, "Input Signal Time Domain with Delta Modulation", false);
      reconstructedButton.show();
      break;
    //Add more pages here
    //Chapter 5: Delta Modulation
    case 14:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
So far, we've only looked at one sampling method, which is the simplest and most standard approach to recording sound. However, there exist many other methods, and each<br>
comes with its own advantages and limitations.<br>
For example,
In this chapter, we will cover a method called "<i>Delta Modulation</i>" sampling. The idea is that, instead of measuring the amplitude of the signal at one point, we will<br>
just compare the current amplitude with the previous one.<br>
Here's how we would implement this: Start the recording by measuring the current amplitude of the signal.<br>
At some time T<sub>s</sub> afterwards, if the amplitude is greater than the previous, assign the bit "1". If it is smaller, assign the bit "0".<br>
Update the current amplitude by adding or subtracting a "delta step". This is a fixed amplitude that must be decided on beforehand.<br>
As T<sub>s</sub> becomes very small, we will be able to reconstruct the shape of the waveform!
`;
      // Reset the simulation to a simple 440 Hz sine before introducing delta modulation.
      settings.fundFreq = 440;
      settings.numHarm = 1;
      settings.phase = 0;
      settings.timeZoom = 2;
      updateSlider(sliders, "fundFreq", false);
      updateSlider(sliders, "phase", false);
      updateSlider(sliders, "numHarm", false);
      updateSlider(sliders, "downsamplingFactor", false);
      updatePanel(panels, "Input with Sampled Signal Time Domain", false);
      updatePanel(panels, "Reconstructed Signal FFT", false);
      updatePanel(panels, "Input Signal Time Domain with Delta Modulation", true);
      reconstructedButton.hide();
      updatePanel(panels, "Input Signal Time Domain", true);
      updatePanel(panels, "Input Signal Frequency Domain", false);
      updatePanel(panels, "Reconstructed Signal Time Domain using Delta Modulation", false);
      updatePanel(panels, "Reconstructed Signal using Delta Modulation FFT", false);
      reconstructedDeltaButton.hide();
      break;
    case 15:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
From now on, we will show you the input signal overlaid with the delta modulation steps, along with the reconstruction using the same algorithm.<br>
You also have access to the frequency domain of the input and reconstructed signal.<br>
Try playing around with the input signal and listening to the reconstructed sound. In what situations does the delta modulation algorithm fail to reproduce the input signal? Why?
`;
      updateSlider(sliders, "fundFreq", true);
      updateSlider(sliders, "numHarm", true);
      updatePanel(panels, "Input Signal Time Domain", false);
      updatePanel(panels, "Input Signal Frequency Domain", true);
      updatePanel(panels, "Reconstructed Signal Time Domain using Delta Modulation", true);
      updatePanel(panels, "Reconstructed Signal using Delta Modulation FFT", true);
      reconstructedDeltaButton.show();
      updateSlider(sliders, "downsamplingFactorDelta", false);
      break;
    case 16:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
You might have noticed that, at high input frequencies, the reconstruction is unable to "keep up" with the rapid change in amplitude. This leads to the creation of small triangular waves in the reconstruction.<br>
This is known as <i>waveform overloading</i>, and it causes the amplitude of the high frequency components to be attenuated (notice the spike in the frequency domain is much smaller).<br>
There are a few ways to fix this problem. For instance, try increasing the sampling frequency and see how the reconstruction is affected.<br>
Does the reconstruction sound better for high frequency inputs?
`;
      updateSlider(sliders, "downsamplingFactorDelta", true);
      break;
    case 17:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
When the sampling frequency is sufficiently high, the algorithm is now able to follow rapid changes in the amplitude.<br>
Notice that since we are only assigning a single bit at each step, we are able to sample the signal much more frequently than in our previous sampling method.<br>
For a typical delta modulation algorithm, the sampling frequency can go to 4MHz or higher.<br>
However, this introduces an additional problem. At the maximum sampling frequency, try sending a low-frequency input signal with no harmonics. Do you notice anything abnormal in the reconstruction?<br>
`;
      updateSlider(sliders, "deltaStep", false);
      break;
    case 18:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
When the input signal stays stable for a certain amount of time, the delta modulation rapidly oscillates around that value, as it is only able to increase or decrease by a fixed step.<br>
This creates a distinctive "buzzing" noise in the reconstruction, which you may have noticed by playing around with the parameters.<br>
In many ways, this is similar to the quantization phenomenon we saw previously.<br>
In order to reduce this, we can decrease the delta step. So far the delta step has increased or decreased by 5% of the amplitude range each time.<br>
Try playing around with this now. Are you able to get rid of the buzzing?
`;
      updateSlider(sliders, "deltaStep", true);
      break;
    case 19:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
At very low delta steps, the reconstruction is much more sensitive to the details of the input signal, which gets rid of the buzzing sound from earlier.<br>
However, you might have noticed a new problem. Whenever there are large changes in the input, the reconstruction needs many more steps to catch up to it.<br>
So, if the step is too <i>low</i>, waveform overloading becomes a problem again, and high-frequency sounds get attenuated.<br>
If the step is too <i>high</i>, quantization becomes an issue, and low-frequency sounds will contain buzzing.<br>
In other words, it seems we need to adjust the delta step according to the behaviour of the input signal...
`;
      adaptiveSwitchButton.hide();
      break;
    case 20:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
Here's how we can change the algorithm to do this. If the input signal is higher than our current amplitude, add the delta step as usual.<br>
If the input signal is higher twice in a row, add twice the delta step. If this happens three times in a row, add three times the delta step and so on.<br>
Once the input signal is lower than our current amplitude, reset the multiplier back to 1.<br>
In other words, consecutive changes in the delta modulation increase the delta step.<br>
This means that the reconstruction can react to abrupt changes in the input, while still capturing the details of the sections where the input is stable.<br>
`;
      break;
    case 21:
      contentWrap.elt.innerHTML = `
<H1>
Chapter 5: Delta Modulation
</H1>
<hr>
This algorithm is known as "<i>adaptive delta modulation</i>". You may now switch between the adaptive and non-adaptive version using the button at the bottom.<br>
Try playing around with different situations where the non-adaptive version had problems. Does the adaptive version improve?
`;
      adaptiveSwitchButton.show();
      break;
  }
  // Re-run layout so newly shown/hidden panels and sliders land in their grid slots.
  reorderPanels();
  reorderSliders();
}
/*
elem_id:
Tells the widget in which Div class to place the buttons in (Questions or answers etc)
elem_id2:
Tells the widget which div with the according class name to take into account for placing the widget in height terms/
margin_size:
Used to place the upload buttons on a specific place.
width_factor:
By default is 1 and determines the width of the widget
height_factor:
By default is 1 and determines the height of the widget
*/
// Resolve the DOM element the playback buttons will be parented to (may stay
// undefined when no elem_id is given; note buttonSetup dereferences it for
// most buttons regardless — TODO confirm callers always pass a valid id).
var element = undefined;
console.log(elem_id);
if (elem_id) {
  element = document.getElementById(elem_id);
  console.log(element.id);
  console.log(element.clientHeight, element.clientWidth);
}
// Elements whose height offsets the widget vertically (see p.windowResized).
var intro_text = document.getElementsByClassName(elem_id2);
var intro_height = 0;
var numPanels = panels.length;
var numSliders = sliders.length;
var old_x = 220; // NOTE(review): not read anywhere in this chunk — possibly dead.
// Layout state shared by resize(), p.draw, and p.windowResized.
let panelHeight, panelWidth, sliderWidth, sliderHeight, numColumns, contentWrap;
// Initial layout pass with a nominal 1080x1920 canvas so panelWidth is set
// before the FFT sizing below; the real size comes from p.windowResized.
resize(1080, 1920);
// set display and fftSize to ensure there is enough data to fill the panels when zoomed all the way out
let fftSize = p.pow(2, p.round(p.log(panelWidth/minFreqZoom) / p.log(2)));
let displaySignalSize = p.max(fftSize, panelWidth/minTimeZoom) * 1.1; // 1.1 for 10% extra safety margin
let fft = new FFTJS(fftSize);
// Central parameter/state bag for the simulation. Sliders write into it,
// renderWaves reads it, and panels draw from the signal buffers it holds.
var settings =
  { amplitude : 1.0
  , fundFreq : 1250 // input signal fundamental freq
  , sampleRate : WEBAUDIO_MAX_SAMPLERATE
  , downsamplingFactor : 2 // sampleRate / downsamplingFactor = effective sampling rate
  , downsamplingFactorDelta : 2 // same, for the delta-modulation path
  , numHarm : 1 //Number of harmonics
  , harmType : "Odd" // Harmonic series to evaluate - Odd, even or all
  , harmSlope : "1/x" // Amplitude scaling for harmonics. can be used to create different shapes like saw or square
  , harmonicFreqs : new Float32Array(MAX_HARMONICS) //Array storing harmonic frequency in hz
  , harmonicAmps : new Float32Array(MAX_HARMONICS) //Array storing harmonic amp (0-1.0)
  , phase : 0.0 // phase offset for input signal
  , fftSize : fftSize
  , bitDepth : BIT_DEPTH_MAX //quantization bit depth
  , quantType : "midRise" // type of quantization
  , dither : 0.0 // amplitude of white noise added to signal before quantization
  , antialiasing : 0 // antialiasing filter order
  // Display-length signal buffers (sized for the widest zoom level).
  , original: new Float32Array(displaySignalSize)
  , downsampled: new Float32Array(1) // this gets re-inited when rendering waves
  , downsampledDelta: new Float32Array(1)
  , reconstructed: new Float32Array(displaySignalSize)
  , reconstructedDelta: new Float32Array(displaySignalSize)
  , stuffed: new Float32Array(displaySignalSize)
  , quantNoiseStuffed: new Float32Array(displaySignalSize)
  , quantNoiseStuffedDelta: new Float32Array(displaySignalSize)
  , quantNoise: new Float32Array(displaySignalSize)
  // Playback ("_pb") buffers: soundTimeSeconds of audio at the max sample rate.
  , original_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
  , reconstructed_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
  , reconstructedDelta_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
  , quantNoise_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
  , quantNoiseDelta_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
  // Interleaved complex spectra produced by the FFTJS instance above.
  , originalFreq : fft.createComplexArray()
  , stuffedFreq : fft.createComplexArray()
  , reconstructedFreq : fft.createComplexArray()
  , reconstructedDeltaFreq : fft.createComplexArray()
  , quantNoiseFreq : fft.createComplexArray()
  , quantNoiseDeltaFreq : fft.createComplexArray()
  , snd : undefined // AudioContext, created lazily on first playback
  , maxVisibleFrequency : WEBAUDIO_MAX_SAMPLERATE / 2 // Nyquist of the playback rate
  , freqZoom : 1.0 //X axis zoom for frequency panels
  , ampZoom : 1.0 // Y axis zoom for all panels
  , timeZoom: 1.0 // X axis zoom for signal panels
  , deltaFrequency: 96000 // delta-modulation sampling frequency (Hz)
  , deltaStep: 0.05 // fixed delta step, as a fraction of the amplitude range
  , deltaType: "non-adaptive"
  , adaptiveNumSteps: 3 //Number of consecutive steps needed to trigger adaptive delta modulation
  , element : element
  , margine_size : margin_size+20 // NOTE(review): key is misspelled ("margine") — keep as-is, external code may read it
  , p5: undefined // filled in by p.setup
  , render : undefined // filled in by p.setup (renderWaves)
  , play : undefined // filled in by p.setup (playWave)
  };
p.settings = settings;
var renderWaves = renderWavesImpl(settings, fft, p);
/**
 * p5 setup hook: creates the canvas and intro div, wires the settings
 * back-references, builds panels/sliders/buttons, and schedules a first draw.
 */
p.setup = function () {
  // Give the settings bag handles back to the sketch and its render/play helpers.
  settings.p5 = p;
  settings.render = renderWaves;
  settings.play = playWave;
  p.createCanvas(p.windowWidth, p.windowHeight + 500);
  console.log(p.windowWidth, p.windowHeight);
  p.textAlign(p.CENTER);
  // Intro/instruction area; updatePage() rewrites its innerHTML per page.
  contentWrap = p.createDiv();
  contentWrap.id("content-wrap");
  contentWrap.position(0, 100);
  contentWrap.class("title qs");
  // BUG FIX: was `innerHtml` (wrong case). DOM property names are
  // case-sensitive, so the old assignment just set a useless expando
  // property and left the div empty until the first page change.
  contentWrap.elt.innerHTML = `
<H1>
Waveforms
</H1>
<hr>
<p id = "main page">
Leave blank
</p>`;
  panels.forEach(panel => panel.setup(p, panelHeight, panelWidth, settings));
  sliders.forEach(slider => slider.setup(p, settings));
  sliders.forEach(slider => slider.updateValue(p));
  renderWaves();
  buttonSetup();
  p.windowResized();
  // Event-driven sketch: redraw only on user interaction, plus one delayed
  // draw so panels appear after the initial layout settles.
  p.noLoop();
  setTimeout(p.draw, 250);
};
/**
 * p5 draw hook: renders each panel into its offscreen buffer, then blits the
 * buffers onto the canvas in a numColumns-wide grid below the intro text.
 */
p.draw = function () {
  console.log("Page Num:", pageNum);
  // Pass 1: let every panel redraw its own buffer.
  for (const panel of panels) {
    panel.drawPanel();
  }
  // Pass 2: lay the buffers out row-major, offset by the intro height.
  for (const [idx, panel] of panels.entries()) {
    const col = p.floor(idx % numColumns);
    const row = p.floor(idx / numColumns);
    p.image(panel.buffer, col * panelWidth, row * panelHeight + intro_height);
  }
};
/**
 * p5 resize hook: recomputes the panel/slider layout for the new window size,
 * resizes the canvas and panel buffers, then repositions sliders, playback
 * buttons, and the checkbox column.
 */
p.windowResized = function() {
  console.log(p.windowWidth, p.windowHeight);
  let w = width_factor * p.windowWidth - 20; // TODO: get panel bezel somehow instead of hardcoded 20
  let h = height_factor * p.windowHeight - 20;
  resize(w, h);
  intro_height = contentWrap.elt.clientHeight;
  p.resizeCanvas(w, h);
  panels.forEach(panel => panel.resize(panelHeight, panelWidth));
  // X position for each slider column.
  let sliderPosX = new Array(numColumns).fill(1);
  sliderPosX.forEach((pos, index) => {
    sliderPosX[index] = 120 + index * sliderWidth / numColumns + index * 70;
  });
  // BUG FIX: `yoffset` (and the second `y` below) were assigned without a
  // declaration, creating implicit globals — a ReferenceError under strict
  // mode. Declared locally; nothing in this file reads them from outside.
  // NOTE(review): confirm no external code relied on window.yoffset.
  let yoffset = intro_height + p.ceil(numPanels / numColumns) * panelHeight + 100;
  console.log("slider position", sliderPosX, yoffset);
  console.log("sliders:", sliders);
  sliders.forEach((slider, index) => {
    let y;
    if (numColumns == 2) { y = yoffset + (p.floor(index / numColumns)) * sliderHeight; }
    else { y = yoffset + index * sliderHeight; }
    slider.resize(sliderPosX[index % numColumns], y, sliderWidth / numColumns, p);
  });
  // Button row sits just below the last slider row.
  let y;
  if (numColumns == 2) { y = yoffset + (p.ceil(numSliders / numColumns)) * sliderHeight + 30; }
  else { y = yoffset + numSliders * sliderHeight + 30; }
  let x = margin_size;
  //originalButton.position(x + 20, y);
  console.log(x + 20, 400, yoffset);
  // Chain the playback buttons left-to-right off originalButton.
  originalButton.position(x + 20, y);
  reconstructedButton.position(originalButton.x + originalButton.width + 25, originalButton.y - 8);
  quantNoiseButton.position(reconstructedButton.x + reconstructedButton.width + 25, reconstructedButton.y);
  reconstructedDeltaButton.position(quantNoiseButton.x + quantNoiseButton.width + 25, quantNoiseButton.y);
  quantNoiseDeltaButton.position(reconstructedDeltaButton.x + reconstructedDeltaButton.width + 25, reconstructedDeltaButton.y);
  adaptiveSwitchButton.position(quantNoiseDeltaButton.x + quantNoiseDeltaButton.width + 25, quantNoiseDeltaButton.y);
  updateButton.position(adaptiveSwitchButton.x + adaptiveSwitchButton.width + 40, adaptiveSwitchButton.y);
  // Checkbox column stacked under the first button.
  timeZoomSliderCheckbox.position(originalButton.x, originalButton.y + originalButton.height * 1.1);
  inputFrequencySliderCheckbox.position(originalButton.x, originalButton.y + 2 * originalButton.height * 1.1);
  samplingFrequencySliderCheckbox.position(originalButton.x, originalButton.y + 3 * originalButton.height * 1.1);
  deltaStepSliderCheckbox.position(originalButton.x, originalButton.y + 4 * originalButton.height * 1.1);
  numHarmSliderCheckbox.position(originalButton.x, originalButton.y + 5 * originalButton.height * 1.1);
  inputDeltaPanelCheckbox.position(originalButton.x, originalButton.y + 6 * originalButton.height * 1.1);
  reconstructedDeltaPanelCheckbox.position(originalButton.x, originalButton.y + 7 * originalButton.height * 1.1);
  freqPanelsCheckbox.position(originalButton.x, originalButton.y + 8 * originalButton.height * 1.1);
};
/**
 * Recomputes the closure layout variables (numColumns, panelWidth,
 * panelHeight, sliderWidth, sliderHeight) for a w x h drawing area.
 * @param {number} w - available width in px
 * @param {number} h - available height in px
 */
function resize(w, h) {
  // One column when the window is narrow or the panel count is odd.
  if (w < 800 || (numPanels % 2 == 1)) numColumns = 1;
  else numColumns = 2;
  let panelRows = Math.ceil((numPanels + 1) / numColumns);
  panelWidth = w / numColumns;
  sliderWidth = w - 300; // was: w / numColumns - 200 (disabled by an accidental `//` in the original)
  panelHeight = h / panelRows;
  sliderHeight = 45;
  // BUG FIX (dead code removed): the original followed `sliderHeight = 45`
  // with `if (sliderHeight < 30) { ... }`, so the branch that shrank
  // panelHeight to keep sliders from being squished could never execute.
  // Removing it preserves behavior exactly; if squish protection is wanted,
  // sliderHeight must be computed from h before clamping.
}
function buttonSetup() {
// Builds every control for this widget: page-navigation buttons, audio
// playback buttons, the adaptive-modulation toggle, and the
// "Add/Remove Widgets" options panel with its checkboxes.
// Relies on closure state: p (p5 instance), settings, element, buttons
// (list of enabled control ids), pageNum, initialPageNum, panels, sliders.
// NOTE(review): every control here is assigned to an undeclared
// identifier (implicit global) — confirm these are declared in the
// enclosing scope; in strict-mode/module contexts they would throw.
// --- page navigation --------------------------------------------------
nextButton = p.createButton("Next Page");
nextButton.position(p.windowWidth/2+38,13,"absolute");
nextButton.class("button_round");
nextButton.mousePressed( () => {
pageNum++;
console.log("pageNum:", pageNum);
updatePage(pageNum);
p.windowResized();
redraw();
})
prevButton = p.createButton("Prev. Page");
prevButton.position(p.windowWidth/2-158,13,"absolute");
prevButton.class("button_round");
prevButton.mousePressed( () => {
pageNum--;
console.log("pageNum:", pageNum);
updatePage(pageNum);
p.windowResized();
redraw();
})
// Apply the starting page before building the remaining controls.
updatePage(initialPageNum);
// --- playback buttons -------------------------------------------------
// Each handler re-renders the playback waves, lazily creates the shared
// AudioContext on first use, and plays the corresponding buffer.
originalButton = p.createButton("Play original");
originalButton.mousePressed( () => {
renderWaves(true);
if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)();
playWave(settings.original_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd);
});
// NOTE(review): unlike the other playback buttons, originalButton is
// never parented to the widget element — confirm this is intentional.
//originalButton.parent(element.id);
if(!buttons.includes("original")){
originalButton.hide();
}
originalButton.class("button");
reconstructedButton = p.createButton("Play reconstructed");
reconstructedButton.mousePressed( () => {
renderWaves(true);
if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)();
playWave(settings.reconstructed_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd);
});
reconstructedButton.parent(element.id);
if(!buttons.includes("recon")){
reconstructedButton.hide();
}
reconstructedButton.class("button");
quantNoiseButton = p.createButton("Play quantization noise");
quantNoiseButton.mousePressed( () => {
renderWaves(true);
if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)();
playWave(settings.quantNoise_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd);
});
quantNoiseButton.parent(element.id);
if(!buttons.includes("quant")){
quantNoiseButton.hide();
}
quantNoiseButton.class("button");
reconstructedDeltaButton = p.createButton("Play reconstructed delta modulation");
reconstructedDeltaButton.mousePressed( () => {
renderWaves(true);
if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)();
playWave(settings.reconstructedDelta_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd);
});
reconstructedDeltaButton.parent(element.id);
if(!buttons.includes("reconDelta")){
reconstructedDeltaButton.hide();
}
reconstructedDeltaButton.class("button");
// NOTE(review): this label duplicates quantNoiseButton's; it plays the
// *delta-modulation* quantization noise — label may be a copy-paste slip.
quantNoiseDeltaButton = p.createButton("Play quantization noise");
quantNoiseDeltaButton.mousePressed( () => {
renderWaves(true);
if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)();
playWave(settings.quantNoiseDelta_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd);
});
quantNoiseDeltaButton.parent(element.id);
if(!buttons.includes("quantDelta")){
quantNoiseDeltaButton.hide();
}
quantNoiseDeltaButton.class("button");
// --- adaptive/non-adaptive toggle -------------------------------------
// Flips settings.deltaType and relabels the button, then re-renders.
adaptiveSwitchButton = p.createButton("Switch to adaptive modulation");
adaptiveSwitchButton.mousePressed( () => {
if (settings.deltaType == "adaptive") {settings.deltaType = "non-adaptive";adaptiveSwitchButton.html("Switch to adaptive modulation");}
else {settings.deltaType = "adaptive";adaptiveSwitchButton.html("Switch to non-adaptive modulation");}
settings.render();
settings.p5.draw();
});
adaptiveSwitchButton.parent(element.id);
if(!buttons.includes("adaptive")){
adaptiveSwitchButton.hide();
}
adaptiveSwitchButton.class("button");
// --- widget-visibility checkboxes -------------------------------------
// All default to checked except the time-zoom slider checkbox.
timeZoomSliderCheckbox = p.createCheckbox("Time Zoom Slider");
timeZoomSliderCheckbox.parent(element.id);
inputFrequencySliderCheckbox = p.createCheckbox("Input Frequency Slider", true);
inputFrequencySliderCheckbox.parent(element.id);
samplingFrequencySliderCheckbox = p.createCheckbox("Sampling Frequency Slider", true);
samplingFrequencySliderCheckbox.parent(element.id);
deltaStepSliderCheckbox = p.createCheckbox("Delta Step", true);
deltaStepSliderCheckbox.parent(element.id);
numHarmSliderCheckbox = p.createCheckbox("Harmonics Slider", true);
numHarmSliderCheckbox.parent(element.id);
inputDeltaPanelCheckbox = p.createCheckbox("Input with Delta Modulation Panel", true);
inputDeltaPanelCheckbox.parent(element.id);
reconstructedDeltaPanelCheckbox = p.createCheckbox("Reconstruction with Delta Modulation Panel", true);
reconstructedDeltaPanelCheckbox.parent(element.id);
freqPanelsCheckbox = p.createCheckbox("Frequency Domain Panels", true);
freqPanelsCheckbox.parent(element.id);
// --- apply button: sync panels/sliders with the checkboxes ------------
updateButton = p.createButton("Add/Remove Widgets");
updateButton.mousePressed( () => {
// freqPanelsCheckbox controls both frequency-domain panels below.
updatePanel(panels, "Input Signal Time Domain with Delta Modulation", inputDeltaPanelCheckbox.checked());
updatePanel(panels, "Input Signal Frequency Domain", freqPanelsCheckbox.checked());
updatePanel(panels, "Reconstructed Signal Time Domain", reconstructedDeltaPanelCheckbox.checked());
updatePanel(panels, "Reconstructed Signal FFT", freqPanelsCheckbox.checked());
/*updatePanel(panels, "Input Signal Time Domain", true);
updatePanel(panels, "Sampled Signal FFT", true);
updatePanel(panels, "Sampling Signal Time Domain", true);
updatePanel(panels, "Sampling Signal Frequency Domain", true);
updatePanel(panels, "Sampled Signal Time Domain", true);
updatePanel(panels, "Sampled Signal Frequency Domain", true);
updatePanel(panels, "Quantization Noise Time Domain", true);
updatePanel(panels, "Quantization Noise FFT", true);
updatePanel(panels, "Input with Sampled Signal Time Domain", true);
updatePanel(panels, "Input (solid), Sampled (lollipop), Reconstructed (dotted), Time Domain", true);*/
updateSlider(sliders, "timeZoom", timeZoomSliderCheckbox.checked());
updateSlider(sliders, "fundFreq", inputFrequencySliderCheckbox.checked());
updateSlider(sliders, "downsamplingFactor", samplingFrequencySliderCheckbox.checked());
updateSlider(sliders, "deltaStep", deltaStepSliderCheckbox.checked());
updateSlider(sliders, "numHarm", numHarmSliderCheckbox.checked());
// NOTE(review): propName "" matches no slider factory; in updateSlider
// this still increments numSliders and then dereferences
// sliders[numSliders-1] (undefined) — looks like these eight calls
// would throw. Verify and remove or guard.
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
updateSlider(sliders, "", true);
reorderPanels();
reorderSliders();
redraw();
});
updateButton.parent(element.id);
// The whole options panel is hidden unless "options" is enabled.
if (!buttons.includes("options")) {
timeZoomSliderCheckbox.hide();
inputFrequencySliderCheckbox.hide();
samplingFrequencySliderCheckbox.hide();
deltaStepSliderCheckbox.hide();
numHarmSliderCheckbox.hide();
inputDeltaPanelCheckbox.hide();
reconstructedDeltaPanelCheckbox.hide();
freqPanelsCheckbox.hide();
updateButton.hide();
}
// NOTE(review): p5 select() expects a CSS selector; "main page" selects
// <page> elements inside <main> and presumably returns null here (the
// console.log below suggests this was left over from debugging).
text = p.select("main page");
console.log("text: ", text);
//nextButton.mousePressed( () => {console.log("does this work?");});
}
function redraw() {
// Full refresh after a state change: recompute the layout, re-render
// the signal buffers, then repaint.
// NOTE(review): assumes render() must see post-resize buffer sizes,
// hence windowResized() first — confirm the ordering requirement.
settings.p5.windowResized();
settings.render();
settings.p5.draw();
}
function updatePanel(panels, name, checkBoxState) {
// Adds (checkBoxState true) or removes (false) the panel with the given
// display name, keeping the closure counter `numPanels` in sync with
// panels.length.
//
// Fixes over the original:
//  - An unrecognized name no longer increments numPanels without
//    pushing a panel; previously that desynchronized the counter and
//    re-ran setup() on whatever panel happened to be last.
//  - The long chain of `if (name == ...)` statements is replaced with a
//    dispatch table; arrow factories defer constructor resolution until
//    a panel is actually added.
  const panelFactories = new Map([
    ["Input Signal Time Domain with Delta Modulation", () => new deltaModPanel()],
    ["Input Signal Frequency Domain", () => new InputSigFreqPanel()],
    ["Reconstructed Signal Time Domain", () => new ReconstructedSigPanel()],
    ["Reconstructed Signal FFT", () => new ReconstructedSigFFTPanel()],
    ["Reconstructed Signal Time Domain using Delta Modulation", () => new reconstructedDeltaModSigPanel()],
    ["Reconstructed Signal using Delta Modulation FFT", () => new reconstructedDeltaModSigFFTPanel()],
    ["Input Signal Time Domain", () => new InputSigPanel()],
    ["Sampled Signal FFT", () => new SampledInputFFTPanel()],
    ["Sampling Signal Time Domain", () => new ImpulsePanel()],
    ["Sampling Signal Frequency Domain", () => new ImpulseFreqPanel()],
    ["Sampled Signal Time Domain", () => new SampledInputPanel()],
    ["Sampled Signal Frequency Domain", () => new SampledInputFreqPanel()],
    ["Quantization Noise Time Domain", () => new QuantNoisePanel()],
    ["Quantization Noise FFT", () => new QuantNoiseFFTPanel()],
    ["Input with Sampled Signal Time Domain", () => new InputPlusSampledPanel()],
    ["Input (solid), Sampled (lollipop), Reconstructed (dotted), Time Domain", () => new AllSignalsPanel()],
  ]);
  const existingIndex = panels.findIndex((panel) => panel.name == name);
  if (checkBoxState) {
    // Add the panel if it is known and not already present.
    const factory = panelFactories.get(name);
    if (existingIndex == -1 && factory) {
      panels.push(factory());
      numPanels++;
      // Re-run the layout so the new panel gets a correctly sized buffer.
      let w = width_factor * settings.p5.windowWidth - 20;
      let h = height_factor * settings.p5.windowHeight - 20;
      resize(w, h);
      panels[numPanels - 1].setup(settings.p5, panelHeight, panelWidth, settings);
    }
  } else if (existingIndex != -1) {
    // Remove the panel: free its graphics buffer, drop it, and reflow.
    panels[existingIndex].buffer.remove();
    panels.splice(existingIndex, 1);
    numPanels--;
    settings.p5.windowResized();
  }
}
function updateSlider(sliders, propName, checkBoxState) {
// Adds (checkBoxState true) or removes (false) the slider bound to
// settings[propName], keeping the closure counter `numSliders` in sync
// with sliders.length.
//
// Fix over the original: an unrecognized propName is now a no-op.
// Previously it still incremented numSliders and then called setup() on
// sliders[numSliders-1] — undefined — so the eight
// `updateSlider(sliders, "", true)` calls made by the options button
// would throw a TypeError.
  const sliderFactories = new Map([
    ["timeZoom", () => new TimeZoomSlider()],
    ["fundFreq", () => new FreqSlider()],
    ["downsamplingFactor", () => new SampleRateSlider()],
    ["downsamplingFactorDelta", () => new sampleRateDeltaSlider()],
    ["deltaStep", () => new deltaStepSlider()],
    ["numHarm", () => new NumHarmSlider()],
    ["phase", () => new PhaseSlider()],
  ]);
  const existingIndex = sliders.findIndex((s) => s.propName == propName);
  if (checkBoxState) {
    // Add the slider if it is known and not already present.
    const factory = sliderFactories.get(propName);
    if (existingIndex == -1 && factory) {
      sliders.push(factory());
      numSliders++;
      sliders[numSliders - 1].setup(settings.p5, settings);
      sliders[numSliders - 1].updateValue(settings.p5);
    }
  } else if (existingIndex != -1) {
    // Tear down every DOM element owned by the slider before dropping it.
    const doomed = sliders[existingIndex];
    doomed.slider.remove();
    doomed.textBox.remove();
    doomed.textLabel.remove();
    doomed.button.remove();
    if (propName == "numHarm") {
      // The harmonics slider owns two extra select elements.
      doomed.oddEvenSel.remove();
      doomed.slopeSel.remove();
    }
    sliders.splice(existingIndex, 1);
    numSliders--;
  }
}
function reorderPanels() {
// Moves known panels in the closure array `panels` into a canonical
// display order (input time-domain panels first, each followed by the
// frequency-domain panel, then the reconstruction pair) by swapping
// them toward the front. Returns nothing; reorders in place.
//
// Fixes over the original:
//  - panelNames, reorderedPanelNames and temp were implicit globals
//    (they would throw in strict-mode/module contexts).
//  - "Input Signal Frequency Domain" could be listed twice when both
//    input time-domain panels were present, corrupting the swap pass.
  const panelNames = panels.map((panel) => panel.name);
  const desiredOrder = [];
  const pushOnce = (name) => {
    if (panelNames.includes(name) && !desiredOrder.includes(name)) desiredOrder.push(name);
  };
  // The first panel should be an input time-domain panel, if present,
  // followed by its frequency-domain view.
  if (panelNames.includes("Input Signal Time Domain")) {
    pushOnce("Input Signal Time Domain");
    pushOnce("Input Signal Frequency Domain");
  }
  if (panelNames.includes("Input Signal Time Domain with Delta Modulation")) {
    pushOnce("Input Signal Time Domain with Delta Modulation");
    pushOnce("Input Signal Frequency Domain");
  }
  // The next panels should be the reconstruction pair.
  if (panelNames.includes("Reconstructed Signal Time Domain")) {
    pushOnce("Reconstructed Signal Time Domain");
    pushOnce("Reconstructed Signal FFT");
  }
  // Selection-style swap: bring each desired panel to position i.
  for (let i = 0; i < desiredOrder.length; i++) {
    for (let j = 0; j < panels.length; j++) {
      if (panels[j].name == desiredOrder[i]) {
        const swapped = panels[i];
        panels[i] = panels[j];
        panels[j] = swapped;
        break;
      }
    }
  }
  return;
}
function reorderSliders() {
// Moves the fundamental-frequency slider, then the harmonics slider,
// to the front of the closure array `sliders` via in-place swaps.
// Returns nothing.
//
// Fix over the original: sliderNames, reorderedSliderNames and temp
// were implicit globals (they would throw in strict-mode/module
// contexts); all locals are now properly declared.
  const sliderNames = sliders.map((s) => s.propName);
  const desiredOrder = [];
  // fundFreq first, then numHarm, when present.
  if (sliderNames.includes("fundFreq")) desiredOrder.push("fundFreq");
  if (sliderNames.includes("numHarm")) desiredOrder.push("numHarm");
  // Selection-style swap: bring each desired slider to position i.
  for (let i = 0; i < desiredOrder.length; i++) {
    for (let j = 0; j < sliders.length; j++) {
      if (sliders[j].propName == desiredOrder[i]) {
        const swapped = sliders[i];
        sliders[i] = sliders[j];
        sliders[j] = swapped;
        break;
      }
    }
  }
  return;
}
function playWave(wave, sampleRate, audioctx) {
// Plays `wave` (a Float32Array of samples) once through the given
// AudioContext at `sampleRate` Hz: the samples are copied into a
// single-channel AudioBuffer, wrapped in a one-shot buffer source
// node wired to the context's default output, and started immediately.
  const audioBuffer = audioctx.createBuffer(1, wave.length, sampleRate);
  audioBuffer.copyToChannel(wave, 0, 0);
  const oneShot = audioctx.createBufferSource();
  oneShot.buffer = audioBuffer;
  oneShot.connect(audioctx.destination);
  oneShot.start();
}
function downloadWave(wave, sampleRate, audioctx) {
// TODO: unimplemented stub — presumably intended to export `wave`
// as a downloadable audio file; currently does nothing.
}
};
return new p5(sketch); } // end function new_widget() { var sketch = p => {