• making MIDI -- foot in mouth?

    From luserdroog@21:1/5 to All on Sat Nov 27 15:11:57 2021
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on
    that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI.
    It doesn't use MVC. But it does try to shove the bits and fiddly
    stuff down to the bottom and present a higher-level interface
    (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:

    // Header chunk: 'MThd', 4-byte length (always 6), then three 16-bit
    // big-endian fields: format, number of tracks, division (ticks/quarter).
    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
        'MThd' + bytes(4,6) + bytes(2,fmt) + bytes(2,tracks) + bytes(2,delta_ticks_per_quarter);
    const format_single = 0,       // single multi-channel track
        format_multi_sync = 1,     // simultaneous tracks
        format_multi_async = 2;    // independent tracks

    // Track chunk header: 'MTrk' followed by the track data length.
    // The SMF length field is 4 bytes, not 2 -- bytes(2,length) would
    // produce a malformed file that readers reject.
    const gen_track_header = (length) =>
        'MTrk' + bytes(4,length);

    // Render a notes-tree into a complete single-track SMF string.
    // `delta_ticks_per_quarter` defaults to the previously hard-coded 100,
    // so existing one-argument callers behave exactly as before.
    function gen_midi_file( notes, delta_ticks_per_quarter = 100 ){
        var events = assemble( notes_to_events( notes ) );
        return gen_header( format_single, 1, delta_ticks_per_quarter )
            + gen_track_header( events.length )
            + events;
    }

    // Build a note node from a name like 'A', 'C#', or 'Db' plus an octave.
    // The letter's semitone offset is its position in the lookup string;
    // a two-character name is sharpened (+1) or otherwise flattened (-1).
    function note( name, octave ){
        const letter = name.slice( 0, 1 );
        const semitone = "CCDDEFFGGAAB".indexOf( letter );
        let accidental = 0;
        if( name.length == 2 ){
            accidental = name.charAt( 1 ) == '#' ? 1 : -1;
        }
        return { 'type' : 'note', 'pitch' : semitone + accidental + 12 * octave };
    }

    // A rest: occupies time in a sequence but produces no MIDI events.
    function rest(){
        var node = { 'type' : 'rest' };
        return node;
    }

    // A chord: all child notes sound at the same time.
    // Rest parameters give a real Array; the original stored the `arguments`
    // object, which has no forEach method, so notes_to_events's
    // `notes.notes.forEach(...)` call would throw a TypeError.
    function chord( ...notes ){
        return { 'type' : 'chord',
                 'notes' : notes };
    }

    // A sequence: children play one after another, `delta` ticks apart.
    // Rest parameters fix two bugs in the original: a missing comma after
    // the 'delta' property (syntax error), and `arguments.splice(1)` --
    // the arguments object has no splice method.
    function sequence( delta, ...notes ){
        return { 'type' : 'seq',
                 'delta' : delta,
                 'notes' : notes };
    }

    // MIDI status bytes (upper nibble; the low nibble carries the channel).
    const note_off = 0x80,
        note_on = 0x90,
        key_after_touch = 0xA0,
        control_change = 0xB0,
        program_change = 0xC0,
        channel_after_touch = 0xD0,
        pitch_wheel_change = 0xE0,
        meta = 0xFF;
    // Velocity presets. MIDI data bytes run 0x00-0x7F, so the original
    // fortissimo value 0x88 was out of range and would corrupt the stream
    // (it would be read as a status byte). 0x77 matches the later revision.
    const pianissimo = 0x33,
        piano = 0x44,
        mezzoforte = 0x55,
        forte = 0x66,
        fortissimo = 0x77;

    // Flatten a notes-tree into a flat list of timed MIDI events.
    // `time` is the node's start tick, `duration` the length of one note.
    function notes_to_events( notes, time = 0, duration = 50 ){
        var events = [];
        if( notes.type == 'rest' ){
            // A rest occupies time but emits nothing.
        } else if( notes.type == 'note' ){
            events.push( { 'cmd' : note_on,
                           'pitch' : notes.pitch,
                           'time' : time,
                           'velocity' : piano } );
            // Give note_off a velocity too: assemble() encodes a velocity
            // byte for note_off, which was undefined in the original.
            events.push( { 'cmd' : note_off,
                           'pitch' : notes.pitch,
                           'time' : time + duration,
                           'velocity' : piano } );
        } else if( notes.type == 'chord' ){   // was '=', which assigned and always took this branch
            notes.notes.forEach(
                // concat() returns a new array and discards it; push(...) appends.
                note=> events.push( ...notes_to_events( note, time, duration ) )
            );
        } else if( notes.type == 'seq' ){     // constructor tags nodes 'seq', not 'sequence'
            notes.notes.forEach(
                // `delta` was an undefined free variable; the spacing lives on the node.
                (note,idx)=> events.push( ...notes_to_events( note, time+idx*notes.delta, notes.delta ) )
            );
        }
        return events;
    }

    // Serialize timed events into SMF track data: each event is a
    // variable-length delta time followed by the event's bytes.
    function assemble( events ){
        events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
        //encode the bytes of the midi commands interleaved with deltas
        var acc = '';
        var time = 0;
        events.forEach( function( event ){
            acc = acc + gen_delta( event.time - time );
            time = event.time;
            // The status byte must be encoded as a character; adding the raw
            // number to a string would append its decimal digits ("144").
            // Data values are ONE byte each in the wire format -- bytes(2,...)
            // would insert a stray zero byte before every value.
            acc = acc + String.fromCharCode( event.cmd ) +
                ( event.cmd == note_on ? bytes(1, event.pitch) + bytes(1, event.velocity)
                : event.cmd == note_off ? bytes(1, event.pitch) + bytes(1, event.velocity) : '' );
        });
        return acc;
    }

    // Encode `num` big-endian into exactly `w` bytes, returned as a binary
    // string (one char per byte), zero-padded on the left.
    function bytes( w, num ){
        var acc = [];
        while( num > 255 ){
            acc.unshift( num % 256 );      // unshift, not shift: shift() removes and ignores its argument
            num = Math.floor( num / 256 ); // '/=' left a fractional value behind
        }
        acc.unshift( num );
        var buf = Array(w).fill(0,0).concat(acc);
        var buf2 = buf.slice( buf.length - w );
        return String.fromCharCode(...buf2); // spread syntax is prefix (...buf2), not postfix
    }

    // Encode a delta time as a MIDI variable-length quantity: 7 bits per
    // byte, most significant first, with the 0x80 continuation bit set on
    // every byte except the last.
    function gen_delta( dtime ){
        var acc = [];
        while( dtime > 127 ){
            acc.unshift( dtime % 128 );        // unshift, not shift: shift() ignores its argument
            dtime = Math.floor( dtime / 128 ); // keep integer arithmetic
        }
        acc.unshift( dtime );
        var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat(acc.slice( acc.length-1 ));
        return String.fromCharCode(...codes);  // spread syntax is prefix (...codes)
    }

    //usage

    // Build a short progression stepping every 50 ticks -- an A-minor
    // chord, a bass note, the chord again, then a rest -- and render it
    // to a Standard MIDI File string.
    var notes = sequence( 50,
    chord(note('A',4),note('C',5),note('E',5)),
    note('A',3),
    chord(note('A',4),note('C',5),note('E',5)),
    rest() );
    var midi_file = gen_midi_file( notes );

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From luserdroog@21:1/5 to luserdroog on Sat Nov 27 19:59:12 2021
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on
    that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI.
    It doesn't use MVC. But it does try to shove the bits and fiddly
    stuff down to the bottom and present a higher-level interface
    (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API. Well, I guess it helps. But all the time deltas
    with the weird 7bit integer strings is gone. Instead you have to
    coordinate the timing with whatever tools JavaScript has to offer.
    I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return
    when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events
    would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for
    the appropriate period before sending the next event.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From Julio Di Egidio@21:1/5 to jonas.t...@gmail.com on Sun Nov 28 04:08:43 2021
    On Sunday, 28 November 2021 at 13:00:37 UTC+1, jonas.t...@gmail.com wrote:

    Nah Julio noone would like quantisation,

    Nah, just you are a fucking idiot.

    it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.

    The best resolution *your code* manages.

    A fucking idiot and an incorrigible one at that...

    *Plonk*

    Julio

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Sun Nov 28 04:00:32 2021
    söndag 28 november 2021 kl. 12:21:56 UTC+1 skrev ju...@diegidio.name:
    On 28/11/2021 04:59, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.
    FYI, encapsulating logic in functions and, along the same line, avoiding shared state to the full extent possible, while already way better than anything Jonas keeps writing, still falls short of any actual code structuring proper (and, all the more so of any actual and explicitly functional use of JS). Indeed, code structuring, as the bottom line of software design, is not per se primarily nor immediately a matter of the specific language.

    That said, maybe keep also in mind that you are hardly going to see or
    do any significant structuring of code unless you get and go beyond the
    500 (maybe even the 1000, YMMV) lines of code.
    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API.
    Unless I have missed it, you have yet to say what requirement you are
    trying to implement exactly. Building a midi file in memory to simply
    play it in the browser? What's difficult with that? Or rather playing
    midi?

    If it is playing midi, I think the approach has to be the exact opposite that you guys have shown so far: it's not the notes that should drive
    the loop, it's time itself (your metronome) that should tick at a
    certain frequency, call it your "resolution frequency" (in practice,
    ticking the 32nds or the 64ths, depending on how precise vs fast you
    manage to make it), then at certain ticks, i.e. as and when needed, you would send out midi events.

    And the first iteration would be building such "metronome" and its loop
    and making it tick with enough accuracy and not accumulating drift (a
    least within some reasonable range of metronomic speeds, aka BPMs)...

    Julio
    Nah Julio noone would like quantisation, it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From Julio Di Egidio@21:1/5 to luserdroog on Sun Nov 28 12:21:48 2021
    On 28/11/2021 04:59, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    FYI, encapsulating logic in functions and, along the same line, avoiding
    shared state to the full extent possible, while already way better than anything Jonas keeps writing, still falls short of any actual code
    structuring proper (and, all the more so of any actual and explicitly functional use of JS). Indeed, code structuring, as the bottom line of software design, is not per se primarily nor immediately a matter of the specific language.

    That said, maybe keep also in mind that you are hardly going to see or
    do any significant structuring of code unless you get and go beyond the
    500 (maybe even the 1000, YMMV) lines of code.

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API.

    Unless I have missed it, you have yet to say what requirement you are
    trying to implement exactly. Building a midi file in memory to simply
    play it in the browser? What's difficult with that? Or rather playing
    midi?

    If it is playing midi, I think the approach has to be the exact opposite
    that you guys have shown so far: it's not the notes that should drive
    the loop, it's time itself (your metronome) that should tick at a
    certain frequency, call it your "resolution frequency" (in practice,
    ticking the 32nds or the 64ths, depending on how precise vs fast you
    manage to make it), then at certain ticks, i.e. as and when needed, you
    would send out midi events.

    And the first iteration would be building such "metronome" and its loop
    and making it tick with enough accuracy and not accumulating drift (a
    least within some reasonable range of metronomic speeds, aka BPMs)...

    Julio

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Sun Nov 28 08:57:39 2021
    söndag 28 november 2021 kl. 17:40:38 UTC+1 skrev Jonas Thörnvall:
    söndag 28 november 2021 kl. 17:29:23 UTC+1 skrev Jonas Thörnvall:
    söndag 28 november 2021 kl. 13:08:47 UTC+1 skrev ju...@diegidio.name:
    On Sunday, 28 November 2021 at 13:00:37 UTC+1, jonas.t...@gmail.com wrote:

    Nah Julio noone would like quantisation,
    Nah, just you are a fucking idiot.
    it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.
    The best resolution *your code* manages.

    A fucking idiot and an incorrigible one at that...

    *Plonk*

    Julio
    So what resolution do you reckon my code manage, well it depends on the processor.
    And i am still smarter then you in any IQ test, you may be the knowledgeable one but i am the smartone i prefer it that way.
    stopRec=setInterval(recSomeNotes,5);

    function recSomeNotes(){
    //Recordings are stopped in DrawSCROLLBAR REALTIME=performance.now()-browserLoadTIME;
    while(REALTIME>schedTime){ outportarr[outportindex].send(noteMessage[playPos]);
    if (echo && mode=="Play"){
    pianoKeypressOut();
    }
    schedTime=copyEv[playPos]+schedTime;
    playPos++;
    }
    }
    Just did set playup timeout to zero no problem.
    I do not know the limit for "simultaneous" playup during rec, but probably 0 will not work.... But maybe 2 or 1 ms.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Sun Nov 28 08:35:54 2021
    söndag 28 november 2021 kl. 13:08:47 UTC+1 skrev ju...@diegidio.name:
    On Sunday, 28 November 2021 at 13:00:37 UTC+1, jonas.t...@gmail.com wrote:

    Nah Julio noone would like quantisation,
    Nah, just you are a fucking idiot.
    it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.
    The best resolution *your code* manages.

    A fucking idiot and an incorrigible one at that...

    *Plonk*

    Julio

    Well Julio my recording is realtime "using realtime clock" my playup polling is 5 ms i probably could run it at 2.
    So i have a quantised playup at 5 ms and a RT recording....

    Seen so many morons with their shitty implementations using audio buffer to record midi LoL.
    And they all sound shit and quantised.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Sun Nov 28 08:40:34 2021
    söndag 28 november 2021 kl. 17:29:23 UTC+1 skrev Jonas Thörnvall:
    söndag 28 november 2021 kl. 13:08:47 UTC+1 skrev ju...@diegidio.name:
    On Sunday, 28 November 2021 at 13:00:37 UTC+1, jonas.t...@gmail.com wrote:

    Nah Julio noone would like quantisation,
    Nah, just you are a fucking idiot.
    it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.
    The best resolution *your code* manages.

    A fucking idiot and an incorrigible one at that...

    *Plonk*

    Julio
    So what resolution do you reckon my code manage, well it depends on the processor.
    And i am still smarter then you in any IQ test, you may be the knowledgeable one but i am the smartone i prefer it that way.

    // NOTE(review): quoted as posted -- relies on globals defined elsewhere
    // in the poster's app (stopRec, REALTIME, browserLoadTIME, schedTime,
    // copyEv, playPos, outportarr, outportindex, noteMessage, echo, mode).
    // Poll every 5 ms and flush any MIDI events that have come due.
    stopRec=setInterval(recSomeNotes,5);

    function recSomeNotes(){
    //Recordings are stopped in DrawSCROLLBAR
    // Milliseconds since page load -- the playback clock.
    REALTIME=performance.now()-browserLoadTIME;
    // Send every queued event whose scheduled time has passed.
    while(REALTIME>schedTime){
    outportarr[outportindex].send(noteMessage[playPos]);
    if (echo && mode=="Play"){
    pianoKeypressOut();
    }
    // copyEv presumably holds per-event deltas, accumulated into an
    // absolute schedule time -- TODO confirm against the recording code.
    schedTime=copyEv[playPos]+schedTime;
    playPos++;
    }
    }

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Sun Nov 28 08:29:19 2021
    söndag 28 november 2021 kl. 13:08:47 UTC+1 skrev ju...@diegidio.name:
    On Sunday, 28 November 2021 at 13:00:37 UTC+1, jonas.t...@gmail.com wrote:

    Nah Julio noone would like quantisation,
    Nah, just you are a fucking idiot.
    it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.
    The best resolution *your code* manages.

    A fucking idiot and an incorrigible one at that...

    *Plonk*

    Julio

    So what resolution do you reckon my code manage, well it depends on the processor.
    And i am still smarter then you in any IQ test, you may be the knowledgeable one but i am the smartone i prefer it that way.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Sun Nov 28 09:03:22 2021
    söndag 28 november 2021 kl. 17:40:38 UTC+1 skrev Jonas Thörnvall:
    söndag 28 november 2021 kl. 17:29:23 UTC+1 skrev Jonas Thörnvall:
    söndag 28 november 2021 kl. 13:08:47 UTC+1 skrev ju...@diegidio.name:
    On Sunday, 28 November 2021 at 13:00:37 UTC+1, jonas.t...@gmail.com wrote:

    Nah Julio noone would like quantisation,
    Nah, just you are a fucking idiot.
    it is like saying you want your audio quantised and sure it is to "highest possible" bit and frequensy resolution.
    RT using best resolution the clock of computer can perform.
    The best resolution *your code* manages.

    A fucking idiot and an incorrigible one at that...

    *Plonk*

    Julio
    So what resolution do you reckon my code manage, well it depends on the processor.
    And i am still smarter then you in any IQ test, you may be the knowledgeable one but i am the smartone i prefer it that way.
    stopRec=setInterval(recSomeNotes,5);

    function recSomeNotes(){
    //Recordings are stopped in DrawSCROLLBAR REALTIME=performance.now()-browserLoadTIME;
    while(REALTIME>schedTime){ outportarr[outportindex].send(noteMessage[playPos]);
    if (echo && mode=="Play"){
    pianoKeypressOut();
    }
    schedTime=copyEv[playPos]+schedTime;
    playPos++;
    }
    }
    Well Julio i just set rec to 0 timeout for playup during recording, it seem to record all notes "and playup animated scrollbar" just fine.
    So who is the idiot?

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From luserdroog@21:1/5 to ju...@diegidio.name on Mon Nov 29 21:53:01 2021
    On Sunday, November 28, 2021 at 5:21:56 AM UTC-6, ju...@diegidio.name wrote:
    On 28/11/2021 04:59, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.
    FYI, encapsulating logic in functions and, along the same line, avoiding shared state to the full extent possible, while already way better than anything Jonas keeps writing, still falls short of any actual code structuring proper (and, all the more so of any actual and explicitly functional use of JS). Indeed, code structuring, as the bottom line of software design, is not per se primarily nor immediately a matter of the specific language.

    That said, maybe keep also in mind that you are hardly going to see or
    do any significant structuring of code unless you get and go beyond the
    500 (maybe even the 1000, YMMV) lines of code.
    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API.
    Unless I have missed it, you have yet to say what requirement you are
    trying to implement exactly. Building a midi file in memory to simply
    play it in the browser? What's difficult with that? Or rather playing
    midi?

    If it is playing midi, I think the approach has to be the exact opposite
    that you guys have shown so far: it's not the notes that should drive
    the loop, it's time itself (your metronome) that should tick at a
    certain frequency, call it your "resolution frequency" (in practice,
    ticking the 32nds or the 64ths, depending on how precise vs fast you
    manage to make it), then at certain ticks, i.e. as and when needed, you
    would send out midi events.

    And the first iteration would be building such "metronome" and its loop
    and making it tick with enough accuracy and not accumulating drift (a
    least within some reasonable range of metronomic speeds, aka BPMs)...

    Julio

    All true. I have purposefully been vague about the requirements for the
    program because I've been focused entirely on just typing some code
    rather than bad poetry or bibliographic entries from the 60s and 70s.

    I do kind of want to play the midi data in the browser. I somehow thought
    the api would supply more out of the box. But it looks like it does provide more than I first suspected.

    In the spec, example 9.5 https://www.w3.org/TR/2015/WD-webmidi-20150317/#sending-midi-messages-to-an-output-device

    function sendMiddleC( midiAccess, portID ) {
    var noteOnMessage = [0x90, 60, 0x7f]; // note on, middle C, full velocity
    var output = midiAccess.outputs.get(portID);
    output.send( noteOnMessage ); //omitting the timestamp means send immediately.
    output.send( [0x80, 60, 0x40], window.performance.now() + 1000.0 ); // Inlined array creation- note off, middle C,
    // release velocity = 64, timestamp = now + 1000ms.
    }

    So you can just pass a timestamp along with the midi bytes. Just need
    to scale my ticks value to milliseconds.

    But even that, it seems is only half the battle. Because then you also
    need to set up a sound module on the other end of the midi port
    to interpret the events. Either a synthesizer or sample player.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From luserdroog@21:1/5 to luserdroog on Mon Nov 29 23:33:47 2021
    On Saturday, November 27, 2021 at 9:59:16 PM UTC-6, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on
    that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI.
    It doesn't use MVC. But it does try to shove the bits and fiddly
    stuff down to the bottom and present a higher-level interface
    (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API. Well, I guess it helps. But all the time deltas
    with the weird 7bit integer strings is gone. Instead you have to
    coordinate the timing with whatever tools JavaScript has to offer.
    I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return
    when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events
    would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for
    the appropriate period before sending the next event.

    I did some elaborating and reorganizing. I'm not sure I did all the calculations right. But at least this is something to talk about.
    I think it's a little prettier now.

    The WebMidi API wants milliseconds for its delays. But the numbers
    I have are delta_ticks_per_quarter and milliseconds_per_quarter.
    And the number I actually want to input is beats_per_minute.
    So they all get multiplied and divided together somehow.

    My tree data structure might not be very appropriate for evolving
    towards having a UI. That part is hard so I didn't want to do it yet.
    At the very least, this presents "my understanding of MIDI" fwiw.
    I'm unsure about my decision to introduce time information only
    in the sequence() structure. It kinda makes sense but it's also
    very weird. Time always builds up in even multiples of a unit,
    but the user has to choose that unit. The rests are there to introduce
    pauses, but still it feels limiting but I don't see a better choice at
    the moment.

    //mymidi.js

    // A pitched note: look up the letter's semitone in the scale string,
    // apply the sharp (+1) / flat (-1) adjustment for 2-character names,
    // and offset by 12 semitones per octave.
    function note( name, octave ){
        var pitch = "CCDDEFFGGAAB".indexOf( name.slice(0,1) )
            + ( name.length == 2 ? ( name.charAt(1) == '#' ? 1 : -1 ) : 0 )
            + 12 * octave;
        return { 'type' : 'note', 'pitch' : pitch };
    }

    // A percussion hit: `name` is already a drum key number (see the
    // constant table below), so it is stored directly as the pitch.
    function drum( name ){
        return { 'type' : 'note', 'pitch' : name };
    }

    // A silent placeholder; notes_to_events emits no events for it.
    function rest(){
        const node = { 'type' : 'rest' };
        return node;
    }

    // Simultaneous voices: every child starts at the same time.
    function choir( ...kids ){
        return { 'type' : 'choir', 'kids' : kids };
    }

    // Sequential children: each plays `delta` ticks after the previous one.
    function sequence( delta, ...kids ){
        return { 'type' : 'seq', 'delta' : delta, 'kids' : kids };
    }

    // Play `arg` back-to-back `times` times, spaced by its own duration.
    function repeat( times, arg ){
        var delta = duration( arg );
        var buf = Array(times).fill(arg);
        return sequence( delta, ...buf ); // spread syntax is prefix (...buf), not postfix
    }

    // Route everything under `arg` to MIDI channel `chan`.
    function channel( chan, arg ){
        return { 'type' : 'chan', 'channel' : chan, 'kid' : arg };
    }

    // Select program (patch) `inst` for everything under `arg`.
    function instrument( inst, arg ){
        return { 'type' : 'inst', 'inst' : inst, 'kid' : arg };
    }

    // Attach a time-signature record `sig` (see the field notes near the
    // usage section) to the subtree `arg`.
    function time_sig( sig, arg ){
        return { 'type' : 'sig', 'sig' : sig, 'kid' : arg };
    }

    // Attach a tempo (mpq = time per quarter note) to the subtree `arg`.
    function tempo( mpq, arg ){
        return { 'type' : 'tempo', 'mpq' : mpq, 'kid' : arg };
    }


    // Percussion key numbers: each name is the note number that triggers
    // that drum sound (the usage section below sends these on channel 9,
    // i.e. MIDI channel 10 -- presumably the General MIDI drum map; confirm).
    const ac_bass_drum = 35,
    bass_drum = 36,
    side_stick = 37,
    ac_snare = 38,
    hand_clap = 39,
    el_snare = 40,
    low_fl_tom = 41,
    cl_hat = 42,
    hi_fl_tom = 43,
    ped_hat = 44,
    lo_tom = 45,
    op_hat = 46,
    lo_mid_tom = 47,
    hi_mid_tom = 48,
    crash = 49,
    hi_tom = 50,
    ride = 51,
    chinese_cymbal = 52,
    ride_bell = 53,
    tambourine = 54,
    splash = 55,
    cowbell = 56,
    crash2 = 57,
    vibraslap = 58,
    ride2 = 59,
    hi_bongo = 60,
    lo_bongo = 61,
    mute_hi_conga = 62,
    open_lo_conga = 63,
    lo_conga = 64,
    hi_timbale = 65,
    lo_timbale = 66,
    hi_agogo = 67,
    lo_agogo = 68,
    cabasa = 69,
    maracas = 70,
    short_whistle = 71,
    long_whistle = 72,
    short_guiro = 73,
    long_guiro = 74,
    claves = 75,
    hi_wood_block = 76,
    lo_wood_block = 77,
    mute_cuica = 78,
    open_cuica = 79,
    mute_triangle = 80,
    open_triangle = 81;


    // Velocity presets (valid MIDI data bytes run 0x00-0x7F).
    const pianissimo = 0x33,
    piano = 0x44,
    mezzoforte = 0x55,
    forte = 0x66,
    fortissimo = 0x77;

    // MIDI status bytes (upper nibble; the low nibble carries the channel,
    // added in event_bytes below).
    const note_off = 0x80,
    note_on = 0x90,
    key_after_touch = 0xA0,
    control_change = 0xB0,
    program_change = 0xC0,
    channel_after_touch = 0xD0,
    pitch_wheel_change = 0xE0,
    meta = 0xFF;

    // Total length of a notes-tree in ticks.
    function duration( notes ){
        if( notes.hasOwnProperty( 'delta' ) ){
            // A sequence: per-child spacing times the number of children.
            return notes.delta * notes.kids.length;
        } else if( notes.hasOwnProperty( 'kids' ) ){
            // A choir: all kids start together, so use the first kid's length.
            return duration( notes.kids[0] );
        } else if( notes.hasOwnProperty( 'kid' ) ){ // the ')' was missing here (syntax error)
            return duration( notes.kid );
        } else {
            // A bare note or rest occupies one unit.
            return 1;
        }
    }

    // Flatten a notes-tree into a flat list of timed MIDI events.
    // `time` is the node's start tick, `duration` a note's length in ticks,
    // `channel` the current MIDI channel (overridden by 'chan' nodes).
    function notes_to_events( notes, time = 0, duration = 1, channel = 0 ){
        var events = [];
        if( notes.type == 'rest' ){
            //do nothing right here
        } else if( notes.type == 'note' ){
            events.push( { 'cmd' : note_on,
                           'channel' : channel,
                           'pitch' : notes.pitch,
                           'time' : time,
                           'velocity' : piano } );
            events.push( { 'cmd' : note_off,
                           'channel' : channel,
                           'pitch' : notes.pitch,
                           'time' : time + duration,
                           'velocity' : piano } );
        } else if( notes.type == 'choir' ){
            // concat() returns a new array and discards it; push(...) appends.
            notes.kids.forEach(
                note=> events.push( ...notes_to_events( note, time, duration, channel ) )
            );
        } else if( notes.type == 'seq' ){ // constructor sequence() tags nodes 'seq', not 'sequence'
            notes.kids.forEach(
                (note,idx)=>
                    events.push( ...notes_to_events( note, time+idx*notes.delta, notes.delta, channel ) )
            );
        } else if( notes.type == 'inst' ){
            events.push( { 'cmd' : program_change,
                           'channel' : channel,
                           'prog' : notes.inst,
                           'time' : time } );
            events.push( ...notes_to_events( notes.kid, time, duration, channel ) );
        } else if( notes.type == 'chan' ){
            events.push( ...notes_to_events( notes.kid, time, duration, notes.channel ) );
        } else if( notes.type == 'sig' ){
            // Time-signature meta event: FF 58 04 nn dd cc bb.
            events.push( { 'cmd' : meta,
                           'data' : String.fromCharCode( 0x58,
                               0x04, notes.sig.n, notes.sig.d, notes.sig.c, notes.sig.b ),
                           'time' : time } );
            events.push( ...notes_to_events( notes.kid, time, duration, channel ) );
        } else if( notes.type == 'tempo' ){
            // Tempo meta event: FF 51 03 plus a 3-byte tempo value.
            events.push( { 'cmd' : meta,
                           'data' : String.fromCharCode( 0x51, 0x03 ) + bytes(3, notes.mpq ),
                           'mpq' : notes.mpq,
                           'time' : time } );
            events.push( ...notes_to_events( notes.kid, time, duration, channel ) );
        }
        return events;
    }

    // Serialize sorted events as delta-time + event bytes, then append the
    // mandatory End-of-Track meta event (FF 2F 00).
    function assemble( events ){
        events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
        var acc = '';
        var time = 0;
        events.forEach( function( event ){
            acc = acc + gen_delta( event.time - time );
            time = event.time;
            acc = acc + event_bytes( event );
        });
        return acc + String.fromCharCode( meta, 0x2F, 0x00 ); //End of track marker
    } // this closing brace had been swallowed into the comment above

    // Encode one event: status byte (with the channel folded into the low
    // nibble for channel-voice messages) followed by its data bytes.
    // Data values are ONE byte each in the wire format -- bytes(2,...)
    // would insert a stray zero byte before every value and corrupt the
    // stream.
    function event_bytes( event ){
        return String.fromCharCode( event.cmd + (event.cmd != meta ? event.channel : 0) ) +
            ( event.cmd == note_on ? bytes(1, event.pitch) + bytes(1, event.velocity)
            : event.cmd == note_off ? bytes(1, event.pitch) + bytes(1, event.velocity)
            : event.cmd == program_change ? bytes(1, event.prog)
            : event.cmd == meta ? event.data
            : '' );
    }

    // Encode `num` big-endian into exactly `w` bytes as a binary string,
    // zero-padded on the left.
    function bytes( w, num ){
        var acc = [];
        while( num > 255 ){
            acc.unshift( num % 256 );      // was shift(), which removes and ignores its argument
            num = Math.floor( num / 256 ); // was '/=' which leaves a fraction
        }
        acc.unshift( num );
        var buf = Array(w).fill(0).concat(acc);
        var buf2 = buf.slice( buf.length - w );
        return String.fromCharCode( ...buf2 ); // spread syntax is prefix, not postfix
    }

    // Encode a delta time as a MIDI variable-length quantity: 7 bits per
    // byte, most significant first; every byte except the last carries the
    // 0x80 continuation flag.
    function gen_delta( dtime ){
        var acc = [];
        while( dtime > 127 ){
            acc.unshift( dtime % 128 );        // was shift(), which ignores its argument
            dtime = Math.floor( dtime / 128 ); // keep integer arithmetic
        }
        acc.unshift( dtime );
        var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat(acc.slice( acc.length-1 ));
        return String.fromCharCode( ...codes ); // spread syntax is prefix, not postfix
    }


    // initialize midi output: request Web MIDI access and remember the
    // last output port seen so play() can send to it.

    var midi = null;
    var output_port_id = null;
    function onMidiSuccess( midiAccess ){
        midi = midiAccess;
        // midi.outputs is a Map-like MIDIOutputMap: iterate its values.
        // The original `for...in` walked property keys and never yielded
        // the MIDIOutput objects.
        for( var output of midi.outputs.values() ){
            console.log( "Output port: [type:'" + output.type + "'] id:'" + output.id +
                "manufacturer:'" + output.manufacturer + "' name:'" + output.name +
                "' version:'" + output.version + "'" ); // the '+' before the closing quote was missing (syntax error)
            output_port_id = output.id;
        }
    }
    function onMidiFailure( msg ){
        console.log( "Failed to get MIDI Access -- " + msg );
    }
    // The Web MIDI API method is requestMIDIAccess (capital MIDI);
    // requestMidiAccess is undefined on navigator.
    navigator.requestMIDIAccess().then( onMidiSuccess, onMidiFailure );



    // Schedule a notes-tree on the Web MIDI output, converting tick times
    // to millisecond timestamps relative to "now" (MIDIOutput.send accepts
    // an optional DOMHighResTimeStamp).
    function play( notes, delta_ticks_per_quarter ){
        var events = notes_to_events( notes );
        events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
        // Guard the tempo lookup: findIndex returns -1 when no tempo node
        // exists, and events[-1].mpq would throw. Fall back to 500 ms per
        // quarter (120 BPM).
        var tempo_idx = events.findIndex( e=> e.hasOwnProperty('mpq') );
        var mpq = tempo_idx >= 0 ? events[ tempo_idx ].mpq : 500;
        var time_scale = mpq / delta_ticks_per_quarter; // milliseconds per tick
        var output = midi.outputs.get(output_port_id);
        var real_time = window.performance.now();
        events.forEach( function( event ){
            // send() wants numeric byte values; Array.from on a string
            // yields one-character strings, so convert via charCodeAt.
            var data = Array.from( event_bytes( event ), c=> c.charCodeAt(0) );
            if( event.time == 0 ){
                output.send( data ); // omitting the timestamp sends immediately
            } else {
                output.send( data, real_time + event.time * time_scale );
            }
        });
    }

    // Header chunk: 'MThd', 4-byte length (always 6), then 16-bit format,
    // track count, and division (delta ticks per quarter note).
    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
        [ 'MThd', bytes(4,6), bytes(2,fmt), bytes(2,tracks), bytes(2,delta_ticks_per_quarter) ].join('');

    // Standard MIDI File format codes.
    const format_single = 0;
    const format_multi_sync = 1;
    const format_multi_async = 2;

    // Track chunk header: 'MTrk' plus a 4-byte big-endian data length.
    const gen_track_header = (length) =>
        [ 'MTrk', bytes(4,length) ].join('');

    // Render the notes-tree into a complete single-track (format 0) SMF string.
    function gen_midi_file( notes, delta_ticks_per_quarter ){
        var track = assemble( notes_to_events( notes ) );
        var header = gen_header( format_single, 1, delta_ticks_per_quarter );
        return header + gen_track_header( track.length ) + track;
    }

    //bpm -> mpq
    function bpm( beats ){
    return beats * 1000.0 / 60.0;
    }

    // time_sig:: 'n':: numerator
    // 'd':: denominator
    // 'c':: "number of ticks in metronome click"
    // 'b':: "how many 32nd notes in a quarter note?"
    //q 8 q 12 q 12 q 18
    //e e e e e e e e e e
    //s s s s s s s s s s s s s s s s s s s s s s s s s
    //tttttttt tttttttttttt tttttttttttt tttttttttttttttttt

    //usage

    var q = 120,
    e = 60,
    s = 30;

    var riff = sequence( s, choir(note('A',4),note('C',5),note('E',5)),
    note('A',3),
    choir(note('A',4),note('C',5),note('E',5)),
    rest() );
    var piano_riff = channel( 0,
    instrument( 1,
    repeat( 4,
    riff ) ) );
    var drum_pat = channel( 9,
    choir( sequence( e, drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat),
    drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(op_hat) ),
    sequence( q, drum(bass_drum), drum(ac_snare),
    drum(bass_drum), drum(ac_snare) ) ) );
    var song = tempo( bpm(30),
    time_sig( {'n':4,'d':4,'c':q,'b':8},
    choir( piano_riff, drum_pat ) ) );

    var midi_file = gen_midi_file( song, q );
    play( song, q );

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Tue Nov 30 01:00:37 2021
    tisdag 30 november 2021 kl. 09:57:01 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 09:46:54 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 08:33:52 UTC+1 skrev luser...@gmail.com:
    On Saturday, November 27, 2021 at 9:59:16 PM UTC-6, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on
    that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf) I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI. It doesn't use MVC. But it does try to shove the bits and fiddly stuff down to the bottom and present a higher-level interface (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with the Web Midi API. Well, I guess it helps. But all the time deltas
    with the weird 7bit integer strings is gone. Instead you have to coordinate the timing with whatever tools JavaScript has to offer.
    I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return
    when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events
    would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for the appropriate period before sending the next event.
    I did some elaborating and reorganizing. I'm not sure I did all the calculations right. But at least this is something to talk about.
    I think it's a little prettier now.

    The WebMidi API wants milliseconds for its delays. But the numbers
    I have are delta_ticks_per_quarter and milliseconds_per_quarter.
    And the number I actually want to input is beats_per_minute.
    So they all get multiplied and divided together somehow.

    My tree data structure might not be very appropriate for evolving towards having a UI. That part is hard so I didn't want to do it yet.
    At the very least, this presents "my understanding of MIDI" fwiw.
    I'm unsure about my decision to introduce time information only
    in the sequence() structure. It kinda makes sense but it's also
    very weird. Time always builds up in even multiples of a unit,
    but the user has to choose that unit. The rests are there to introduce pauses, but still it feels limiting but I don't see a better choice at the moment.

    //mymidi.js
    function note( name, octave ){
    return { 'type' : 'note',
    'pitch' : "CCDDEFFGGAAB".indexOf(name.slice(0,1))+
    (name.length == 2 ? (name.charAt(1)=='#'?1:-1): 0)+
    12*octave };
    }
    function drum( name ){
    return { 'type' : 'note',
    'pitch' : name };
    }

    function rest(){
    return { 'type' : 'rest' };
    }
    function choir( ...args ){
    return { 'type' : 'choir',
    'kids' : args };
    }

    function sequence( delta, ...args ){
    return { 'type' : 'seq',
    'delta': delta,
    'kids' : args };
    }

    function repeat( times, arg ){
    var delta = duration( arg );
    var buf = Array(times).fill(arg);
    return sequence( delta, buf... );
    }

    function channel( chan, arg ){
    return { 'type' : 'chan',
    'channel': chan,
    'kid' : arg };
    }

    function instrument( inst, arg ){
    return { 'type' : 'inst',
    'inst' : inst,
    'kid' : arg };
    }

    function time_sig( sig, arg ){
    return { 'type' : 'sig',
    'sig' : sig,
    'kid' : arg };
    }

    function tempo( mpq, arg ){
    return { 'type' : 'tempo',
    'mpq' : mpq,
    'kid' : arg };
    }


    const ac_bass_drum = 35,
    bass_drum = 36,
    side_stick = 37,
    ac_snare = 38,
    hand_clap = 39,
    el_snare = 40,
    low_fl_tom = 41,
    cl_hat = 42,
    hi_fl_tom = 43,
    ped_hat = 44,
    lo_tom = 45,
    op_hat = 46,
    lo_mid_tom = 47,
    hi_mid_tom = 48,
    crash = 49,
    hi_tom = 50,
    ride = 51,
    chinese_cymbal = 52,
    ride_bell = 53,
    tambourine = 54,
    splash = 55,
    cowbell = 56,
    crash2 = 57,
    vibraslap = 58,
    ride2 = 59,
    hi_bongo = 60,
    lo_bongo = 61,
    mute_hi_conga = 62,
    open_lo_conga = 63,
    lo_conga = 64,
    hi_timbale = 65,
    lo_timbale = 66,
    hi_agogo = 67,
    lo_agogo = 68,
    cabasa = 69,
    maracas = 70,
    short_whistle = 71,
    long_whistle = 72,
    short_guiro = 73,
    long_guiro = 74,
    claves = 75,
    hi_wood_block = 76,
    lo_wood_block = 77,
    mute_cuica = 78,
    open_cuica = 79,
    mute_triangle = 80,
    open_triangle = 81;
    const pianissimo = 0x33,
    piano = 0x44,
    mezzoforte = 0x55,
    forte = 0x66,
    fortissimo = 0x77;
    const note_off = 0x80,
    note_on = 0x90,
    key_after_touch = 0xA0,
    control_change = 0xB0,
    program_change = 0xC0,
    channel_after_touch = 0xD0,
    pitch_wheel_change = 0xE0,
    meta = 0xFF;
    function duration( notes ){
    if( notes.hasOwnProperty( 'delta' ) ){
    return notes.delta * notes.kids.length;
    } else if( notes.hasOwnProperty( 'kids' ) ){
    return duration( notes.kids[0] );
    } else if( notes.hasOwnProperty( 'kid' ){
    return duration( notes.kid );
    } else {
    return 1;
    }
    }

    function notes_to_events( notes, time = 0, duration = 1, channel = 0 ){ var events = [];
    if( notes.type == 'rest' ){
    //do nothing right here
    } else if( notes.type == 'note' ){
    events.push( { 'cmd' : note_on,
    'channel' : channel,
    'pitch' : notes.pitch,
    'time' : time,
    'velocity' : piano } );
    events.push( { 'cmd' : note_off,
    'channel' : channel,
    'pitch' : notes.pitch,
    'time' : time + duration,
    'velocity' : piano } );
    } else if( notes.type == 'choir' ){
    notes.kids.forEach(
    note=> events.concat( notes_to_events( note, time, duration, channel ) ) );
    } else if( notes.type == 'sequence' ){
    notes.kids.forEach(
    (note,idx)=>
    events.concat( notes_to_events( note, time+idx*notes.delta, notes.delta, channel ) )
    );
    } else if( notes.type == 'inst' ){
    events.push( { 'cmd' : program_change,
    'channel' : channel,
    'prog' : notes.inst,
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) ); } else if( notes.type == 'chan' ){
    events.concat( notes_to_events( notes.kid, time, duration, notes.channel ) );
    } else if( notes.type == 'sig' ){
    events.push( { 'cmd' : meta,
    'data' : String.fromCharCode( 0x58,
    0x04, notes.sig.n, notes.sig.d, notes.sig.c, notes.sig.b ),
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) ); } else if( notes.type == 'tempo' ){
    events.push( { 'cmd' : meta,
    'data' : String.fromCharCode( 0x51, 0x03 ) + bytes(3, notes.mpq ),
    'mpq' : notes.mpq,
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) ); }
    return events;
    }

    function assemble( events ){
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 ); var acc = '';
    var time = 0;
    events.forEach( function( event ){
    acc = acc + gen_delta( event.time - time );
    time = event.time;
    acc = acc + event_bytes( event );
    });
    return acc + String.fromCharCode( meta, 0x2F, 0x00 ); //End of track marker
    }

    function event_bytes( event ){
    return String.fromCharCode( event.cmd + (event.cmd != meta ? event.channel : 0) ) +
    ( event.cmd == note_on ? bytes(2, event.pitch) + bytes(2, event.velocity)
    : event.cmd == note_off ? bytes(2, event.pitch) + bytes(2, event.velocity)
    : event.cmd == program_change ? bytes(2, event.prog)
    : event.cmd == meta ? event.data
    : '' );
    }

    function bytes( w, num ){
    var acc = [];
    while( num > 255 ){
    acc.shift( num % 256 );
    num /= 256;
    }
    acc.shift( num );
    var buf = Array(w).fill(0).concat(acc);
    var buf2 = buf.slice( buf.length - w );
    return String.fromCharCode( buf2... );
    }

    function gen_delta( dtime ){
    var acc = [];
    while( dtime > 127 ){
    acc.shift( dtime % 128 );
    dtime /= 128;
    }
    acc.shift( dtime );
    var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat(acc.slice( acc.length-1 ));
    return String.fromCharCode( codes... );
    }
    // initialize midi output

    var midi = null;
    var output_port_id = null;
    function onMidiSuccess( midiAccess ){
    midi = midiAccess;
    for( var output in midi.outputs ){
    console.log( "Output port: [type:'" + output.type + "'] id:'" + output.id +
    "manufacturer:'" + output.manufacturer + "' name:'" + output.name +
    "' version:'" + output.version "'" );
    output_port_id = output.id;
    }
    }
    function onMidiFailure( msg ){
    console.log( "Failed to get MIDI Access -- " + msg );
    }
    navigator.requestMidiAccess().then( onMidiSuccess, onMidiFailure );



    function play( notes, delta_ticks_per_quarter ){
    var events = notes_to_events( notes );
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 ); var mpq = events[ events.findIndex( e=> e.hasOwnProperty('mpq') ) ].mpq; var time = 0;
    var time_scale = mpq / delta_ticks_per_quarter; // milliseconds per tick var loop_overhead = 0;
    var output = midi.outputs.get(output_port_id);
    var real_time = window.performance.now();
    events.forEach( function( event ){
    if( event.time == time ){
    output.send( Array.from( event_bytes( event ) ) );
    } else {
    output.send( Array.from( event_bytes( event ) ),
    real_time + (event.time - time) * time_scale );
    time += loop_overhead;
    }
    });
    }

    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
    'MThd' + bytes(4,6) + bytes(2,fmt) + bytes(2,tracks) + bytes(2,delta_ticks_per_quarter);
    const format_single = 0,
    format_multi_sync = 1,
    format_multi_async = 2;

    const gen_track_header = (length) =>
    'MTrk' + bytes(4,length);

    function gen_midi_file( notes, delta_ticks_per_quarter ){
    var events = assemble( notes_to_events( notes ) );
    return gen_header( format_single, 1, delta_ticks_per_quarter )
    + gen_track_header( events.length )
    + events;
    }
    //bpm -> mpq
    function bpm( beats ){
    return beats * 1000.0 / 60.0;
    }

    // time_sig:: 'n':: numerator
    // 'd':: denominator
    // 'c':: "number of ticks in metronome click"
    // 'b':: "how many 32nd notes in a quarter note?"
    //q 8 q 12 q 12 q 18
    //e e e e e e e e e e
    //s s s s s s s s s s s s s s s s s s s s s s s s s
    //tttttttt tttttttttttt tttttttttttt tttttttttttttttttt

    //usage

    var q = 120,
    e = 60,
    s = 30;

    var riff = sequence( s, choir(note('A',4),note('C',5),note('E',5)), note('A',3),
    choir(note('A',4),note('C',5),note('E',5)),
    rest() );
    var piano_riff = channel( 0,
    instrument( 1,
    repeat( 4,
    riff ) ) );
    var drum_pat = channel( 9,
    choir( sequence( e, drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat),
    drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(op_hat) ),
    sequence( q, drum(bass_drum), drum(ac_snare),
    drum(bass_drum), drum(ac_snare) ) ) );
    var song = tempo( bpm(30),
    time_sig( {'n':4,'d':4,'c':q,'b':8},
    choir( piano_riff, drum_pat ) ) );

    var midi_file = gen_midi_file( song, q );
    play( song, q );
    Yeah, that is what is weird with midi: the shortest event distance in "ms" in a song is a result depending on PPQ and BPM.
    Lets say 120 beats per minute BPM and 24 parts per quarter note "PPQ" First 60/120=0.5 sec per beat
    Then 0.5sec / 24 ticks = 0.02083333333 sec/tick

    Thats it.
    So then you calculate bar lets say 4/4
    1 beat was 0.5
    Then follow one bar is 4*0.5=2 sec

    You said you wanted 32 parts per full note "that is a bar of 2 sec length above"
    we calculate 2/32 =0.0625 " that is a 32 part per full note, of length 0.0625 seconds.
    By the way, go with "foot in mouth"; it seems to be the superior format relative to midi, and easier.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Tue Nov 30 00:46:46 2021
    tisdag 30 november 2021 kl. 08:33:52 UTC+1 skrev luser...@gmail.com:
    On Saturday, November 27, 2021 at 9:59:16 PM UTC-6, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on
    that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI.
    It doesn't use MVC. But it does try to shove the bits and fiddly
    stuff down to the bottom and present a higher-level interface
    (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API. Well, I guess it helps. But all the time deltas
    with the weird 7bit integer strings is gone. Instead you have to
    coordinate the timing with whatever tools JavaScript has to offer.
    I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return
    when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events
    would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for
    the appropriate period before sending the next event.
    I did some elaborating and reorganizing. I'm not sure I did all the calculations right. But at least this is something to talk about.
    I think it's a little prettier now.

    The WebMidi API wants milliseconds for its delays. But the numbers
    I have are delta_ticks_per_quarter and milliseconds_per_quarter.
    And the number I actually want to input is beats_per_minute.
    So they all get multiplied and divided together somehow.

    My tree data structure might not be very appropriate for evolving
    towards having a UI. That part is hard so I didn't want to do it yet.
    At the very least, this presents "my understanding of MIDI" fwiw.
    I'm unsure about my decision to introduce time information only
    in the sequence() structure. It kinda makes sense but it's also
    very weird. Time always builds up in even multiples of a unit,
    but the user has to choose that unit. The rests are there to introduce pauses, but still it feels limiting but I don't see a better choice at
    the moment.

    //mymidi.js
    // Leaf constructors for the song tree.

    // A pitched note from a letter name (optionally '#' or 'b') and an octave.
    // pitch = semitone index within "CCDDEFFGGAAB" + accidental + 12*octave.
    function note( name, octave ){
        var base = "CCDDEFFGGAAB".indexOf( name.charAt(0) );
        var accidental = 0;
        if( name.length == 2 ){
            accidental = name.charAt(1) == '#' ? 1 : -1;
        }
        return { 'type' : 'note', 'pitch' : base + accidental + 12 * octave };
    }

    // A percussion hit: the GM drum-map number is used directly as the pitch.
    function drum( name ){
        return { 'type' : 'note', 'pitch' : name };
    }

    // A silent slot; it only occupies time inside a sequence.
    function rest(){
        return { 'type' : 'rest' };
    }
    // Simultaneous children: everything in a choir starts at the same tick.
    function choir( ...kids ){
        var node = { 'type' : 'choir' };
        node.kids = kids;
        return node;
    }

    // Sequential children: each kid starts `delta` ticks after the previous one.
    function sequence( delta, ...kids ){
        var node = { 'type' : 'seq' };
        node.delta = delta;
        node.kids = kids;
        return node;
    }

    // Repeat one subtree `times` times as a sequence whose step is the
    // subtree's own duration (sibling helper `duration`, in ticks).
    function repeat( times, arg ){
        var delta = duration( arg );
        var buf = Array(times).fill(arg);
        // FIX: original `sequence( delta, buf... )` is not JavaScript;
        // the spread operator goes before the array.
        return sequence( delta, ...buf );
    }

    // Decorator nodes: each wraps a single subtree (`kid`) with one attribute.

    // Route the subtree to a specific MIDI channel (9 is GM percussion).
    function channel( chan, arg ){
        return { 'type' : 'chan', 'channel' : chan, 'kid' : arg };
    }

    // Select a program (patch number) for the subtree.
    function instrument( inst, arg ){
        return { 'type' : 'inst', 'inst' : inst, 'kid' : arg };
    }

    // Attach a time-signature record {n,d,c,b} to the subtree.
    function time_sig( sig, arg ){
        return { 'type' : 'sig', 'sig' : sig, 'kid' : arg };
    }

    // Attach a tempo, in milliseconds per quarter note, to the subtree.
    function tempo( mpq, arg ){
        return { 'type' : 'tempo', 'mpq' : mpq, 'kid' : arg };
    }


    const ac_bass_drum = 35,
    bass_drum = 36,
    side_stick = 37,
    ac_snare = 38,
    hand_clap = 39,
    el_snare = 40,
    low_fl_tom = 41,
    cl_hat = 42,
    hi_fl_tom = 43,
    ped_hat = 44,
    lo_tom = 45,
    op_hat = 46,
    lo_mid_tom = 47,
    hi_mid_tom = 48,
    crash = 49,
    hi_tom = 50,
    ride = 51,
    chinese_cymbal = 52,
    ride_bell = 53,
    tambourine = 54,
    splash = 55,
    cowbell = 56,
    crash2 = 57,
    vibraslap = 58,
    ride2 = 59,
    hi_bongo = 60,
    lo_bongo = 61,
    mute_hi_conga = 62,
    open_lo_conga = 63,
    lo_conga = 64,
    hi_timbale = 65,
    lo_timbale = 66,
    hi_agogo = 67,
    lo_agogo = 68,
    cabasa = 69,
    maracas = 70,
    short_whistle = 71,
    long_whistle = 72,
    short_guiro = 73,
    long_guiro = 74,
    claves = 75,
    hi_wood_block = 76,
    lo_wood_block = 77,
    mute_cuica = 78,
    open_cuica = 79,
    mute_triangle = 80,
    open_triangle = 81;
    // MIDI velocity values for common dynamic markings.
    const pianissimo = 0x33,
    piano = 0x44,
    mezzoforte = 0x55,
    forte = 0x66,
    fortissimo = 0x77;
    // MIDI status-byte commands (high nibble; the low nibble carries the
    // channel for channel messages). `meta` (0xFF) introduces a meta event
    // in a file stream.
    const note_off = 0x80,
    note_on = 0x90,
    key_after_touch = 0xA0,
    control_change = 0xB0,
    program_change = 0xC0,
    channel_after_touch = 0xD0,
    pitch_wheel_change = 0xE0,
    meta = 0xFF;
    // Total length of a subtree, in sequence ticks.
    // A seq is delta * kid-count; any other container is assumed to last as
    // long as its first child; a bare note/rest counts as one unit.
    function duration( notes ){
        if( notes.hasOwnProperty( 'delta' ) ){
            return notes.delta * notes.kids.length;
        } else if( notes.hasOwnProperty( 'kids' ) ){
            return duration( notes.kids[0] );
        } else if( notes.hasOwnProperty( 'kid' ) ){ // FIX: original was missing ')'
            return duration( notes.kid );
        } else {
            return 1;
        }
    }

    // Flatten a song tree into a flat list of absolute-time event records.
    // time: start tick of this subtree; duration: how long a bare note lasts;
    // channel: current MIDI channel. Relies on sibling consts (note_on, piano,
    // meta, ...) and the sibling `bytes` helper.
    function notes_to_events( notes, time = 0, duration = 1, channel = 0 ){
        var events = [];
        if( notes.type == 'rest' ){
            // rests emit nothing; they only occupy time inside a seq
        } else if( notes.type == 'note' ){
            events.push( { 'cmd' : note_on,
                           'channel' : channel,
                           'pitch' : notes.pitch,
                           'time' : time,
                           'velocity' : piano } );
            events.push( { 'cmd' : note_off,
                           'channel' : channel,
                           'pitch' : notes.pitch,
                           'time' : time + duration,
                           'velocity' : piano } );
        } else if( notes.type == 'choir' ){
            notes.kids.forEach( function( kid ){
                // FIX: concat returns a new array; the original discarded it,
                // so child events were silently dropped
                events = events.concat( notes_to_events( kid, time, duration, channel ) );
            });
        } else if( notes.type == 'seq' ){ // FIX: constructor builds 'seq', not 'sequence'
            notes.kids.forEach( function( kid, idx ){
                events = events.concat(
                    notes_to_events( kid, time + idx * notes.delta, notes.delta, channel ) );
            });
        } else if( notes.type == 'inst' ){
            events.push( { 'cmd' : program_change,
                           'channel' : channel,
                           'prog' : notes.inst,
                           'time' : time } );
            events = events.concat( notes_to_events( notes.kid, time, duration, channel ) );
        } else if( notes.type == 'chan' ){
            events = events.concat( notes_to_events( notes.kid, time, duration, notes.channel ) );
        } else if( notes.type == 'sig' ){
            events.push( { 'cmd' : meta,
                           'data' : String.fromCharCode( 0x58,
                               0x04, notes.sig.n, notes.sig.d, notes.sig.c, notes.sig.b ),
                           'time' : time } );
            events = events.concat( notes_to_events( notes.kid, time, duration, channel ) );
        } else if( notes.type == 'tempo' ){
            events.push( { 'cmd' : meta,
                           'data' : String.fromCharCode( 0x51, 0x03 ) + bytes(3, notes.mpq ),
                           'mpq' : notes.mpq,
                           'time' : time } );
            events = events.concat( notes_to_events( notes.kid, time, duration, channel ) );
        }
        return events;
    }

    // Serialize absolute-time event records into an SMF track body:
    // sort by time, turn each absolute time into a variable-length delta
    // (gen_delta), append the event's raw bytes (event_bytes), and close
    // with the End-of-Track meta event.
    function assemble( events ){
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
    var acc = '';
    var time = 0; // tick of the previously emitted event
    events.forEach( function( event ){
    acc = acc + gen_delta( event.time - time );
    time = event.time;
    acc = acc + event_bytes( event );
    });
    return acc + String.fromCharCode( meta, 0x2F, 0x00 ); //End of track marker
    }

    // Encode one event record as its raw MIDI byte string.
    // Channel messages carry the channel in the status byte's low nibble;
    // meta events pass their preassembled data string through unchanged.
    // FIX: MIDI data values (key, velocity, program) are single bytes; the
    // original emitted zero-padded two-byte fields via bytes(2, ...), which
    // produces an invalid event stream.
    function event_bytes( event ){
        var status = String.fromCharCode( event.cmd + (event.cmd != meta ? event.channel : 0) );
        if( event.cmd == note_on || event.cmd == note_off ){
            return status + String.fromCharCode( event.pitch, event.velocity );
        }
        if( event.cmd == program_change ){
            return status + String.fromCharCode( event.prog );
        }
        if( event.cmd == meta ){
            return status + event.data;
        }
        return status;
    }

    // Big-endian fixed-width integer encoder: returns `num` as a string of
    // exactly `w` char codes, e.g. bytes(4, 6) === '\x00\x00\x00\x06'.
    function bytes( w, num ){
        var acc = [];
        while( num > 255 ){
            acc.unshift( num % 256 );      // FIX: original shift() removes elements
            num = Math.floor( num / 256 ); // FIX: /= left a fractional remainder
        }
        acc.unshift( num );
        var buf = Array(w).fill(0).concat(acc);  // left-pad with zero bytes
        var buf2 = buf.slice( buf.length - w );  // keep the w least significant
        return String.fromCharCode( ...buf2 );   // FIX: `buf2...` is not JavaScript
    }

    // Encode a tick delta as a MIDI variable-length quantity: 7 bits per
    // byte, most significant first, continuation bit 0x80 set on every byte
    // except the last. e.g. 128 -> '\x81\x00'.
    function gen_delta( dtime ){
        var acc = [];
        while( dtime > 127 ){
            acc.unshift( dtime % 128 );      // FIX: original shift() removes elements
            dtime = Math.floor( dtime / 128 ); // FIX: /= left a fractional remainder
        }
        acc.unshift( dtime );
        var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat(acc.slice( acc.length-1 ));
        return String.fromCharCode( ...codes );  // FIX: `codes...` is not JavaScript
    }
    // initialize midi output

    var midi = null;           // MIDIAccess handle once permission is granted
    var output_port_id = null; // id of the last output port seen (used by play)
    function onMidiSuccess( midiAccess ){
        midi = midiAccess;
        // FIX: outputs is a Map-like; for...in iterated its keys (strings),
        // so output.type etc. were undefined. Iterate the port objects.
        for( const output of midiAccess.outputs.values() ){
            console.log( "Output port: [type:'" + output.type + "'] id:'" + output.id +
                "' manufacturer:'" + output.manufacturer + "' name:'" + output.name +
                "' version:'" + output.version + "'" ); // FIX: missing '+' was a syntax error
            output_port_id = output.id;
        }
    }
    function onMidiFailure( msg ){
        console.log( "Failed to get MIDI Access -- " + msg );
    }
    // FIX: the Web MIDI entry point is requestMIDIAccess (capital MIDI);
    // guard so this file still loads where Web MIDI is unavailable.
    if( typeof navigator !== 'undefined' && navigator.requestMIDIAccess ){
        navigator.requestMIDIAccess().then( onMidiSuccess, onMidiFailure );
    }



    // Stream a song tree straight to the chosen Web MIDI output port.
    // Events at tick 0 are sent immediately; later ones are scheduled via
    // output.send's timestamp argument (milliseconds, performance.now base).
    function play( notes, delta_ticks_per_quarter ){
        var events = notes_to_events( notes );
        events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
        // Tempo event carries mpq (milliseconds per quarter note).
        // FIX: original indexed with findIndex() result and crashed with -1
        // when the tree had no tempo node; fall back to 500 ms (120 BPM).
        var tempo_event = events.find( e=> e.hasOwnProperty('mpq') );
        var mpq = tempo_event ? tempo_event.mpq : 500;
        var time_scale = mpq / delta_ticks_per_quarter; // milliseconds per tick
        var output = midi.outputs.get(output_port_id);
        var real_time = window.performance.now();
        events.forEach( function( event ){
            if( event.cmd == meta ){
                return; // FIX: meta events are file-only, not valid wire messages
            }
            // FIX: event_bytes returns a string and send() wants numeric
            // bytes; Array.from on a string yields characters, not codes.
            var data = Array.from( event_bytes( event ), c=> c.charCodeAt(0) );
            if( event.time == 0 ){
                output.send( data );
            } else {
                output.send( data, real_time + event.time * time_scale );
            }
        });
    }

    // SMF header chunk: 'MThd', 4-byte chunk length (always 6), then format,
    // track count, and division (delta ticks per quarter note), 2 bytes each.
    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
    'MThd' + bytes(4,6) + bytes(2,fmt) + bytes(2,tracks) + bytes(2,delta_ticks_per_quarter);
    // SMF format codes: 0 = single track, 1 = synchronous multitrack,
    // 2 = asynchronous multitrack.
    const format_single = 0,
    format_multi_sync = 1,
    format_multi_async = 2;

    // SMF track chunk header: 'MTrk' followed by the 4-byte body length.
    const gen_track_header = (length) =>
    'MTrk' + bytes(4,length);

    // Render a whole song tree as a single-track (format 0) MIDI file string.
    // delta_ticks_per_quarter becomes the header's time division.
    function gen_midi_file( notes, delta_ticks_per_quarter ){
        const track_body = assemble( notes_to_events( notes ) );
        const file_header = gen_header( format_single, 1, delta_ticks_per_quarter );
        return file_header + gen_track_header( track_body.length ) + track_body;
    }
    // Convert beats per minute to milliseconds per quarter note (mpq).
    // FIX: the original computed beats*1000/60, which is the inverse shape
    // and only agrees at 60 BPM; mpq = 60000/bpm (120 BPM -> 500 ms).
    function bpm( beats ){
        return 60.0 * 1000.0 / beats;
    }

    // time_sig:: 'n':: numerator
    // 'd':: denominator
    // 'c':: "number of ticks in metronome click"
    // 'b':: "how many 32nd notes in a quarter note?"
    //q 8 q 12 q 12 q 18
    //e e e e e e e e e e
    //s s s s s s s s s s s s s s s s s s s s s s s s s
    //tttttttt tttttttttttt tttttttttttt tttttttttttttttttt

    //usage

    var q = 120,
    e = 60,
    s = 30;

    var riff = sequence( s, choir(note('A',4),note('C',5),note('E',5)), note('A',3),
    choir(note('A',4),note('C',5),note('E',5)),
    rest() );
    var piano_riff = channel( 0,
    instrument( 1,
    repeat( 4,
    riff ) ) );
    var drum_pat = channel( 9,
    choir( sequence( e, drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(op_hat) ),
    sequence( q, drum(bass_drum), drum(ac_snare),
    drum(bass_drum), drum(ac_snare) ) ) );
    var song = tempo( bpm(30),
    time_sig( {'n':4,'d':4,'c':q,'b':8},
    choir( piano_riff, drum_pat ) ) );

    var midi_file = gen_midi_file( song, q );
    play( song, q );

    Yeah, that is what is weird with midi: the shortest event distance in "ms" in a song is a result depending on PPQ and BPM.
    Lets say 120 beats per minute BPM and 24 parts per quarter note "PPQ"
    First 60/120=0.5 sec per beat
    Then 0.5sec / 24 ticks = 0.02083333333 sec/tick

    Thats it.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Tue Nov 30 00:56:54 2021
    tisdag 30 november 2021 kl. 09:46:54 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 08:33:52 UTC+1 skrev luser...@gmail.com:
    On Saturday, November 27, 2021 at 9:59:16 PM UTC-6, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on
    that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI. It doesn't use MVC. But it does try to shove the bits and fiddly
    stuff down to the bottom and present a higher-level interface (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API. Well, I guess it helps. But all the time deltas
    with the weird 7bit integer strings is gone. Instead you have to coordinate the timing with whatever tools JavaScript has to offer.
    I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return
    when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events
    would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for
    the appropriate period before sending the next event.
    I did some elaborating and reorganizing. I'm not sure I did all the calculations right. But at least this is something to talk about.
    I think it's a little prettier now.

    The WebMidi API wants milliseconds for its delays. But the numbers
    I have are delta_ticks_per_quarter and milliseconds_per_quarter.
    And the number I actually want to input is beats_per_minute.
    So they all get multiplied and divided together somehow.

    My tree data structure might not be very appropriate for evolving
    towards having a UI. That part is hard so I didn't want to do it yet.
    At the very least, this presents "my understanding of MIDI" fwiw.
    I'm unsure about my decision to introduce time information only
    in the sequence() structure. It kinda makes sense but it's also
    very weird. Time always builds up in even multiples of a unit,
    but the user has to choose that unit. The rests are there to introduce pauses, but still it feels limiting but I don't see a better choice at
    the moment.

    //mymidi.js
    function note( name, octave ){
    return { 'type' : 'note',
    'pitch' : "CCDDEFFGGAAB".indexOf(name.slice(0,1))+
    (name.length == 2 ? (name.charAt(1)=='#'?1:-1): 0)+
    12*octave };
    }
    function drum( name ){
    return { 'type' : 'note',
    'pitch' : name };
    }

    function rest(){
    return { 'type' : 'rest' };
    }
    function choir( ...args ){
    return { 'type' : 'choir',
    'kids' : args };
    }

    function sequence( delta, ...args ){
    return { 'type' : 'seq',
    'delta': delta,
    'kids' : args };
    }

    function repeat( times, arg ){
    var delta = duration( arg );
    var buf = Array(times).fill(arg);
    return sequence( delta, buf... );
    }

    function channel( chan, arg ){
    return { 'type' : 'chan',
    'channel': chan,
    'kid' : arg };
    }

    function instrument( inst, arg ){
    return { 'type' : 'inst',
    'inst' : inst,
    'kid' : arg };
    }

    function time_sig( sig, arg ){
    return { 'type' : 'sig',
    'sig' : sig,
    'kid' : arg };
    }

    function tempo( mpq, arg ){
    return { 'type' : 'tempo',
    'mpq' : mpq,
    'kid' : arg };
    }


    const ac_bass_drum = 35,
    bass_drum = 36,
    side_stick = 37,
    ac_snare = 38,
    hand_clap = 39,
    el_snare = 40,
    low_fl_tom = 41,
    cl_hat = 42,
    hi_fl_tom = 43,
    ped_hat = 44,
    lo_tom = 45,
    op_hat = 46,
    lo_mid_tom = 47,
    hi_mid_tom = 48,
    crash = 49,
    hi_tom = 50,
    ride = 51,
    chinese_cymbal = 52,
    ride_bell = 53,
    tambourine = 54,
    splash = 55,
    cowbell = 56,
    crash2 = 57,
    vibraslap = 58,
    ride2 = 59,
    hi_bongo = 60,
    lo_bongo = 61,
    mute_hi_conga = 62,
    open_lo_conga = 63,
    lo_conga = 64,
    hi_timbale = 65,
    lo_timbale = 66,
    hi_agogo = 67,
    lo_agogo = 68,
    cabasa = 69,
    maracas = 70,
    short_whistle = 71,
    long_whistle = 72,
    short_guiro = 73,
    long_guiro = 74,
    claves = 75,
    hi_wood_block = 76,
    lo_wood_block = 77,
    mute_cuica = 78,
    open_cuica = 79,
    mute_triangle = 80,
    open_triangle = 81;
    const pianissimo = 0x33,
    piano = 0x44,
    mezzoforte = 0x55,
    forte = 0x66,
    fortissimo = 0x77;
    const note_off = 0x80,
    note_on = 0x90,
    key_after_touch = 0xA0,
    control_change = 0xB0,
    program_change = 0xC0,
    channel_after_touch = 0xD0,
    pitch_wheel_change = 0xE0,
    meta = 0xFF;
    function duration( notes ){
    if( notes.hasOwnProperty( 'delta' ) ){
    return notes.delta * notes.kids.length;
    } else if( notes.hasOwnProperty( 'kids' ) ){
    return duration( notes.kids[0] );
    } else if( notes.hasOwnProperty( 'kid' ){
    return duration( notes.kid );
    } else {
    return 1;
    }
    }

    function notes_to_events( notes, time = 0, duration = 1, channel = 0 ){ var events = [];
    if( notes.type == 'rest' ){
    //do nothing right here
    } else if( notes.type == 'note' ){
    events.push( { 'cmd' : note_on,
    'channel' : channel,
    'pitch' : notes.pitch,
    'time' : time,
    'velocity' : piano } );
    events.push( { 'cmd' : note_off,
    'channel' : channel,
    'pitch' : notes.pitch,
    'time' : time + duration,
    'velocity' : piano } );
    } else if( notes.type == 'choir' ){
    notes.kids.forEach(
    note=> events.concat( notes_to_events( note, time, duration, channel ) ) );
    } else if( notes.type == 'sequence' ){
    notes.kids.forEach(
    (note,idx)=>
    events.concat( notes_to_events( note, time+idx*notes.delta, notes.delta, channel ) )
    );
    } else if( notes.type == 'inst' ){
    events.push( { 'cmd' : program_change,
    'channel' : channel,
    'prog' : notes.inst,
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) );
    } else if( notes.type == 'chan' ){
    events.concat( notes_to_events( notes.kid, time, duration, notes.channel ) );
    } else if( notes.type == 'sig' ){
    events.push( { 'cmd' : meta,
    'data' : String.fromCharCode( 0x58,
    0x04, notes.sig.n, notes.sig.d, notes.sig.c, notes.sig.b ),
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) );
    } else if( notes.type == 'tempo' ){
    events.push( { 'cmd' : meta,
    'data' : String.fromCharCode( 0x51, 0x03 ) + bytes(3, notes.mpq ),
    'mpq' : notes.mpq,
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) );
    }
    return events;
    }

    function assemble( events ){
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 ); var acc = '';
    var time = 0;
    events.forEach( function( event ){
    acc = acc + gen_delta( event.time - time );
    time = event.time;
    acc = acc + event_bytes( event );
    });
    return acc + String.fromCharCode( meta, 0x2F, 0x00 ); //End of track marker
    }

    function event_bytes( event ){
    return String.fromCharCode( event.cmd + (event.cmd != meta ? event.channel : 0) ) +
    ( event.cmd == note_on ? bytes(2, event.pitch) + bytes(2, event.velocity) : event.cmd == note_off ? bytes(2, event.pitch) + bytes(2, event.velocity) : event.cmd == program_change ? bytes(2, event.prog)
    : event.cmd == meta ? event.data
    : '' );
    }

    function bytes( w, num ){
    var acc = [];
    while( num > 255 ){
    acc.shift( num % 256 );
    num /= 256;
    }
    acc.shift( num );
    var buf = Array(w).fill(0).concat(acc);
    var buf2 = buf.slice( buf.length - w );
    return String.fromCharCode( buf2... );
    }

    function gen_delta( dtime ){
    var acc = [];
    while( dtime > 127 ){
    acc.shift( dtime % 128 );
    dtime /= 128;
    }
    acc.shift( dtime );
    var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat(acc.slice( acc.length-1 ));
    return String.fromCharCode( codes... );
    }
    // initialize midi output

    var midi = null;
    var output_port_id = null;
    function onMidiSuccess( midiAccess ){
    midi = midiAccess;
    for( var output in midi.outputs ){
    console.log( "Output port: [type:'" + output.type + "'] id:'" + output.id +
    "manufacturer:'" + output.manufacturer + "' name:'" + output.name +
    "' version:'" + output.version "'" );
    output_port_id = output.id;
    }
    }
    function onMidiFailure( msg ){
    console.log( "Failed to get MIDI Access -- " + msg );
    }
    navigator.requestMidiAccess().then( onMidiSuccess, onMidiFailure );



    function play( notes, delta_ticks_per_quarter ){
    var events = notes_to_events( notes );
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 ); var mpq = events[ events.findIndex( e=> e.hasOwnProperty('mpq') ) ].mpq; var time = 0;
    var time_scale = mpq / delta_ticks_per_quarter; // milliseconds per tick var loop_overhead = 0;
    var output = midi.outputs.get(output_port_id);
    var real_time = window.performance.now();
    events.forEach( function( event ){
    if( event.time == time ){
    output.send( Array.from( event_bytes( event ) ) );
    } else {
    output.send( Array.from( event_bytes( event ) ),
    real_time + (event.time - time) * time_scale );
    time += loop_overhead;
    }
    });
    }

    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
    'MThd' + bytes(4,6) + bytes(2,fmt) + bytes(2,tracks) + bytes(2,delta_ticks_per_quarter);
    const format_single = 0,
    format_multi_sync = 1,
    format_multi_async = 2;

    const gen_track_header = (length) =>
    'MTrk' + bytes(4,length);

    function gen_midi_file( notes, delta_ticks_per_quarter ){
    var events = assemble( notes_to_events( notes ) );
    return gen_header( format_single, 1, delta_ticks_per_quarter )
    + gen_track_header( events.length )
    + events;
    }
    //bpm -> mpq
    function bpm( beats ){
    return beats * 1000.0 / 60.0;
    }

    // time_sig:: 'n':: numerator
    // 'd':: denominator
    // 'c':: "number of ticks in metronome click"
    // 'b':: "how many 32nd notes in a quarter note?"
    //q 8 q 12 q 12 q 18
    //e e e e e e e e e e
    //s s s s s s s s s s s s s s s s s s s s s s s s s
    //tttttttt tttttttttttt tttttttttttt tttttttttttttttttt

    //usage

    var q = 120,
    e = 60,
    s = 30;

    var riff = sequence( s, choir(note('A',4),note('C',5),note('E',5)), note('A',3),
    choir(note('A',4),note('C',5),note('E',5)),
    rest() );
    var piano_riff = channel( 0,
    instrument( 1,
    repeat( 4,
    riff ) ) );
    var drum_pat = channel( 9,
    choir( sequence( e, drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat),
    drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(op_hat) ),
    sequence( q, drum(bass_drum), drum(ac_snare),
    drum(bass_drum), drum(ac_snare) ) ) );
    var song = tempo( bpm(30),
    time_sig( {'n':4,'d':4,'c':q,'b':8},
    choir( piano_riff, drum_pat ) ) );

    var midi_file = gen_midi_file( song, q );
    play( song, q );
    Yeah that is what is weird with midi, the shortest event distance i"n ms", in a song is a result depending on PPQ and BPM.
    Lets say 120 beats per minute BPM and 24 parts per quarter note "PPQ"
    First 60/120=0.5 sec per beat
    Then 0.5sec / 24 ticks = 0.02083333333 sec/tick

    Thats it.
    So then you calculate bar lets say 4/4
    1 beat was 0.5
    Then follow one bar is 4*0.5=2 sec

    You said you wanted 32 parts per full note "that is a bar of 2 sec length above"
    we calculate 2/32 =0.0625 " that is a 32 part per full note, of length 0.0625 seconds.

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Tue Nov 30 01:19:29 2021
    tisdag 30 november 2021 kl. 10:15:45 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 10:00:41 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 09:57:01 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 09:46:54 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 08:33:52 UTC+1 skrev luser...@gmail.com:
    On Saturday, November 27, 2021 at 9:59:16 PM UTC-6, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about structure and abstraction and whatnot without putting up a lot code. And admittedly, I'm not super experienced in javascript per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI.
    It doesn't use MVC. But it does try to shove the bits and fiddly stuff down to the bottom and present a higher-level interface (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API. Well, I guess it helps. But all the time deltas with the weird 7bit integer strings is gone. Instead you have to coordinate the timing with whatever tools JavaScript has to offer. I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for
    the appropriate period before sending the next event.
    I did some elaborating and reorganizing. I'm not sure I did all the calculations right. But at least this is something to talk about.
    I think it's a little prettier now.

    The WebMidi API wants milliseconds for its delays. But the numbers
    I have are delta_ticks_per_quarter and milliseconds_per_quarter.
    And the number I actually want to input is beats_per_minute.
    So they all get multiplied and divided together somehow.

    My tree data structure might not be very appropriate for evolving towards having a UI. That part is hard so I didn't want to do it yet.
    At the very least, this presents "my understanding of MIDI" fwiw. I'm unsure about my decision to introduce time information only
    in the sequence() structure. It kinda makes sense but it's also
    very weird. Time always builds up in even multiples of a unit,
    but the user has to choose that unit. The rests are there to introduce
    pauses, but still it feels limiting but I don't see a better choice at
    the moment.

    //mymidi.js
    // Build a note node from a pitch letter (optionally followed by '#' for
    // sharp, anything else for flat) and an octave number.
    function note( name, octave ){
        var letter = name.charAt(0);
        var base = "CCDDEFFGGAAB".indexOf(letter);
        var accidental = 0;
        if( name.length == 2 ){
            accidental = name.charAt(1) == '#' ? 1 : -1;
        }
        return { 'type' : 'note',
                 'pitch' : base + accidental + 12*octave };
    }
    // Percussion hit: the General MIDI drum-map number is used directly as the pitch.
    function drum( name ){
        return { type: 'note', pitch: name };
    }

    // Silent placeholder that occupies one time slot in a sequence.
    function rest(){
        return { type: 'rest' };
    }
    // Simultaneous group: every child starts at the same time.
    function choir( ...args ){
        return { type: 'choir', kids: args };
    }

    // Consecutive group: children play `delta` ticks apart, in order.
    function sequence( delta, ...args ){
        return { type: 'seq', delta: delta, kids: args };
    }

    // Repeat a phrase `times` times, as a sequence spaced by the phrase's own
    // duration so the copies play back to back.
    function repeat( times, arg ){
        var delta = duration( arg );
        var buf = Array(times).fill(arg);
        // fixed: `buf...` is not JavaScript; spread is prefix `...buf`
        return sequence( delta, ...buf );
    }

    // Route the wrapped subtree to a specific MIDI channel (0-15; 9 = GM drums).
    function channel( chan, arg ){
        return { type: 'chan', channel: chan, kid: arg };
    }

    // Apply a program-change (instrument selection) to the wrapped subtree.
    function instrument( inst, arg ){
        return { type: 'inst', inst: inst, kid: arg };
    }

    // Attach a time-signature meta event ({n,d,c,b}) to the wrapped subtree.
    function time_sig( sig, arg ){
        return { type: 'sig', sig: sig, kid: arg };
    }

    // Attach a tempo (mpq = milliseconds per quarter note, as used by play()) to
    // the wrapped subtree.
    function tempo( mpq, arg ){
        return { type: 'tempo', mpq: mpq, kid: arg };
    }


    // General MIDI percussion key map (note numbers on channel 9).
    const ac_bass_drum = 35,
    bass_drum = 36,
    side_stick = 37,
    ac_snare = 38,
    hand_clap = 39,
    el_snare = 40,
    low_fl_tom = 41,
    cl_hat = 42,
    hi_fl_tom = 43,
    ped_hat = 44,
    lo_tom = 45,
    op_hat = 46,
    lo_mid_tom = 47,
    hi_mid_tom = 48,
    crash = 49,
    hi_tom = 50,
    ride = 51,
    chinese_cymbal = 52,
    ride_bell = 53,
    tambourine = 54,
    splash = 55,
    cowbell = 56,
    crash2 = 57,
    vibraslap = 58,
    ride2 = 59,
    hi_bongo = 60,
    lo_bongo = 61,
    mute_hi_conga = 62,
    open_lo_conga = 63,
    lo_conga = 64,
    hi_timbale = 65,
    lo_timbale = 66,
    hi_agogo = 67,
    lo_agogo = 68,
    cabasa = 69,
    maracas = 70,
    short_whistle = 71,
    long_whistle = 72,
    short_guiro = 73,
    long_guiro = 74,
    claves = 75,
    hi_wood_block = 76,
    lo_wood_block = 77,
    mute_cuica = 78,
    open_cuica = 79,
    mute_triangle = 80,
    open_triangle = 81;
    // Dynamic levels used as note-on velocities (0-127).
    const pianissimo = 0x33,
    piano = 0x44,
    mezzoforte = 0x55,
    forte = 0x66,
    fortissimo = 0x77;
    // MIDI status bytes (channel is OR-ed into the low nibble; 0xFF = meta event).
    const note_off = 0x80,
    note_on = 0x90,
    key_after_touch = 0xA0,
    control_change = 0xB0,
    program_change = 0xC0,
    channel_after_touch = 0xD0,
    pitch_wheel_change = 0xE0,
    meta = 0xFF;
    // Duration of a subtree in slots: a sequence is delta * child count; other
    // containers delegate to their (first) child; a bare note/rest counts as 1.
    function duration( notes ){
        if( notes.hasOwnProperty( 'delta' ) ){
            return notes.delta * notes.kids.length;
        } else if( notes.hasOwnProperty( 'kids' ) ){
            return duration( notes.kids[0] );
        } else if( notes.hasOwnProperty( 'kid' ) ){ // fixed: missing ')' was a syntax error
            return duration( notes.kid );
        } else {
            return 1;
        }
    }

    // Flatten a note tree into a flat list of timed events.
    // time and duration are in ticks; channel is the current MIDI channel.
    // Fixes vs. original: Array.prototype.concat returns a NEW array, so every
    // `events.concat(...)` silently discarded its result (now push-spread), and
    // the sequence branch tested 'sequence' while sequence() tags nodes 'seq',
    // so sequences produced no events at all.
    function notes_to_events( notes, time = 0, duration = 1, channel = 0 ){
        var events = [];
        if( notes.type == 'rest' ){
            // a rest occupies time but emits nothing
        } else if( notes.type == 'note' ){
            events.push( { 'cmd' : note_on,
                           'channel' : channel,
                           'pitch' : notes.pitch,
                           'time' : time,
                           'velocity' : piano } );
            events.push( { 'cmd' : note_off,
                           'channel' : channel,
                           'pitch' : notes.pitch,
                           'time' : time + duration,
                           'velocity' : piano } );
        } else if( notes.type == 'choir' ){
            notes.kids.forEach(
                note => events.push( ...notes_to_events( note, time, duration, channel ) )
            );
        } else if( notes.type == 'seq' ){
            notes.kids.forEach(
                (note, idx) =>
                    events.push( ...notes_to_events( note, time + idx*notes.delta, notes.delta, channel ) )
            );
        } else if( notes.type == 'inst' ){
            events.push( { 'cmd' : program_change,
                           'channel' : channel,
                           'prog' : notes.inst,
                           'time' : time } );
            events.push( ...notes_to_events( notes.kid, time, duration, channel ) );
        } else if( notes.type == 'chan' ){
            events.push( ...notes_to_events( notes.kid, time, duration, notes.channel ) );
        } else if( notes.type == 'sig' ){
            events.push( { 'cmd' : meta,
                           'data' : String.fromCharCode( 0x58,
                               0x04, notes.sig.n, notes.sig.d, notes.sig.c, notes.sig.b ),
                           'time' : time } );
            events.push( ...notes_to_events( notes.kid, time, duration, channel ) );
        } else if( notes.type == 'tempo' ){
            events.push( { 'cmd' : meta,
                           'data' : String.fromCharCode( 0x51, 0x03 ) + bytes(3, notes.mpq ),
                           'mpq' : notes.mpq,
                           'time' : time } );
            events.push( ...notes_to_events( notes.kid, time, duration, channel ) );
        }
        return events;
    }

    // Serialize events (sorted by time) into a track body: each event is preceded
    // by its variable-length delta from the previous event, and the mandatory
    // end-of-track meta event is appended.
    function assemble( events ){
        events.sort( (x,y)=> x.time - y.time );
        var prev = 0;
        var chunks = [];
        events.forEach( function( event ){
            chunks.push( gen_delta( event.time - prev ) );
            chunks.push( event_bytes( event ) );
            prev = event.time;
        });
        chunks.push( String.fromCharCode( meta, 0x2F, 0x00 ) ); //End of track marker
        return chunks.join('');
    }

    // Encode one event as its status byte followed by its data bytes.
    // Fixed: MIDI data bytes are single bytes (0-127); the original used
    // bytes(2, ...) which prefixed every data byte with a spurious 0x00,
    // producing invalid note-on/note-off/program-change messages.
    function event_bytes( event ){
        var status = String.fromCharCode( event.cmd + (event.cmd != meta ? event.channel : 0) );
        if( event.cmd == note_on || event.cmd == note_off ){
            return status + bytes(1, event.pitch) + bytes(1, event.velocity);
        } else if( event.cmd == program_change ){
            return status + bytes(1, event.prog);
        } else if( event.cmd == meta ){
            return status + event.data;
        } else {
            return status;
        }
    }

    // Big-endian byte string of `num`, zero-padded (or truncated) to exactly
    // w bytes. Fixes vs. original: Array.prototype.shift REMOVES the first
    // element (unshift inserts), `num /= 256` left fractional values, and
    // `buf2...` is invalid syntax (spread is prefix `...buf2`).
    function bytes( w, num ){
        var acc = [];
        while( num > 255 ){
            acc.unshift( num % 256 );
            num = Math.floor( num / 256 );
        }
        acc.unshift( num );
        var buf = Array(w).fill(0).concat(acc);
        var buf2 = buf.slice( buf.length - w );
        return String.fromCharCode( ...buf2 );
    }

    // MIDI variable-length quantity: 7 bits per byte, big-endian, with the high
    // bit set on every byte except the last. Fixes vs. original: shift->unshift,
    // Math.floor for the integer division, and prefix spread syntax.
    function gen_delta( dtime ){
        var acc = [];
        while( dtime > 127 ){
            acc.unshift( dtime % 128 );
            dtime = Math.floor( dtime / 128 );
        }
        acc.unshift( dtime );
        // all but the last 7-bit group carry the continuation bit (0x80)
        var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat( acc.slice( acc.length-1 ) );
        return String.fromCharCode( ...codes );
    }
    // initialize midi output

    var midi = null;
    var output_port_id = null;
    // Remember the MIDIAccess object and pick an output port (the last one
    // enumerated wins -- NOTE(review): confirm that is the intended choice).
    function onMidiSuccess( midiAccess ){
        midi = midiAccess;
        // fixed: midi.outputs is a MIDIOutputMap; for...in enumerated keys (if
        // anything), so `output` was never a port object -- iterate values().
        for( var output of midi.outputs.values() ){
            console.log( "Output port: [type:'" + output.type + "'] id:'" + output.id +
                "' manufacturer:'" + output.manufacturer + "' name:'" + output.name +
                "' version:'" + output.version + "'" ); // fixed: missing '+' was a syntax error
            output_port_id = output.id;
        }
    }
    function onMidiFailure( msg ){
        console.log( "Failed to get MIDI Access -- " + msg );
    }
    // fixed: the Web MIDI entry point is requestMIDIAccess (capital MIDI)
    navigator.requestMIDIAccess().then( onMidiSuccess, onMidiFailure );



    // Play a note tree live over the Web MIDI output chosen at init time.
    // Event times are in ticks; MIDIOutput.send takes an absolute
    // DOMHighResTimeStamp, so events are scheduled at real_time + ticks*scale.
    function play( notes, delta_ticks_per_quarter ){
    var events = notes_to_events( notes );
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
    // Tempo comes from the first event carrying an 'mpq' field.
    // NOTE(review): findIndex returns -1 when no tempo event exists, making
    // events[-1] undefined and the .mpq access throw -- confirm a tempo()
    // wrapper is always present in the tree.
    var mpq = events[ events.findIndex( e=> e.hasOwnProperty('mpq') ) ].mpq;
    var time = 0;
    var time_scale = mpq / delta_ticks_per_quarter; // milliseconds per tick
    var loop_overhead = 0; // placeholder; never assigned a nonzero value here
    var output = midi.outputs.get(output_port_id);
    var real_time = window.performance.now();
    events.forEach( function( event ){
    if( event.time == time ){
    // events at tick 0: send immediately
    // NOTE(review): event_bytes returns a string, so Array.from yields
    // characters; MIDIOutput.send expects numeric bytes -- likely needs a
    // charCodeAt mapping. Verify against a real output port.
    output.send( Array.from( event_bytes( event ) ) );
    } else {
    // later events: schedule at an absolute timestamp
    output.send( Array.from( event_bytes( event ) ),
    real_time + (event.time - time) * time_scale );
    time += loop_overhead; // no-op while loop_overhead stays 0
    }
    });
    }

    // 'MThd' chunk: 4-byte length (always 6) then format, track count, and
    // delta ticks per quarter note, each as 2 big-endian bytes.
    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
    'MThd' + bytes(4,6) + bytes(2,fmt) + bytes(2,tracks) + bytes(2,delta_ticks_per_quarter);
    // Standard MIDI file formats: 0 = single track, 1 = parallel tracks,
    // 2 = independent tracks.
    const format_single = 0,
    format_multi_sync = 1,
    format_multi_async = 2;

    // 'MTrk' chunk header: 4-byte big-endian length of the track body.
    const gen_track_header = (length) =>
    'MTrk' + bytes(4,length);

    // Assemble a complete single-track (format 0) MIDI file as a byte string.
    function gen_midi_file( notes, delta_ticks_per_quarter ){
        var track = assemble( notes_to_events( notes ) );
        var header = gen_header( format_single, 1, delta_ticks_per_quarter );
        return header + gen_track_header( track.length ) + track;
    }
    //bpm -> mpq
    // Convert beats per minute to milliseconds per quarter note (the 'mpq'
    // unit tempo() and play() use). Fixed: ms per beat is 60000 / bpm; the
    // original computed beats * 1000/60, which GROWS with tempo instead of
    // shrinking (120 bpm gave 2000ms instead of 500ms per quarter).
    function bpm( beats ){
        return 60000.0 / beats;
    }

    // time_sig:: 'n':: numerator
    // 'd':: denominator
    // 'c':: "number of ticks in metronome click"
    // 'b':: "how many 32nd notes in a quarter note?"
    //q 8 q 12 q 12 q 18
    //e e e e e e e e e e
    //s s s s s s s s s s s s s s s s s s s s s s s s s
    //tttttttt tttttttttttt tttttttttttt tttttttttttttttttt

    //usage

    // Tick durations: quarter, eighth, sixteenth (at 120 ticks per quarter note).
    var q = 120,
    e = 60,
    s = 30;

    // A-minor chord figure in sixteenth-note slots.
    var riff = sequence( s, choir(note('A',4),note('C',5),note('E',5)), note('A',3),
    choir(note('A',4),note('C',5),note('E',5)),
    rest() );
    var piano_riff = channel( 0,
    instrument( 1,
    repeat( 4,
    riff ) ) );
    // Channel 9 is the General MIDI percussion channel: hats in eighths over a
    // kick/snare backbeat in quarters.
    var drum_pat = channel( 9,
    choir( sequence( e, drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat),
    drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(op_hat) ),
    sequence( q, drum(bass_drum), drum(ac_snare),
    drum(bass_drum), drum(ac_snare) ) ) );
    var song = tempo( bpm(30),
    time_sig( {'n':4,'d':4,'c':q,'b':8},
    choir( piano_riff, drum_pat ) ) );

    var midi_file = gen_midi_file( song, q );
    play( song, q );
    Yeah, that is what is weird with MIDI: the shortest event distance in ms in a song is a result depending on PPQ and BPM.
    Lets say 120 beats per minute BPM and 24 parts per quarter note "PPQ" First 60/120=0.5 sec per beat
    Then 0.5sec / 24 ticks = 0.02083333333 sec/tick

    Thats it.
    So then you calculate bar lets say 4/4
    1 beat was 0.5
    Then follow one bar is 4*0.5=2 sec

    You said you wanted 32 parts per full note "that is a bar of 2 sec length above"
    we calculate 2/32 =0.0625 " that is a 32 part per full note, of length 0.0625 seconds.
    By the way, go with "foot in mouth" -- it seems to be the superior format relative to MIDI, and easier.
    Ok, so let's do the last step, which I find idiotic and advise against: let's quantise the 32nd note into PPQ.

    0.0625 /0.02083333333=3.00000000048
    That is 3 ticks per 32th note in 24 PPQ and 120 BPM
    So now Julio our proud PPQ embracer can feel proud over himself quantized music unrecognizable.

    Lets just say it has historical reasons that had to do with processor speeds, and is obsolete just like Julio LoL
    If you want to synch gear up, you probably should implement PPQ

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From =?UTF-8?Q?Jonas_Th=C3=B6rnvall?=@21:1/5 to All on Tue Nov 30 01:15:41 2021
    tisdag 30 november 2021 kl. 10:00:41 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 09:57:01 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 09:46:54 UTC+1 skrev Jonas Thörnvall:
    tisdag 30 november 2021 kl. 08:33:52 UTC+1 skrev luser...@gmail.com:
    On Saturday, November 27, 2021 at 9:59:16 PM UTC-6, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.

    So here's my baloney. Following just the reference referred on that old wotsit page, (http://www.idea2ic.com/File_Formats/midi.pdf)
    I cooked up some javascript to try to create a midi stream.

    Untested, and it doesn't use a priority queue. It doesn't have a UI.
    It doesn't use MVC. But it does try to shove the bits and fiddly stuff down to the bottom and present a higher-level interface (inspired by combinators). But I hope it shows some of the
    power of splitting the code into functions.

    midi.js:
    [snip]

    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with the Web Midi API. Well, I guess it helps. But all the time deltas with the weird 7bit integer strings is gone. Instead you have to coordinate the timing with whatever tools JavaScript has to offer. I'm guessing it's same answer as for games and you have to use get_animation_frame() or whatever it's called. So I may return
    when it can actually play something and is ready for a UI.
    Any advice on doing the precise delays between sending events
    would be helpful. I can factor out my functions so I don't need
    to undo the weird delta integers. But I still will need to wait for the appropriate period before sending the next event.
    I did some elaborating and reorganizing. I'm not sure I did all the calculations right. But at least this is something to talk about.
    I think it's a little prettier now.

    The WebMidi API wants milliseconds for its delays. But the numbers
    I have are delta_ticks_per_quarter and milliseconds_per_quarter.
    And the number I actually want to input is beats_per_minute.
    So they all get multiplied and divided together somehow.

    My tree data structure might not be very appropriate for evolving towards having a UI. That part is hard so I didn't want to do it yet. At the very least, this presents "my understanding of MIDI" fwiw.
    I'm unsure about my decision to introduce time information only
    in the sequence() structure. It kinda makes sense but it's also
    very weird. Time always builds up in even multiples of a unit,
    but the user has to choose that unit. The rests are there to introduce pauses, but still it feels limiting but I don't see a better choice at the moment.

    //mymidi.js
    function note( name, octave ){
    return { 'type' : 'note',
    'pitch' : "CCDDEFFGGAAB".indexOf(name.slice(0,1))+
    (name.length == 2 ? (name.charAt(1)=='#'?1:-1): 0)+
    12*octave };
    }
    function drum( name ){
    return { 'type' : 'note',
    'pitch' : name };
    }

    function rest(){
    return { 'type' : 'rest' };
    }
    function choir( ...args ){
    return { 'type' : 'choir',
    'kids' : args };
    }

    function sequence( delta, ...args ){
    return { 'type' : 'seq',
    'delta': delta,
    'kids' : args };
    }

    function repeat( times, arg ){
    var delta = duration( arg );
    var buf = Array(times).fill(arg);
    return sequence( delta, buf... );
    }

    function channel( chan, arg ){
    return { 'type' : 'chan',
    'channel': chan,
    'kid' : arg };
    }

    function instrument( inst, arg ){
    return { 'type' : 'inst',
    'inst' : inst,
    'kid' : arg };
    }

    function time_sig( sig, arg ){
    return { 'type' : 'sig',
    'sig' : sig,
    'kid' : arg };
    }

    function tempo( mpq, arg ){
    return { 'type' : 'tempo',
    'mpq' : mpq,
    'kid' : arg };
    }


    const ac_bass_drum = 35,
    bass_drum = 36,
    side_stick = 37,
    ac_snare = 38,
    hand_clap = 39,
    el_snare = 40,
    low_fl_tom = 41,
    cl_hat = 42,
    hi_fl_tom = 43,
    ped_hat = 44,
    lo_tom = 45,
    op_hat = 46,
    lo_mid_tom = 47,
    hi_mid_tom = 48,
    crash = 49,
    hi_tom = 50,
    ride = 51,
    chinese_cymbal = 52,
    ride_bell = 53,
    tambourine = 54,
    splash = 55,
    cowbell = 56,
    crash2 = 57,
    vibraslap = 58,
    ride2 = 59,
    hi_bongo = 60,
    lo_bongo = 61,
    mute_hi_conga = 62,
    open_lo_conga = 63,
    lo_conga = 64,
    hi_timbale = 65,
    lo_timbale = 66,
    hi_agogo = 67,
    lo_agogo = 68,
    cabasa = 69,
    maracas = 70,
    short_whistle = 71,
    long_whistle = 72,
    short_guiro = 73,
    long_guiro = 74,
    claves = 75,
    hi_wood_block = 76,
    lo_wood_block = 77,
    mute_cuica = 78,
    open_cuica = 79,
    mute_triangle = 80,
    open_triangle = 81;
    const pianissimo = 0x33,
    piano = 0x44,
    mezzoforte = 0x55,
    forte = 0x66,
    fortissimo = 0x77;
    const note_off = 0x80,
    note_on = 0x90,
    key_after_touch = 0xA0,
    control_change = 0xB0,
    program_change = 0xC0,
    channel_after_touch = 0xD0,
    pitch_wheel_change = 0xE0,
    meta = 0xFF;
    function duration( notes ){
    if( notes.hasOwnProperty( 'delta' ) ){
    return notes.delta * notes.kids.length;
    } else if( notes.hasOwnProperty( 'kids' ) ){
    return duration( notes.kids[0] );
    } else if( notes.hasOwnProperty( 'kid' ){
    return duration( notes.kid );
    } else {
    return 1;
    }
    }

    function notes_to_events( notes, time = 0, duration = 1, channel = 0 ){
    var events = [];
    if( notes.type == 'rest' ){
    //do nothing right here
    } else if( notes.type == 'note' ){
    events.push( { 'cmd' : note_on,
    'channel' : channel,
    'pitch' : notes.pitch,
    'time' : time,
    'velocity' : piano } );
    events.push( { 'cmd' : note_off,
    'channel' : channel,
    'pitch' : notes.pitch,
    'time' : time + duration,
    'velocity' : piano } );
    } else if( notes.type == 'choir' ){
    notes.kids.forEach(
    note=> events.concat( notes_to_events( note, time, duration, channel ) )
    );
    } else if( notes.type == 'sequence' ){
    notes.kids.forEach(
    (note,idx)=>
    events.concat( notes_to_events( note, time+idx*notes.delta, notes.delta, channel ) )
    );
    } else if( notes.type == 'inst' ){
    events.push( { 'cmd' : program_change,
    'channel' : channel,
    'prog' : notes.inst,
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) );
    } else if( notes.type == 'chan' ){
    events.concat( notes_to_events( notes.kid, time, duration, notes.channel ) );
    } else if( notes.type == 'sig' ){
    events.push( { 'cmd' : meta,
    'data' : String.fromCharCode( 0x58,
    0x04, notes.sig.n, notes.sig.d, notes.sig.c, notes.sig.b ),
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) );
    } else if( notes.type == 'tempo' ){
    events.push( { 'cmd' : meta,
    'data' : String.fromCharCode( 0x51, 0x03 ) + bytes(3, notes.mpq ), 'mpq' : notes.mpq,
    'time' : time } );
    events.concat( notes_to_events( notes.kid, time, duration, channel ) );
    }
    return events;
    }

    function assemble( events ){
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
    var acc = '';
    var time = 0;
    events.forEach( function( event ){
    acc = acc + gen_delta( event.time - time );
    time = event.time;
    acc = acc + event_bytes( event );
    });
    return acc + String.fromCharCode( meta, 0x2F, 0x00 ); //End of track marker
    }

    function event_bytes( event ){
    return String.fromCharCode( event.cmd + (event.cmd != meta ? event.channel : 0) ) +
    ( event.cmd == note_on ? bytes(2, event.pitch) + bytes(2, event.velocity)
    : event.cmd == note_off ? bytes(2, event.pitch) + bytes(2, event.velocity)
    : event.cmd == program_change ? bytes(2, event.prog)
    : event.cmd == meta ? event.data
    : '' );
    }

    function bytes( w, num ){
    var acc = [];
    while( num > 255 ){
    acc.shift( num % 256 );
    num /= 256;
    }
    acc.shift( num );
    var buf = Array(w).fill(0).concat(acc);
    var buf2 = buf.slice( buf.length - w );
    return String.fromCharCode( buf2... );
    }

    function gen_delta( dtime ){
    var acc = [];
    while( dtime > 127 ){
    acc.shift( dtime % 128 );
    dtime /= 128;
    }
    acc.shift( dtime );
    var codes = acc.slice( 0, acc.length-1 ).map( x=>x+128 ).concat(acc.slice( acc.length-1 ));
    return String.fromCharCode( codes... );
    }
    // initialize midi output

    var midi = null;
    var output_port_id = null;
    function onMidiSuccess( midiAccess ){
    midi = midiAccess;
    for( var output in midi.outputs ){
    console.log( "Output port: [type:'" + output.type + "'] id:'" + output.id +
    "manufacturer:'" + output.manufacturer + "' name:'" + output.name +
    "' version:'" + output.version "'" );
    output_port_id = output.id;
    }
    }
    function onMidiFailure( msg ){
    console.log( "Failed to get MIDI Access -- " + msg );
    }
    navigator.requestMidiAccess().then( onMidiSuccess, onMidiFailure );



    function play( notes, delta_ticks_per_quarter ){
    var events = notes_to_events( notes );
    events.sort( (x,y)=> x.time < y.time ? -1 : x.time == y.time ? 0 : 1 );
    var mpq = events[ events.findIndex( e=> e.hasOwnProperty('mpq') ) ].mpq;
    var time = 0;
    var time_scale = mpq / delta_ticks_per_quarter; // milliseconds per tick
    var loop_overhead = 0;
    var output = midi.outputs.get(output_port_id);
    var real_time = window.performance.now();
    events.forEach( function( event ){
    if( event.time == time ){
    output.send( Array.from( event_bytes( event ) ) );
    } else {
    output.send( Array.from( event_bytes( event ) ),
    real_time + (event.time - time) * time_scale );
    time += loop_overhead;
    }
    });
    }

    const gen_header = (fmt,tracks,delta_ticks_per_quarter) =>
    'MThd' + bytes(4,6) + bytes(2,fmt) + bytes(2,tracks) + bytes(2,delta_ticks_per_quarter);
    const format_single = 0,
    format_multi_sync = 1,
    format_multi_async = 2;

    const gen_track_header = (length) =>
    'MTrk' + bytes(4,length);

    function gen_midi_file( notes, delta_ticks_per_quarter ){
    var events = assemble( notes_to_events( notes ) );
    return gen_header( format_single, 1, delta_ticks_per_quarter )
    + gen_track_header( events.length )
    + events;
    }
    //bpm -> mpq
    function bpm( beats ){
    return beats * 1000.0 / 60.0;
    }

    // time_sig:: 'n':: numerator
    // 'd':: denominator
    // 'c':: "number of ticks in metronome click"
    // 'b':: "how many 32nd notes in a quarter note?"
    //q 8 q 12 q 12 q 18
    //e e e e e e e e e e
    //s s s s s s s s s s s s s s s s s s s s s s s s s
    //tttttttt tttttttttttt tttttttttttt tttttttttttttttttt

    //usage

    var q = 120,
    e = 60,
    s = 30;

    var riff = sequence( s, choir(note('A',4),note('C',5),note('E',5)), note('A',3),
    choir(note('A',4),note('C',5),note('E',5)),
    rest() );
    var piano_riff = channel( 0,
    instrument( 1,
    repeat( 4,
    riff ) ) );
    var drum_pat = channel( 9,
    choir( sequence( e, drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(cl_hat),
    drum(cl_hat), drum(cl_hat), drum(cl_hat), drum(op_hat) ),
    sequence( q, drum(bass_drum), drum(ac_snare),
    drum(bass_drum), drum(ac_snare) ) ) );
    var song = tempo( bpm(30),
    time_sig( {'n':4,'d':4,'c':q,'b':8},
    choir( piano_riff, drum_pat ) ) );

    var midi_file = gen_midi_file( song, q );
    play( song, q );
    Yeah that is what is weird with midi, the shortest event distance i"n ms", in a song is a result depending on PPQ and BPM.
    Lets say 120 beats per minute BPM and 24 parts per quarter note "PPQ" First 60/120=0.5 sec per beat
    Then 0.5sec / 24 ticks = 0.02083333333 sec/tick

    Thats it.
    So then you calculate bar lets say 4/4
    1 beat was 0.5
    Then follow one bar is 4*0.5=2 sec

    You said you wanted 32 parts per full note "that is a bar of 2 sec length above"
    we calculate 2/32 = 0.0625 — that is one 32nd of a full note, with a length of 0.0625 seconds.
    By the way, go with foot in mouth; it seems to be the superior format relative to MIDI, and easier.
    Ok, so let's do the last step, which I find idiotic and recommend not doing: let's quantise the 32nd note into PPQ.

    0.0625 /0.02083333333=3.00000000048
    That is 3 ticks per 32th note in 24 PPQ and 120 BPM
    So now Julio, our proud PPQ embracer, can feel proud of himself for quantizing the music until it is unrecognizable.

    Lets just say it has historical reasons that had to do with processor speeds, and is obsolete just like Julio LoL

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From Julio Di Egidio@21:1/5 to luser...@gmail.com on Tue Nov 30 23:54:18 2021
    On Tuesday, 30 November 2021 at 06:53:06 UTC+1, luser...@gmail.com wrote:
    On Sunday, November 28, 2021 at 5:21:56 AM UTC-6, ju...@diegidio.name wrote:
    On 28/11/2021 04:59, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.
    FYI, encapsulating logic in functions and, along the same line, avoiding shared state to the full extent possible, while already way better than anything Jonas keeps writing, still falls short of any actual code structuring proper (and, all the more so of any actual and explicitly functional use of JS). Indeed, code structuring, as the bottom line of software design, is not per se primarily nor immediately a matter of the specific language.

    That said, maybe keep also in mind that you are hardly going to see or
    do any significant structuring of code unless you get and go beyond the
    500 (maybe even the 1000, YMMV) lines of code.
    Well, dang. Foot indeed in mouth. A Midi file doesn't help you with
    the Web Midi API.
    Unless I have missed it, you have yet to say what requirement you are trying to implement exactly. Building a midi file in memory to simply
    play it in the browser? What's difficult with that? Or rather playing
    midi?

    If it is playing midi, I think the approach has to be the exact opposite that you guys have shown so far: it's not the notes that should drive
    the loop, it's time itself (your metronome) that should tick at a
    certain frequency, call it your "resolution frequency" (in practice, ticking the 32nds or the 64ths, depending on how precise vs fast you
    manage to make it), then at certain ticks, i.e. as and when needed, you would send out midi events.

    And the first iteration would be building such "metronome" and its loop
    and making it tick with enough accuracy and not accumulating drift (a
    least within some reasonable range of metronomic speeds, aka BPMs)...

    All true. I have purposefully been vague about the requirements for the program because I've been focused entirely on just typing some code
    rather than bad poetry or bibliographic entries from the 60s and 70s.

    I do kind of want to play the midi data in the browser. I somehow thought
    the api would supply more out of the box. But it looks like it does provide more than I first suspected.

    In the spec, example 9.5 https://www.w3.org/TR/2015/WD-webmidi-20150317/#sending-midi-messages-to-an-output-device

    function sendMiddleC( midiAccess, portID ) {
    var noteOnMessage = [0x90, 60, 0x7f]; // note on, middle C, full velocity
    var output = midiAccess.outputs.get(portID);
    output.send( noteOnMessage ); //omitting the timestamp means send immediately.
    output.send( [0x80, 60, 0x40], window.performance.now() + 1000.0 ); // Inlined array creation- note off, middle C,
    // release velocity = 64, timestamp = now + 1000ms.
    }

    So you can just pass a timestamp along with the midi bytes. Just need
    to scale my ticks value to milliseconds.

    But even that, it seems is only half the battle. Because then you also
    need to set up a sound module on the other end of the midi port
    to interpret the events. Either a synthesizer or sample player.

    When Dunning-Kruger is a compliment: not just a case of utter cluelessness, you insincere retarded cunt and yet another spammer...

    ESAD.

    *Plonk*

    Julio

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)
  • From luserdroog@21:1/5 to ju...@diegidio.name on Thu Dec 2 21:16:44 2021
    On Wednesday, December 1, 2021 at 1:54:24 AM UTC-6, ju...@diegidio.name wrote:
    On Tuesday, 30 November 2021 at 06:53:06 UTC+1, luser...@gmail.com wrote:
    On Sunday, November 28, 2021 at 5:21:56 AM UTC-6, ju...@diegidio.name wrote:
    On 28/11/2021 04:59, luserdroog wrote:
    On Saturday, November 27, 2021 at 5:12:02 PM UTC-6, luserdroog wrote:
    I've been making grandiose comments and hyperboles about
    structure and abstraction and whatnot without putting up a lot
    code. And admittedly, I'm not super experienced in javascript
    per se compared to most of the regs here.
    [snip]

    *Plonk*


    Okiefenokee, here's a draft of an actual interactive piano-roll UI.
    My data structure is pretty stupid. It should just be an array of
    lists (ie. arrays) of pitches. But it's all clickable. And there's no need
    to find anything based on an x-coordinate because all the clickable
    elements have their click handlers built with the location known.

    So, I think using a <table> is good strategy. Going vertically seems
    much easier and more natural than trying to do horizontal scrolling.
    I suppose this program provides the context for my other thread about
    what to do with the Controller. I'm not really using all the features of
    the View, either, I think.

    It's just 2 files. fim.html with the style sheet embedded. And fim.js with
    the MVC classes and a fancy iterator I found with a google search.
    Critique me please, my droogies.

    fim.html:
    <!DOCTYPE html>
    <!-- fim.html : piano-roll sequencer page; all behaviour lives in fim.js -->
    <html lang="en">
    <head>
      <meta charset="utf-8" />
      <title>FIM sequencer</title>
      <style>
        /* the roll itself: a fixed-layout grid of clickable cells */
        .pianoroll table {
          table-layout: fixed;
          border-collapse: collapse;
          border: 1px solid;
          cursor: pointer;
        }
        .pianoroll th { border: 1.5px solid; }
        .pianoroll td { border: 1px solid; }
        .heavy { border: 2.5px solid; }
        /* a filled (sounding) cell, and the sharps on the keyboard header */
        .black {
          background-color: black;
          color: black;
        }
        .controlpanel ul {
          list-style-type: none;
          display: inline-flex;
          justify-content: center;
          padding: 0;
          margin: 0;
        }
        .controlpanel li {
          padding: 5px;
        }
        #debug { display: none; }
      </style>
    </head>
    <body>
      <div class=pianoroll> <table></table> </div>
      <div class=controlpanel> <ul>
        <li><button id=addrow>Add row</button>
        <li><button id=play>Play</button>
        <li><button id=save>Save</button>
      </ul> </div>
      <div id=debug> </div>
      <!-- FIX: moved inside <body>; content after </body> is invalid HTML and
           is re-parented by the parser. Loading the script last also ensures
           the elements above exist before fim.js wires its handlers. -->
      <script src="fim.js"></script>
    </body>
    </html>


    fim.js:
    // fim.js

    // Shorthand DOM helpers used throughout this file.
    // attach : add an event listener to any EventTarget.
    // qs/qsa : querySelector / querySelectorAll (qsa yields a real Array),
    //          scoped to `scope` when given, else to the whole document.
    // debug  : dump a message into the (hidden) #debug div; returns the message.
    const attach = (target, type, handler) => target.addEventListener(type, handler);
    const qs = (selector, scope) => (scope || document).querySelector(selector);
    const qsa = (selector, scope) => Array.from((scope || document).querySelectorAll(selector));
    const debug = (message) => (qs("#debug").innerHTML = message);

    /**
     * Observable value holder — the M of this little MVC kit.
     * Setting `value` first runs the onChange() hook, then pushes the new
     * value to every registered observer via observer.update(value).
     */
    class Model {
      constructor(){
        this._observers = [];
      }
      /** Register an observer object exposing update(data). */
      observe( observer ){
        this._observers.push( observer );
      }
      /** Drop a previously registered observer (identity comparison). */
      unobserve( observer ){
        this._observers = this._observers.filter( (o) => o !== observer );
      }
      /** Push `data` to every observer. */
      notify( data ){
        for( const o of this._observers ) o.update( data );
      }
      set value( v ){
        this._value = v;
        this.onChange( v );   // hook runs before observers are told
        this.notify( v );
      }
      get value(){
        return this._value;
      }
      /** Override point; default does nothing. */
      onChange( value ){ }
    }

    /**
     * View half of the MVC triad. A View observes one Model (update() is
     * called on every model change), owns an optional Controller, and may
     * contain sub-views which receive each update after this view's own
     * onUpdate() hook runs.
     *
     * Fixes relative to the draft:
     *  - getSuperView() was missing entirely, although addSubView() calls
     *    subView.getSuperView() — adding/re-parenting a sub-view used to throw.
     *  - getSubViews() returned `new Array(this._subViews)`, i.e. a one-element
     *    array wrapping the internal list; it now returns a shallow copy.
     *  - _subViews is initialised before the model is attached, so nothing
     *    can observe an undefined list.
     */
    class View {
      constructor( model ){
        this._subViews = [];
        if( model ) this.setModel( model );
      }
      /** Model callback: run this view's hook, then cascade to sub-views. */
      update( data ){
        this.onUpdate( data );
        this._subViews.forEach( s => s.update( data ) );
      }
      getModel(){ return this._model; }
      setModel( model ){ this._setModelAndController( model, this._controller ); }
      getDefaultController(){ return new Controller(); }
      // Lazily creates a default controller on first access.
      // (The parameter is unused; kept for interface compatibility.)
      getController( controller ){
        if( !this._controller ) this.setController( this.getDefaultController() );
        return this._controller;
      }
      setController( controller ){ this._setModelAndController( this._model, controller ); }
      // Keep model observation and controller wiring consistent in one place.
      _setModelAndController( model, controller ){
        if( this._model !== model ){
          if( this._model ) this._model.unobserve( this );
          if( model ) model.observe( this );
          this._model = model;
        }
        if( controller ){ controller.setView( this ); controller.setModel( model ); }
        this._controller = controller;
      }
      /** Shallow copy so callers cannot mutate the internal list. */
      getSubViews(){ return this._subViews.slice(); }
      /** Adopt `subView`, detaching it from any previous parent first. */
      addSubView( subView ){
        var prev = subView.getSuperView();
        if( prev ) prev.removeSubView( subView );
        this._subViews.push( subView );
        subView.setSuperView( this );
      }
      removeSubView( subView ){
        this._subViews = this._subViews.filter( s => {
          if( s === subView ) s.setSuperView( null );
          return s !== subView;
        });
      }
      getSuperView(){ return this._superView; }
      setSuperView( superView ){ this._superView = superView; }
      /** Detach from the model and destroy children; does not touch the DOM. */
      destroy(){
        if( this._model ) this._model.unobserve( this );
        this._subViews.forEach( s => s.destroy() );
      }
      setHtmlElement( element ){ this._htmlElement = element; }
      /** Override point; default does nothing. */
      onUpdate( data ){ }
      find( sel ){ return qs( sel, this._htmlElement ); }
      findAll( sel ){ return qsa( sel, this._htmlElement ); }
      show( element ){ (element || this._htmlElement).style.display = ''; }
      hide( element ){ (element || this._htmlElement).style.display = 'none'; }
    }

    /**
     * Bare-bones controller: just holds references to its model and view.
     * View.setController() wires both sides up.
     */
    class Controller {
      getModel(){
        return this._model;
      }
      setModel( m ){
        this._model = m;
      }
      getView(){
        return this._view;
      }
      setView( v ){
        this._view = v;
      }
    }

    /**
     * Generator yielding start, start+step, ... while strictly less than
     * `end`. The generator's return value (the final `value` when done)
     * is the count of values produced.
     */
    function* makeRangeIterator(start = 0, end = Infinity, step = 1) {
      let produced = 0;
      let current = start;
      while (current < end) {
        produced += 1;
        yield current;
        current += step;
      }
      return produced;
    }


    // Piano-roll width: three octaves of 12 semitones each (36 columns).
    const octaves = 3;

    // Wire up the MVC triad: the note grid is the model, the .pianoroll
    // <div> is the view's DOM root, and a bare Controller links the two.
    var notes = new Model();
    var pianoroll = new View();
    var control = new Controller();

    pianoroll.setHtmlElement( qs('.pianoroll') );
    pianoroll.setController( control );
    pianoroll.setModel( notes );

    // Buttons and the keyboard both drive the model directly.
    attach(qs('#play'),'click', e=>notes.play() );
    attach(qs('#save'),'click', e=>notes.save() );
    attach(qs('#addrow'),'click', e=>notes.addrow() );
    // 'keyup' fires for every key; key() below filters to p/s/space.
    attach(document,'keyup', e=>key( e.key ) );
    /**
     * Keyboard shortcuts: 'p' = play, 's' = save, space = add a row.
     * Any other key is ignored; always returns true.
     */
    function key( k ){
      switch( k ){
        case 'p':
          notes.play();
          break;
        case 's':
          notes.save();
          break;
        case ' ':
          notes.addrow();
          break;
      }
      return true;
    }

    // Model "methods" are attached ad hoc to the notes instance.
    // play/save are stubs for now — they only write to the debug div.
    notes.play = function(){
    debug("play");
    }

    notes.save = function(){
    debug("save");
    }

    // Append an empty (rest) row; reassigning this.value re-notifies
    // observers and therefore redraws the table.
    notes.addrow = function(){
    debug("add row");
    var v = this.value;
    v.push( {'type':'rest'} );
    this.value = v;
    }

    // Append a row holding a single note at pitch `index`
    // (used by the keyboard-header cells).
    notes.addnote = function( index ){
    debug("add note");
    var v = this.value;
    v.push( {'type':'note', 'pitch':index} );
    this.value = v;
    }

    /**
     * Flip pitch membership of grid row `time`:
     *   rest  -> single note at `pitch`
     *   note  -> rest (same pitch) or a two-pitch 'notes' chord
     *   notes -> pitch removed if present, appended otherwise
     * Reassigning this.value fires the model's notify() and redraws the roll.
     */
    notes.toggle = function( time, pitch ){
      debug("toggle");
      const grid = this.value;
      let row = grid[time];
      if( row.type == 'rest' ){
        row = { 'type':'note', 'pitch':pitch };
      } else if( row.type == 'note' ){
        row = row.pitch == pitch
          ? { 'type':'rest' }
          : { 'type':'notes', 'pitches':[row.pitch, pitch] };
      } else if( row.type == 'notes' ){
        const present = row.pitches.findIndex( p => p == pitch ) != -1;
        if( present ){
          row.pitches = row.pitches.filter( p => p != pitch );
        } else {
          row.pitches.push( pitch );
        }
      }
      grid[time] = row;
      this.value = grid;
    }

    /**
     * Full redraw on every model change: discard the old <table> (always the
     * first child of the .pianoroll div) and rebuild one row per grid entry.
     * The keyboard header row is appended after the data rows, as before.
     */
    pianoroll.onUpdate = function( data ){
      this._htmlElement.children.item(0).remove();

      const table = document.createElement("table");
      for( const [i, entry] of data.entries() ){
        if( entry.type == 'rest' ) add_blank_row( table, i );
        if( entry.type == 'note' ) add_note_row( table, i, entry.pitch );
        if( entry.type == 'notes' ) add_notes_row( table, i, entry.pitches );
      }
      add_table_header( table );
      this._htmlElement.appendChild( table );
    }

    /**
     * Append the clickable keyboard row (one cell per semitone, `octaves`
     * octaves wide) to `table`, styled with the heavy border.
     */
    function add_table_header( table ){
      const row = document.createElement("tr");
      const indices = makeRangeIterator();  // endless 0,1,2,... pitch counter
      for( let oct = 0; oct < octaves; oct++ ){
        add_octave( row, indices );
      }
      row.className = 'heavy';
      table.appendChild( row );
    }

    /**
     * Append the 12 keyboard cells of one octave to header row `head`,
     * consuming consecutive pitch numbers from the shared `indices` iterator.
     * Semitones 1,3,6,8,10 (C#,D#,F#,G#,A#) are painted black.
     */
    function add_octave( head, indices ){
      const sharps = [1, 3, 6, 8, 10];
      for( let semitone = 0; semitone < 12; semitone++ ){
        const cell = white( indices.next().value );
        head.appendChild( sharps.includes( semitone ) ? black( cell ) : cell );
      }
    }

    /** Clickable wrapper: clicking `thing` appends a note row at pitch `index`. */
    function click_addnote( index, thing ){
      const handler = function(){ notes.addnote( index ); };
      return link( handler, thing );
    }

    /** Clickable wrapper: clicking `thing` toggles pitch `index` at time-step `row`. */
    function click_toggle( row, index, thing ){
      const handler = function(){ notes.toggle( row, index ); };
      return link( handler, thing );
    }

    /**
     * Build an <a> element that runs `doit` when clicked and wraps `thing`
     * (a text node or other child) as its content.
     */
    function link( doit, thing ){
      const anchor = document.createElement("a");
      anchor.onclick = doit;
      anchor.appendChild( thing );
      return anchor;
    }

    /**
     * Append a row for a single-note time-step `index`: every cell blank
     * except the one at `pitch`, which is painted black.
     */
    function add_note_row( table, index, pitch ){
      const tr = document.createElement("tr");
      for( let i = 0; i < octaves*12; i++ ){
        const cell = blank( index, i );
        tr.appendChild( i === pitch ? black( cell ) : cell );
      }
      table.appendChild( tr );
    }

    /**
     * Append a row for a chord ('notes') time-step `index`: cells at each
     * entry of `pitches` are painted black, the rest left blank.
     * NOTE: sorts `pitches` in place, i.e. mutates the model's array (as
     * the draft did).
     */
    function add_notes_row( table, index, pitches ){
      pitches.sort( (a,b) => a < b ? -1 : a == b ? 0 : 1 );
      const tr = document.createElement("tr");
      let i = 0;
      for( const p of pitches ){
        while( i < p ){ tr.appendChild( blank( index, i ) ); i++; }
        tr.appendChild( black( blank( index, i ) ) );
        i++;
      }
      while( i < octaves*12 ){ tr.appendChild( blank( index, i ) ); i++; }
      table.appendChild( tr );
    }

    /** Append an all-rest row (every cell clickable but empty) for time-step `index`. */
    function add_blank_row( table, index ){
      const tr = document.createElement("tr");
      let i = 0;
      while( i < octaves*12 ){
        tr.appendChild( blank( index, i ) );
        i++;
      }
      table.appendChild( tr );
    }

    /**
     * Paint a cell black by giving it the `black` CSS class.
     * Returns the same element so calls nest: black(blank(...)).
     */
    function black( cell ){
      cell.className = 'black';
      return cell;
    }

    /**
     * Build one keyboard-header cell (<th>) whose click appends a new note
     * row at pitch `index`. The cell content is a non-breaking space.
     */
    function white( index ){
      const th = document.createElement("th");
      const nbsp = document.createTextNode("\u00A0");
      th.appendChild( click_addnote( index, nbsp ) );
      return th;
    }

    /**
     * Build one grid cell (<td>) whose click toggles pitch `index` at
     * time-step `row`. The cell content is a non-breaking space.
     */
    function blank( row, index ){
      const td = document.createElement("td");
      const nbsp = document.createTextNode("\u00A0");
      td.appendChild( click_toggle( row, index, nbsp ) );
      return td;
    }

    notes.value = []; // seed the model; the resulting notify() draws the empty keyboard

    --- SoupGate-Win32 v1.05
    * Origin: fsxNet Usenet Gateway (21:1/5)