How to use quantized TensorFlow Lite files on Bangle?

  • Mon 2021.10.04

    Nicely done @ThomasVikström!


    psst! . . . Hey @Gordon, methinks you have found your new tutorial writer!!   wink, wink ;-)

  • Just an update to this for other AI & ML nerds like me. With Bangle.js 2 I'm now able to spell the English alphabet by "drawing" characters in the air (CAPS only, plus space and backspace; I left out W). If you want to see it in action, check out the short video I uploaded to my LinkedIn account; the link should work for anyone.

    The main change compared to the tutorial I wrote is putting the code snippet below at the beginning of the programs used to collect the gestures and to recognise them. The code sets the sensitivity thresholds for starting and ending gestures; depending on your use case, you might want to tweak the settings accordingly.

    Bangle.setOptions({
      gestureEndThresh: Math.pow(700, 2),   // squared accelerometer magnitude below which a gesture ends
      gestureStartThresh: Math.pow(600, 2), // squared magnitude that must be exceeded to start a gesture
      gestureInactiveCount: 6,              // samples below the end threshold before the gesture is ended
      gestureMinLength: 15                  // minimum number of samples for a valid gesture
    });
    
    
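    When tuning these, it helps to see what the watch actually captures. A minimal sketch, assuming only the built-in 'gesture' event (which fires with the raw accelerometer samples once a gesture ends), that logs each gesture's length so you can tell whether your thresholds cut gestures short:

    // Log the length of every detected gesture. The 'gesture' event
    // delivers an Int8Array of raw XYZ samples (3 values per sample).
    Bangle.on('gesture', (xyz) => {
      print("gesture with " + (xyz.length / 3) + " samples");
    });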

    If you want to connect your Bangle to e.g. a computer and write in any application, the code below is what I uploaded to the Bangle. I'm sure it could be optimized and cleaned up (and commented!), but as a proof of concept it works well enough for me. Feel free to suggest improvements though; that way I'll learn JS better myself.

    var storage = require('Storage');
    var kb = require("ble_hid_keyboard");
    NRF.setServices(undefined, { hid : kb.report });
    
    const settings = storage.readJSON('setting.json',1) || { HID: false };
    
    var sendHid, next, prev, toggle, up, down, profile;
    var time_on_screen = 500;
    
    Bangle.setOptions({gestureEndThresh: Math.pow(700, 2), gestureStartThresh: Math.pow(700,2), gestureInactiveCount: 6, gestureMinLength: 15});
    
    
    if (settings.HID=="kb" || settings.HID=="kbmedia") {
      profile = 'Keyboard';
      if (settings.HID=="kbmedia") {
        sendHid = function (code, cb) {
          try {
            // send the key press (report ID 2 for the kbmedia profile), then a release
            NRF.sendHIDReport([2,0,0,code,0,0,0,0,0], () => {
              NRF.sendHIDReport([2,0,0,0,0,0,0,0,0], () => {
                if (cb) cb();
              });
            });
          } catch(e) {
            print(e);
          }
        };
      } else {
        sendHid = function (code, cb) {
          try {
            // plain keyboard report: key press, then a release
            NRF.sendHIDReport([0,0,code,0,0,0,0,0], () => {
              NRF.sendHIDReport([0,0,0,0,0,0,0,0], () => {
                if (cb) cb();
              });
            });
          } catch(e) {
            print(e);
          }
        };
      }
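      // HID usage codes: 0x4f/0x50 = right/left arrow, 0x2c = space, 0x52/0x51 = up/down arrow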
      next = function (cb) { sendHid(0x4f, cb); };
      prev = function (cb) { sendHid(0x50, cb); };
      toggle = function (cb) { sendHid(0x2c, cb); };
      up = function (cb) { sendHid(0x52, cb); };
      down = function (cb) { sendHid(0x51, cb); };
    } else {
      E.showPrompt("Enable HID?",{title:"HID disabled"}).then(function(enable) {
        if (enable) {
          settings.HID = "kb";
          require("Storage").write('setting.json',­ settings);
          setTimeout(load, 1000, "hidkbd.app.js");
        } else setTimeout(load, 1000);
      });
    }
    
    function drawApp() {
      g.clear();
      g.setFont("6x8",2);
      g.setFontAlign(0,0);
      g.drawString(profile, 120, 120);
      const d = g.getWidth() - 18;
    
      // wrap a byte array as an 8px-wide, 1 bit-per-pixel image
      function c(a) {
        return {
          width: 8,
          height: a.length,
          bpp: 1,
          buffer: (new Uint8Array(a)).buffer
        };
      }
    
      // 8x8 arrow icons (up, down, select) drawn at the right edge
      g.drawImage(c([16,56,124,254,16,16,16,16]),d,40);
      g.drawImage(c([16,16,16,16,254,124,56,16]),d,194);
      g.drawImage(c([0,8,12,14,255,14,12,8]),d,116);
    }
    
    if (next) {
      // Map recognised gestures to key taps. Single letters use the matching
      // entry in kb.KEY; SPACE and BACKSPACE are handled separately.
      Bangle.on('aiGesture', (v) => {
        E.showMessage(v);
        if (v === 'SPACE') kb.tap(kb.KEY[" "], 2);
        else if (v === 'BACKSPACE') kb.tap(kb.KEY.BACKSPACE, 0);
        else if (v.length === 1 && kb.KEY[v] !== undefined) kb.tap(kb.KEY[v], 0); // A-Z (W was left out of training)
        setTimeout(drawApp, time_on_screen);
      });
    
      setWatch(function(e) {
        var len = e.time - e.lastTime;
        if (len > 0.3 && len < 0.9) {
          E.showMessage('prev');
          setTimeout(drawApp, 1000);
          prev(() => {});
        } else {
          E.showMessage('up');
          setTimeout(drawApp, 1000);
          up(() => {});
        }
      }, BTN1, { edge:"falling",repeat:true,debounce:50})­;
    
      setWatch(function(e) {
        var len = e.time - e.lastTime;
        if (len > 0.3 && len < 0.9) {
          E.showMessage('next');
          setTimeout(drawApp, 1000);
          next(() => {});
        } else {
          E.showMessage('down');
          setTimeout(drawApp, 1000);
          down(() => {});
        }
      }, BTN3, { edge:"falling",repeat:true,debounce:50})­;
    
      setWatch(function(e) {
        E.showMessage('toggle');
        setTimeout(drawApp, 1000);
        toggle();
      }, BTN2, { edge:"falling",repeat:true,debounce:50})­;
    
      drawApp();
    }
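
    For reference, kb.tap from the ble_hid_keyboard module takes a HID key code, a modifier bitmask and an optional callback, so (if I read the module right) a standalone key press outside the app above would look like this:

    // Send a single shifted "a" (i.e. "A") to the paired host.
    // kb.MODIFY.SHIFT is the shift-modifier bit from the same module.
    kb.tap(kb.KEY.A, kb.MODIFY.SHIFT, function() {
      print("key sent");
    });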
    
  • That is really cool!
    I won't lie though, I don't think I will be replacing my keyboard any time soon.

    How about a Bangle on each arm and using semaphore? :D.

  • Thx! Somehow it's not replacing my keyboard either :D

    Right before Christmas I actually put a Bangle on my ankle and taught it to recognize whether I had shoes or socks on my feet when walking. While the accuracy was not great, it was better than random. I guess that by tweaking the gesture sensitivity, using other types of shoes, and collecting lots more data, the accuracy would've been better. And no, I don't need AI to tell me what I'm wearing, HI (Human Intelligence) is good enough for that :)
    So far, Bangle & AI have for me mainly been a solution looking for a problem to solve, but you gotta start somewhere.

  • Wow! Looks amazing!
    And the video demonstration turned out to be very epic, I watched it several times and I want more :)

  • Speaking of keyboards, here is an idea for you. A friend and I discussed that a Bangle could be trained as a keylogger using ML and accelerometer data. At least for one hand, although we think touch typing with two hands might even work. What are your thoughts?

  • Thx @Serj!
    Well, let's see what's next; no promises. What I have in mind will in the worst case not include any Bangle or Puck, unless I can somehow sneak them into the equation. Not revealing anything more for now; I don't have a clue if I can even get the human interface and the tech to work together :-)

  • Not sure I understand, do you mean that the Bangle could be trained to recognise what I'm typing on a keyboard? And that this could be used later when I'm typing on e.g. a table surface, with the Bangle connected to a device without an external keyboard, like a phone?
    At least when I'm touch typing with two hands, my wrists are resting on a surface and sometimes not moving at all, so any accelerometer readings are tiny and most probably unusable. Using one hand, and one finger, it might work as long as you "calibrate", i.e. know which x and y positions you are starting from. That could be doable even without AI.

    Or perhaps you mean something else...?

  • I must type differently. My wrists hover like I am playing a piano when I type. That is what I meant.

  • And you use all fingers, not only index fingers? If you use all fingers, how would it be possible to know which finger you pressed a key with, even if you are moving your wrists?

  • Yes, I use all my fingers. My assumption is that I use a different amount of force, and that the distance to each key differs based on the position of the key and which finger I am using. Maybe it is ridiculous though; it was just an idea I discussed with a friend over coffee. I haven't actually looked at the accelerometer data when I am typing.
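
    Looking at the raw data is easy enough if anyone wants to test the idea. A minimal sketch, using the Bangle's standard accel event, that streams readings to the console while you type (the poll interval is just an assumption to tweak):

    // Stream accelerometer readings (in g) to the console, to judge how
    // much wrist movement there actually is while typing.
    Bangle.setPollInterval(80); // poll the accelerometer every 80ms (12.5Hz)
    Bangle.on('accel', (a) => {
      print(a.x.toFixed(3), a.y.toFixed(3), a.z.toFixed(3), "mag:", a.mag.toFixed(3));
    });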

  • Ok, then I understand your train of thought!
    If I used a very old (but very reliable) IBM keyboard I would probably need more force, compared with my current not-so-great MS Surface Pro 4 keyboard, where the keys only travel 1 mm or so. With my way of typing, I however doubt the accuracy would be much better than random; of course the Bangle(s) would need to be worn very tightly to reduce any slipping.
    Interesting idea anyway, I'll put it behind my ear.

    PS Similar solutions can be found by searching for "make any surface a keyboard", e.g.:
    https://www.youtube.com/watch?v=cQqgm7PGSXA&ab_channel=Akshay


    https://mashable.com/article/wearable-surface-keyboard

  • This looks amazing - thanks for posting up!

    And this is all still done with Edge Impulse? It's really impressive - I always felt that the TensorFlow stuff was a bit of a steep learning curve and a bit painful to train, and it seems like Edge Impulse has really made massive strides there.

  • Thx @Gordon!
    Yes, the machine learning part is completely done with Edge Impulse, and I agree, it's an impressive platform; for many use cases the 20-minute time limit (on the free tier) is more than enough. You can also do inference on a mobile, e.g. with camera/images, audio, or accelerometer data.
    I also tried to learn TensorFlow some years back, but got stuck on too many details here and there in Python, so Edge Impulse and other platforms that hide the inner parts of the engine help bring ML "to the crowds".
    For ML nerds: other platforms I've found are Google's Teachable Machine (browser-based) and Microsoft's Lobe.ai (a desktop client); those are a bit quicker to get started with if you are only interested in ML with images or audio.

  • You wanted more :-)

    While I've not been able (or even tried) to add any Espruino device to what I've been working on, this was published yesterday. In the video and tutorial I'm showing and explaining how to use an EEG headset and ML (Machine Learning) to control a very simplistic Pong game. The next related projects are already under way, so if interested, stay tuned :-)

  • Thank you, Sir, THIS made my day!

    I still remember the previous video that I associate with AI on the BJS watch, kudos!
    By the way, if it doesn't bother you, please add that previous video to your YouTube channel; it's most convenient to subscribe and watch there!

    Also, such work inspires investing time and energy in open source!

    Project idea: a virtual mouse based on the BJS watch + TensorFlow. The cursor and buttons would be controlled by moving an empty hand on the table or in the air, simulating mouse movement :)

    Cheers!

  • Thanks @Serj! I've added the previous video to my YouTube channel; somehow it had escaped me to do it earlier.
    Your project idea sounds interesting! Moving the mouse cursor might very well work better without ML, just by using accelerometer data; I have a vague memory that someone has done something similar. Simulating mouse button(s) might be trickier without ML, so a blended approach might be best here.
    Next spring I'm teaching the basics of Machine Learning at university level, and this project might be something to at least partially include in the curriculum. My university purchased 10 Bangles for teaching purposes, so I could ask some students to use the watches for collecting data. In addition, we have 10 Arduino ML kits + other miscellaneous hardware to use.
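
    For the cursor part without ML, something along these lines might already work: a minimal sketch, assuming the standard ble_hid_mouse module and a simple tilt-to-speed mapping (the scale factor is just a guess to tune):

    // Move the host's mouse cursor by tilting the wrist, over BLE HID.
    var mouse = require("ble_hid_mouse");
    NRF.setServices(undefined, { hid : mouse.report });

    setInterval(function() {
      var a = Bangle.getAccel();           // current accelerometer reading in g
      var dx = Math.round(a.x * 20);       // map tilt to cursor speed
      var dy = Math.round(a.y * 20);
      if (dx || dy) mouse.send(dx, dy, 0); // 0 = no buttons pressed
    }, 50);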

  • Great! Interesting details! I hope the students enjoy it 👍
    There are currently almost no AI-based projects in the app repository :(
    https://banglejs.com/apps/
    Hope the situation gets better!

    By the way, do you have a practice of recording lectures on video? Or is it prohibited? It would be great to listen to upcoming lectures on AI in combination with technologies such as TensorFlow Lite, BJS watch, etc 👍

  • If the lectures were virtual (they aren't), recording and sharing them publicly might be possible as long as the students' names or faces are not visible. But in my case it will be a blend of face-to-face workshops with some theory in between, so no chance of recording.

  • That looks great! Thanks again for all your work on the Edge Impulse stuff (it just popped up again on my Twitter feed)

    Last week I was at NodeConf and they had a workshop on TensorFlow.js, and it also mentioned https://teachablemachine.withgoogle.com

    If some of you haven't seen this, give it a go (especially the image version at https://teachablemachine.withgoogle.com/train/image) - it's insane how quickly you can teach it to do useful things with the webcam.

    I did some tutorials for a workshop, and very quickly added one on using it and Bangle.js to detect you picking your nose and to buzz a Bangle.js to stop you: https://github.com/gfwilliams/workshop-nodeconfeu2022/blob/main/tensorflow.md

    I'll bring the other tutorials there onto the main Espruino website soon as well: https://github.com/gfwilliams/workshop-nodeconfeu2022

  • Thx Gordon, I learned a lot myself in the project!
    I've tried Teachable Machine and Lobe.ai before, and the learning curve is very shallow with these types of tools. I need to try the nose-picking idea; it's a bit more fun than just having some program check whether you are wearing a mask or not.

  • @Serj you might've seen the newest AI-related video on my YouTube channel? I try to keep you entertained :-D

    The next AI project might involve a Bangle.js, in case I'm able to communicate over Bluetooth with the Nicla Sense ME, as asked about here. I'm still a complete newbie in this area, so I don't know how easy or difficult it is, but as soon as the Nicla arrives I'll start looking into it more.

    https://youtu.be/OwcoYQL4VEU
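
    From what I've read so far, Espruino devices can also act as a BLE central, so connecting to another device should look roughly like the sketch below. This is just a guess at the shape of it: the "Nicla" name prefix and the battery-service UUIDs are placeholder examples, not the Nicla's actual services.

    // Connect from a Bangle to another BLE device and read one characteristic.
    NRF.requestDevice({ filters: [{ namePrefix: "Nicla" }] }).then(function(device) {
      return device.gatt.connect();
    }).then(function(gatt) {
      return gatt.getPrimaryService("180f");     // standard battery service (example)
    }).then(function(service) {
      return service.getCharacteristic("2a19"); // battery level (example)
    }).then(function(characteristic) {
      return characteristic.readValue();
    }).then(function(value) {
      print("read:", value.getUint8(0));        // value is a DataView
    }).catch(function(err) {
      print("BLE error:", err);
    });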

  • Great! I especially liked the possibility of typing; it turns out you could combine this system with a browser and surf the Internet (e.g. combined with the Vimium plugin)?
    Theoretically, it would be possible to make an affordable device that could help, say, paralyzed people or people with injured hands.

    Then you could dream further and combine it with Espruino and things from the real world, for example to control a quadcopter. But that's just a thought =)
    Yes, it would be great to see some new AI project on the BJS watch!

    By the way, I saw the new video on the forum earlier, because notifications of new messages come to my mail. However, everything is at hand on YouTube; in the future it will be possible to quickly find any video =)

  • "Theoretically, it would be possible to make an affordable device that could help, say, paralyzed people or people with injured hands."
    Yes, it has been done, at least with professional/clinical EEG devices; I'm not sure about consumer devices, as I haven't researched that. I'll try to improve the ML model with more and "better" data through some biofeedback mechanism, and once that's done I'll see what I'll invent :-)

    But long before that I'll try to sneak in the BJS communicating with an Arduino device over Bluetooth; AI will play a part as well.
