path: root/client
author    Jules Laplace <jules@okfoc.us>    2017-04-27 15:39:35 -0400
committer Jules Laplace <jules@okfoc.us>    2017-04-27 15:39:35 -0400
commit    52596435d4839ef4026db8441598b32c3fd83c5c (patch)
tree      dfbd56021a274961f63e974195f69bd98e21aab2 /client
parent    2761ba1231686407af2e11786e29e84c69b0183f (diff)
make it work mobile
Diffstat (limited to 'client')
-rw-r--r--   client/index.js                    110
-rw-r--r--   client/lib/startAudioContext.js    181
-rw-r--r--   client/lib/util.js                  60
3 files changed, 311 insertions, 40 deletions
diff --git a/client/index.js b/client/index.js
index 0a0eed1..276ed2a 100644
--- a/client/index.js
+++ b/client/index.js
@@ -1,6 +1,7 @@
import keys from './lib/keys'
import color from './lib/color'
import kalimba from './lib/kalimba'
+import { browser, requestAudioContext } from './lib/util'
const root = 440
const s = 50
@@ -14,56 +15,87 @@ const add_off = 0.1
const mul_off = 0.9
let dragging = false
+let lastFreq = 0
+let notes = []
-for (var i = 0; i < ws; i++) {
- for (var j = 0; j < hs; j++) {
- add(i, j)
- }
-}
+requestAudioContext( () => {
+ for (var i = 0; i < ws; i++) {
+ notes[i] = []
+ for (var j = 0; j < hs; j++) {
+ notes[i][j] = add(i, j)
+ }
+ }
+})
function add (x, y) {
const i = x + 1
- const j = y + 1
+ const j = y + 1
const div = document.createElement('div')
- const freq = root * i/j
- let add = 0
- let frac = Math.log2(i/j) % 1
- div.style.left = (x * s) + 'px'
- div.style.top = (y * s) + 'px'
- div.innerHTML = `<div>${i}<\/div><div>\/</div><div>${j}<\/div>`
- if (frac < 0) {
- frac += 1
- console.log(frac)
- }
- if (i < j) {
+ const freq = root * i/j
+ let add = 0
+ let frac = Math.log2(i/j) % 1
+ div.style.left = (x * s) + 'px'
+ div.style.top = (y * s) + 'px'
+ div.innerHTML = `<div>${i}<\/div><div>\/</div><div>${j}<\/div>`
+ if (frac < 0) {
+ frac += 1
+ }
+ if (i < j) {
add = -Math.log(j/i) / 3.5
- }
- else {
+ }
+ else {
add = Math.log(i/j) / 6
+ }
+ if ( frac === 0) {
+ div.style.fontWeight = '900'
+ div.style.left = (x * s) + 'px'
+ div.style.top = (y * s) + 'px'
+ }
+ div.style.backgroundColor = color(frac, add_off + add, mul_off)
+
+ if (browser.isDesktop) {
+ div.addEventListener('mouseenter', function(){
+ div.style.backgroundColor = color(frac, add + add_on, mul_on)
+ if (dragging) {
+ kalimba.play( freq )
+ }
+ })
+ div.addEventListener('mouseleave', function(){
+ div.style.backgroundColor = color(frac, add + add_off, mul_off)
+ })
+ div.addEventListener('click', function(){
+ kalimba.play( freq )
+ })
}
- if ( frac === 0) {
- div.style.fontWeight = '900'
- div.style.left = (x * s) + 'px'
- div.style.top = (y * s) + 'px'
+ else {
+ div.addEventListener('touchstart', function(){
+ kalimba.play( freq )
+ lastFreq = freq
+ })
}
- div.style.backgroundColor = color(frac, add_off + add, mul_off)
- div.addEventListener('mouseenter', function(){
- div.style.backgroundColor = color(frac, add + add_on, mul_on)
- if (dragging) {
- kalimba.play( freq )
- }
- })
- div.addEventListener('mouseleave', function(){
- div.style.backgroundColor = color(frac, add + add_off, mul_off)
- })
- div.addEventListener('click', function(){
- kalimba.play( freq )
- })
- document.body.appendChild(div)
+ document.body.appendChild(div)
+ return freq
}
-document.addEventListener('mousedown', () => { dragging = true })
-document.addEventListener('mouseup', () => { dragging = false })
+if (browser.isDesktop) {
+ document.addEventListener('mousedown', () => { dragging = true })
+ document.addEventListener('mouseup', () => { dragging = false })
+}
+else {
+ document.addEventListener('touchstart', () => { dragging = true })
+ document.addEventListener('touchmove', (e) => {
+ e.preventDefault()
+ const x = Math.floor( e.touches[0].pageX / s )
+ const y = Math.floor( e.touches[0].pageY / s )
+ if (! (x in notes) || ! (y in notes[x])) return
+ const freq = notes[x][y]
+ if (freq !== lastFreq) {
+ kalimba.play( freq )
+ lastFreq = freq
+ }
+ })
+ document.addEventListener('touchend', () => { dragging = false })
+}
keys.listen(function(index){
// const freq = scales.current().index(index)
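For orientation, a worked example of the grid-to-frequency mapping the touchmove handler above relies on, using the constants from this diff (root = 440, s = 50); the touch coordinates are made up:

// a touch at pageX = 125, pageY = 75 falls in grid cell (2, 1)
const x = Math.floor(125 / s)   // 2
const y = Math.floor(75 / s)    // 1
// add(2, 1) created that cell with i = x + 1 = 3 and j = y + 1 = 2,
// so notes[2][1] holds root * i / j = 440 * 3 / 2 = 660 Hz,
// the frequency the handler passes to kalimba.play()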
diff --git a/client/lib/startAudioContext.js b/client/lib/startAudioContext.js
new file mode 100644
index 0000000..f3a9793
--- /dev/null
+++ b/client/lib/startAudioContext.js
@@ -0,0 +1,181 @@
+/**
+ * StartAudioContext.js
+ * @author Yotam Mann
+ * @license http://opensource.org/licenses/MIT MIT License
+ * @copyright 2016 Yotam Mann
+ */
+(function (root, factory) {
+ if (typeof define === "function" && define.amd) {
+ define([], factory);
+ } else if (typeof module === 'object' && module.exports) {
+ module.exports = factory();
+ } else {
+ root.StartAudioContext = factory();
+ }
+}(this, function () {
+
+ /**
+ * The StartAudioContext object
+ */
+ var StartAudioContext = {
+ /**
+ * The audio context passed in by the user
+ * @type {AudioContext}
+ */
+ context : null,
+ /**
+ * The TapListeners bound to the elements
+ * @type {Array}
+ * @private
+ */
+ _tapListeners : [],
+ /**
+ * Callbacks to invoke when the audio context is started
+ * @type {Array}
+ * @private
+ */
+ _onStarted : [],
+ };
+
+
+ /**
+ * Set the context
+ * @param {AudioContext} ctx
+ * @returns {StartAudioContext}
+ */
+ StartAudioContext.setContext = function(ctx){
+ StartAudioContext.context = ctx;
+ return StartAudioContext;
+ };
+
+ /**
+ * Add a tap listener to the audio context
+ * @param {Array|Element|String|jQuery} element
+ * @returns {StartAudioContext}
+ */
+ StartAudioContext.on = function(element){
+ if (Array.isArray(element) || (NodeList && element instanceof NodeList)){
+ for (var i = 0; i < element.length; i++){
+ StartAudioContext.on(element[i]);
+ }
+ } else if (typeof element === "string"){
+ StartAudioContext.on(document.querySelectorAll(element));
+ } else if (element.jquery && typeof element.toArray === "function"){
+ StartAudioContext.on(element.toArray());
+ } else if (Element && element instanceof Element){
+ //if it's an element, create a TapListener
+ var tap = new TapListener(element, onTap);
+ StartAudioContext._tapListeners.push(tap);
+ }
+ return StartAudioContext;
+ };
+
+ /**
+ * Bind a callback to when the audio context is started.
+ * @param {Function} cb
+ * @return {StartAudioContext}
+ */
+ StartAudioContext.onStarted = function(cb){
+ //if it's already started, invoke the callback
+ if (StartAudioContext.isStarted()){
+ cb();
+ } else {
+ StartAudioContext._onStarted.push(cb);
+ }
+ return StartAudioContext;
+ };
+
+ /**
+ * returns true if the context is started
+ * @return {Boolean}
+ */
+ StartAudioContext.isStarted = function(){
+ return (StartAudioContext.context !== null && StartAudioContext.context.state === "running");
+ };
+
+ /**
+ * @class Listens for non-dragging tap ends on the given element
+ * @param {Element} element
+ * @internal
+ */
+ var TapListener = function(element){
+
+ this._dragged = false;
+
+ this._element = element;
+
+ this._bindedMove = this._moved.bind(this);
+ this._bindedEnd = this._ended.bind(this);
+
+ element.addEventListener("touchmove", this._bindedMove);
+ element.addEventListener("touchend", this._bindedEnd);
+ element.addEventListener("mouseup", this._bindedEnd);
+ };
+
+ /**
+ * drag move event
+ */
+ TapListener.prototype._moved = function(e){
+ this._dragged = true;
+ };
+
+ /**
+ * tap ended listener
+ */
+ TapListener.prototype._ended = function(e){
+ if (!this._dragged){
+ onTap();
+ }
+ this._dragged = false;
+ };
+
+ /**
+ * remove all the bound events
+ */
+ TapListener.prototype.dispose = function(){
+ this._element.removeEventListener("touchmove", this._bindedMove);
+ this._element.removeEventListener("touchend", this._bindedEnd);
+ this._element.removeEventListener("mouseup", this._bindedEnd);
+ this._bindedMove = null;
+ this._bindedEnd = null;
+ this._element = null;
+ };
+
+ /**
+ * Invoked the first time one of the elements is tapped.
+ * Creates a silent oscillator when a non-dragging touchend
+ * event has been triggered.
+ */
+ function onTap(){
+ //start the audio context with a silent oscillator
+ if (StartAudioContext.context && !StartAudioContext.isStarted()){
+ var osc = StartAudioContext.context.createOscillator();
+ var silent = StartAudioContext.context.createGain();
+ silent.gain.value = 0;
+ osc.connect(silent);
+ silent.connect(StartAudioContext.context.destination);
+ var now = StartAudioContext.context.currentTime;
+ osc.start(now);
+ osc.stop(now+0.5);
+ }
+
+ //dispose all the tap listeners
+ if (StartAudioContext._tapListeners){
+ for (var i = 0; i < StartAudioContext._tapListeners.length; i++){
+ StartAudioContext._tapListeners[i].dispose();
+ }
+ StartAudioContext._tapListeners = null;
+ }
+ //the onstarted callbacks
+ if (StartAudioContext._onStarted){
+ for (var j = 0; j < StartAudioContext._onStarted.length; j++){
+ StartAudioContext._onStarted[j]();
+ }
+ StartAudioContext._onStarted = null;
+ }
+ }
+
+ return StartAudioContext;
+}));
+
+
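For reference, a minimal usage sketch of the StartAudioContext API added above (the selector and the callback body are illustrative; the project's actual wiring is in the util.js diff below):

var ctx = new AudioContext()
StartAudioContext.setContext(ctx)          // the context to unlock on the first tap
StartAudioContext.on('#start-button')      // string selectors go through querySelectorAll
StartAudioContext.onStarted(function () {
  // runs once ctx.state is "running"; safe to schedule audio here
})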
diff --git a/client/lib/util.js b/client/lib/util.js
index b2d95f5..4d9d038 100644
--- a/client/lib/util.js
+++ b/client/lib/util.js
@@ -1,3 +1,61 @@
+import Tone from 'tone'
+import StartAudioContext from './startAudioContext'
+
+const isIphone = (navigator.userAgent.match(/iPhone/i)) || (navigator.userAgent.match(/iPod/i))
+const isIpad = (navigator.userAgent.match(/iPad/i))
+const isAndroid = (navigator.userAgent.match(/Android/i))
+const isMobile = isIphone || isIpad || isAndroid
+const isDesktop = ! isMobile
+
+document.body.classList.add(isMobile ? 'mobile' : 'desktop')
+
+const browser = { isIphone, isIpad, isMobile, isDesktop }
+
function choice (a){ return a[ Math.floor(Math.random() * a.length) ] }
+function mod(n,m){ return n-(m * Math.floor(n/m)) }
+
+function requestAudioContext (fn) {
+ if (isMobile) {
+ const container = document.createElement('div')
+ const button = document.createElement('div')
+ button.innerHTML = 'Tap to start - please unmute your phone'
+ Object.assign(container.style, {
+ display: 'block',
+ position: 'absolute',
+ width: '100%',
+ height: '100%',
+ zIndex: '10000',
+ top: '0px',
+ left: '0px',
+ backgroundColor: 'rgba(0, 0, 0, 0.8)',
+ })
+ Object.assign(button.style, {
+ display: 'block',
+ position: 'absolute',
+ left: '50%',
+ top: '50%',
+ padding: '20px',
+ backgroundColor: '#7F33ED',
+ color: 'white',
+ fontFamily: 'monospace',
+ borderRadius: '3px',
+ transform: 'translate3D(-50%,-50%,0)',
+ textAlign: 'center',
+ lineHeight: '1.5',
+ width: '150px',
+ })
+ container.appendChild(button)
+ document.body.appendChild(container)
+ StartAudioContext.setContext(Tone.context)
+ StartAudioContext.on(button)
+ StartAudioContext.onStarted(_ => {
+ container.remove()
+ fn()
+ })
+ } else {
+ fn()
+ }
+}
+
+export { choice, mod, browser, requestAudioContext }
-export { choice }
\ No newline at end of file
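One note on the new mod helper: unlike JavaScript's native %, it always returns a value in [0, m), which is the same fixup index.js does by hand when Math.log2(i/j) % 1 comes out negative. A quick illustration (the values are just examples):

mod(-0.3, 1)   // ≈ 0.7   (-0.3 - 1 * Math.floor(-0.3 / 1))
-0.3 % 1       // -0.3    (the native remainder keeps the sign of n)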