Skip to content

Instantly share code, notes, and snippets.

@cluzier
Created May 1, 2019 04:07
Show Gist options
  • Save cluzier/d6172deda3daa65419ada2fc7186481a to your computer and use it in GitHub Desktop.
Hand Tracking API
<!-- Demo page for the handtrack.js hand-tracking example.
     Layout: a toggle button with a status note, the webcam <video>
     element, and a <canvas> that track.js draws detection boxes onto. -->
<body class="bx--body p20">
<div class="mb10">
<!-- NOTE(review): track.js sets trackButton.disabled = false after the
     model loads; consider adding the `disabled` attribute here so the
     button cannot be clicked before loading finishes. -->
<button onclick="toggleVideo()" id="trackbutton" class="bx--btn bx--btn--secondary" type="button">
Toggle Video
</button>
<!-- Status text updated by track.js (loading / tracking / stopped). -->
<div id="updatenote" class="updatenote mt10"> loading model ..</div>
</div>
<!-- Live webcam feed; started/stopped via handTrack.startVideo/stopVideo. -->
<video class="videobox canvasbox" autoplay="autoplay" id="myvideo"></video>
<!-- Overlay canvas where model.renderPredictions() draws boxes. -->
<canvas id="canvas" class="border canvasbox"></canvas>
<!-- Carbon Design System UI, the handtrack.js library, then app logic. -->
<script src="https://unpkg.com/carbon-components@latest/scripts/carbon-components.js"></script>
<script src="https://cdn.jsdelivr.net/npm/handtrackjs/dist/handtrack.min.js"> </script>
<script src="track.js"></script>
</body>
// DOM references for the video feed, drawing canvas, and UI controls.
const video = document.getElementById("myvideo");
const canvas = document.getElementById("canvas");
const context = canvas.getContext("2d");
const trackButton = document.getElementById("trackbutton");
const updateNote = document.getElementById("updatenote");

// Disable the toggle until the model finishes loading; the load callback
// re-enables it. Without this, clicking before the async load completes
// would reach runDetection() with model === null and throw.
trackButton.disabled = true;

let isVideo = false; // true while the webcam stream is running
let model = null;    // handtrack.js model, assigned once load() resolves

// Tuning parameters passed to handTrack.load().
const modelParams = {
  flipHorizontal: true, // mirror input, e.g. for selfie-style video
  maxNumBoxes: 20,      // maximum number of boxes to detect
  iouThreshold: 0.5,    // IoU threshold for non-max suppression
  scoreThreshold: 0.6,  // confidence threshold for predictions
};
// Start the webcam stream. On success, mark tracking as active and kick
// off the detection loop; on failure, ask the user to enable video.
function startVideo() {
  handTrack.startVideo(video).then(function (status) {
    console.log("video started", status);
    if (!status) {
      updateNote.innerText = "Please enable video";
      return;
    }
    updateNote.innerText = "Video started. Now tracking";
    isVideo = true;
    runDetection();
  });
}
// Click handler for the UI button: flips the webcam stream on or off.
function toggleVideo() {
  if (isVideo) {
    updateNote.innerText = "Stopping video";
    handTrack.stopVideo(video);
    isVideo = false;
    updateNote.innerText = "Video stopped";
  } else {
    updateNote.innerText = "Starting video";
    startVideo();
  }
}
// Run one hand-detection pass on the current video frame, draw the
// predictions onto the canvas, and schedule the next pass while the
// video is still active.
function runDetection() {
  // Guard: detection is meaningless (and would throw) before the
  // async model load has completed.
  if (!model) {
    return;
  }
  model
    .detect(video)
    .then((predictions) => {
      console.log("Predictions: ", predictions);
      model.renderPredictions(predictions, canvas, context, video);
      if (isVideo) {
        requestAnimationFrame(runDetection);
      }
    })
    .catch((err) => {
      // Previously this promise had no rejection handler, so any
      // detection failure became a silent unhandled rejection.
      console.error("detection failed", err);
    });
}
// Load the model, then enable the toggle button. A .catch is attached so
// a failed load is reported instead of leaving the page stuck on the
// "loading model" note with a silent unhandled rejection.
handTrack
  .load(modelParams)
  .then((lmodel) => {
    model = lmodel;
    updateNote.innerText = "Loaded Model!";
    trackButton.disabled = false;
  })
  .catch((err) => {
    console.error("model load failed", err);
    updateNote.innerText = "Failed to load model";
  });
<!-- NOTE(review): duplicate of the handtrack.js CDN include already in
     the page body; loading the library twice is redundant. -->
<script src="https://cdn.jsdelivr.net/npm/handtrackjs/dist/handtrack.min.js"></script>
/* Center the page content in a column and give it a light backdrop. */
body {
display:flex;
flex-direction:column;
align-items:center;
justify-content:center;
padding: 20px;
background: #f3f3f3;
}
/* Utility: 20px padding (used on <body>). */
.p20 {
padding: 20px;
}
/* Shared sizing/chrome for the <video> feed and the overlay <canvas>.
   Fixed 450x338 box (4:3) with a dark fallback background. */
.canvasbox {
border-radius: 3px;
margin-right: 10px;
width: 450px;
height: 338px;
border-bottom: 3px solid #0063FF;
box-shadow: 0 2px 3px 0 rgba(0, 0, 0, 0.2), 0 4px 10px 0 #00000030;
background: #333;
}
/* Utility: 10px bottom margin. */
.mb10 {
margin-bottom: 10px
}
/* Utility: 10px top margin. */
.mt10 {
margin-top: 10px
}
/* Orange status badge whose text track.js updates at runtime. */
.updatenote {
padding: 10px;
background: rgb(245, 147, 20);
color: white;
display: inline;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment