slr_google_landmarks_demo/holistic.html
HTML demos for face, hand, gesture, and posture tracking using MediaPipe.
Includes Python CLI tools for processing video files.

<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>MediaPipe Holistic — Main Output Only</title>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link href="https://fonts.googleapis.com/css2?family=Titillium+Web:wght@400;600&display=swap" rel="stylesheet">
<style>
@keyframes spin { 0% {transform: rotate(0)} 100% {transform: rotate(360deg)} }
.abs { position: absolute; }
a { color: white; text-decoration: none; } a:hover { color: lightblue; }
body {
margin: 0; color: white; font-family: 'Titillium Web', sans-serif;
position: absolute; inset: 0; overflow: hidden; background: #000;
}
.container {
position: absolute; inset: 0; background-color: #596e73; height: 100%;
}
.canvas-container {
display: flex; height: 100%; width: 100%;
justify-content: center; align-items: center;
}
.output_canvas { max-width: 100%; display: block; position: relative; }
/* Hide ALL video elements so only the processed canvas is visible */
video { display: none !important; }
.control-panel { position: absolute; left: 10px; top: 10px; z-index: 6; }
.loading {
display: flex; position: absolute; inset: 0; align-items: center; justify-content: center;
backface-visibility: hidden; opacity: 1; transition: opacity 1s; z-index: 10;
}
.loading .spinner {
position: absolute; width: 120px; height: 120px; animation: spin 1s linear infinite;
border: 32px solid #bebebe; border-top: 32px solid #3498db; border-radius: 50%;
}
.loading .message { font-size: x-large; }
.loaded .loading { opacity: 0; }
.logo { bottom: 10px; right: 20px; }
.logo .title { color: white; font-size: 28px; }
.shoutout { left: 0; right: 0; bottom: 40px; text-align: center; font-size: 24px; position: absolute; z-index: 4; }
</style>
</head>
<body>
<div class="container">
<!-- Hidden capture element kept for MediaPipe pipeline -->
<video class="input_video" playsinline></video>
<div class="canvas-container">
<canvas class="output_canvas" width="1280" height="720"></canvas>
</div>
<!-- Loading spinner -->
<div class="loading">
<div class="spinner"></div>
<div class="message">Loading</div>
</div>
<!-- Logo/link -->
<a class="abs logo" href="https://mediapipe.dev" target="_blank" rel="noreferrer">
<div style="display:flex;align-items:center;bottom:0;right:10px;">
<img class="logo" alt="" style="height:50px"
src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGMAAQAABQABJtqz7QAAAABJRU5ErkJggg==" />
<span class="title" style="margin-left:8px">MediaPipe</span>
</div>
</a>
<!-- Info link -->
<div class="shoutout">
<div><a href="https://solutions.mediapipe.dev/holistic" target="_blank" rel="noreferrer">Click here for more info</a></div>
</div>
</div>
<!-- Control panel container -->
<div class="control-panel"></div>
<!-- MediaPipe libs (classic scripts; they expose Holistic, FPS, drawConnectors, etc. as window globals) -->
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/holistic/holistic.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/control_utils/control_utils.js"></script>
<!-- Device detector is ESM; we import it and run the app -->
<script type="module">
import DeviceDetector from "https://cdn.skypack.dev/device-detector-js@2.2.10";
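// Warn (without blocking) when the detected browser is not in the supported list.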
function testSupport(supportedDevices) {
const dd = new DeviceDetector();
const d = dd.parse(navigator.userAgent);
let ok = false;
for (const dev of supportedDevices) {
if (dev.client && !(new RegExp(`^${dev.client}$`)).test(d.client?.name ?? '')) continue;
if (dev.os && !(new RegExp(`^${dev.os}$`)).test(d.os?.name ?? '')) continue;
ok = true; break;
}
if (!ok) alert(`This demo, running on ${d.client?.name ?? 'an unknown browser'}/${d.os?.name ?? 'an unknown OS'}, is not well supported at this time; continue at your own risk.`);
}
testSupport([{ client: 'Chrome' }]);
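// The classic scripts above attach their exports (Holistic, POSE_CONNECTIONS, FPS,
// drawConnectors, ...) directly to window, so window is aliased as each namespace.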
const controlsNS = window;
const mpHolistic = window;
const drawingUtils = window;
const videoElement = document.getElementsByClassName('input_video')[0];
const canvasElement = document.getElementsByClassName('output_canvas')[0];
const controlsElement = document.getElementsByClassName('control-panel')[0];
const canvasCtx = canvasElement.getContext('2d');
const fpsControl = new controlsNS.FPS();
const spinner = document.querySelector('.loading');
spinner.ontransitionend = () => { spinner.style.display = 'none'; };
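// Helper to delete selected indices from a landmark list; used below to strip the
// pose's face and hand points (0-10, 15-22) so the finer face-mesh and hand models
// can draw those regions instead.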
function removeElements(landmarks, elements) {
if (!landmarks) return;
for (const e of elements) delete landmarks[e];
}
function removeLandmarks(results) {
if (results.poseLandmarks) {
removeElements(results.poseLandmarks, [0,1,2,3,4,5,6,7,8,9,10,15,16,17,18,19,20,21,22]);
}
}
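// Draw a line between each pair of normalized landmarks, scaled to canvas pixels,
// skipping pairs where either endpoint is barely visible.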
function connect(ctx, connectors) {
const c = ctx.canvas;
for (const [from, to] of connectors) {
if (!from || !to) continue;
if (from.visibility && to.visibility && (from.visibility < 0.1 || to.visibility < 0.1)) continue;
ctx.beginPath();
ctx.moveTo(from.x * c.width, from.y * c.height);
ctx.lineTo(to.x * c.width, to.y * c.height);
ctx.stroke();
}
}
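// Segmentation effect chosen in the control panel: 'mask' tints the detected person,
// 'background' tints everything else.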
let activeEffect = 'mask';
function onResults(results) {
document.body.classList.add('loaded');
removeLandmarks(results);
fpsControl.tick();
canvasCtx.save();
canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
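// With segmentation enabled, tint either the person (source-in, green) or the
// background (source-out, blue), then composite the camera frame behind the tint
// with destination-atop before drawing the landmark overlays on top.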
if (results.segmentationMask) {
canvasCtx.drawImage(results.segmentationMask, 0, 0, canvasElement.width, canvasElement.height);
if (activeEffect === 'mask' || activeEffect === 'both') {
canvasCtx.globalCompositeOperation = 'source-in';
canvasCtx.fillStyle = '#00FF007F';
canvasCtx.fillRect(0, 0, canvasElement.width, canvasElement.height);
} else {
canvasCtx.globalCompositeOperation = 'source-out';
canvasCtx.fillStyle = '#0000FF7F';
canvasCtx.fillRect(0, 0, canvasElement.width, canvasElement.height);
}
canvasCtx.globalCompositeOperation = 'destination-atop';
canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);
canvasCtx.globalCompositeOperation = 'source-over';
} else {
canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);
}
canvasCtx.lineWidth = 5;
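// Bridge each pose elbow to the matching hand's wrist (hand landmark 0) so the
// pose and hand skeletons join visually.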
if (results.poseLandmarks) {
if (results.rightHandLandmarks) {
canvasCtx.strokeStyle = 'white';
connect(canvasCtx, [[
results.poseLandmarks[mpHolistic.POSE_LANDMARKS.RIGHT_ELBOW],
results.rightHandLandmarks[0]
]]);
}
if (results.leftHandLandmarks) {
canvasCtx.strokeStyle = 'white';
connect(canvasCtx, [[
results.poseLandmarks[mpHolistic.POSE_LANDMARKS.LEFT_ELBOW],
results.leftHandLandmarks[0]
]]);
}
}
drawingUtils.drawConnectors(canvasCtx, results.poseLandmarks, mpHolistic.POSE_CONNECTIONS, { color: 'white' });
drawingUtils.drawLandmarks(
canvasCtx,
Object.values(mpHolistic.POSE_LANDMARKS_LEFT).map(i => results.poseLandmarks?.[i]),
{ visibilityMin: 0.65, color: 'white', fillColor: 'rgb(255,138,0)' }
);
drawingUtils.drawLandmarks(
canvasCtx,
Object.values(mpHolistic.POSE_LANDMARKS_RIGHT).map(i => results.poseLandmarks?.[i]),
{ visibilityMin: 0.65, color: 'white', fillColor: 'rgb(0,217,231)' }
);
drawingUtils.drawConnectors(canvasCtx, results.rightHandLandmarks, mpHolistic.HAND_CONNECTIONS, { color: 'white' });
drawingUtils.drawLandmarks(canvasCtx, results.rightHandLandmarks, {
color: 'white', fillColor: 'rgb(0,217,231)', lineWidth: 2,
radius: (data) => drawingUtils.lerp(data.from?.z ?? 0, -0.15, 0.1, 10, 1)
});
drawingUtils.drawConnectors(canvasCtx, results.leftHandLandmarks, mpHolistic.HAND_CONNECTIONS, { color: 'white' });
drawingUtils.drawLandmarks(canvasCtx, results.leftHandLandmarks, {
color: 'white', fillColor: 'rgb(255,138,0)', lineWidth: 2,
radius: (data) => drawingUtils.lerp(data.from?.z ?? 0, -0.15, 0.1, 10, 1)
});
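// Face mesh overlays: faint tesselation, eyes and eyebrows color-coded per side,
// heavier strokes for the face oval and lips.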
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_TESSELATION, { color: '#C0C0C070', lineWidth: 1 });
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_RIGHT_EYE, { color: 'rgb(0,217,231)' });
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_RIGHT_EYEBROW, { color: 'rgb(0,217,231)' });
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LEFT_EYE, { color: 'rgb(255,138,0)' });
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LEFT_EYEBROW, { color: 'rgb(255,138,0)' });
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_FACE_OVAL, { color: '#E0E0E0', lineWidth: 5 });
drawingUtils.drawConnectors(canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LIPS, { color: '#E0E0E0', lineWidth: 5 });
canvasCtx.restore();
}
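// locateFile points the solution at the versioned CDN directory that hosts its
// WASM binaries and model files.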
const holistic = new mpHolistic.Holistic({
locateFile: (file) => `https://cdn.jsdelivr.net/npm/@mediapipe/holistic@${mpHolistic.VERSION}/${file}`
});
holistic.onResults(onResults);
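// The control panel fields mirror the Holistic options; the SourcePicker supplies
// frames from the webcam or a chosen video, and each change is forwarded to the
// graph in the .on() handler below.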
new controlsNS.ControlPanel(controlsElement, {
selfieMode: true,
modelComplexity: 1,
smoothLandmarks: true,
enableSegmentation: false,
smoothSegmentation: true,
minDetectionConfidence: 0.5,
minTrackingConfidence: 0.5,
effect: 'background',
})
.add([
new controlsNS.StaticText({ title: 'MediaPipe Holistic' }),
fpsControl,
new controlsNS.Toggle({ title: 'Selfie Mode', field: 'selfieMode' }),
new controlsNS.SourcePicker({
onSourceChanged: () => { holistic.reset(); },
onFrame: async (input, size) => {
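// Size the output canvas to fit the window while keeping the source frame's
// aspect ratio, then hand the frame to the graph.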
const aspect = size.height / size.width;
let width, height;
if (window.innerWidth > window.innerHeight) {
height = window.innerHeight; width = height / aspect;
} else {
width = window.innerWidth; height = width * aspect;
}
canvasElement.width = width;
canvasElement.height = height;
await holistic.send({ image: input });
},
}),
new controlsNS.Slider({ title: 'Model Complexity', field: 'modelComplexity', discrete: ['Lite', 'Full', 'Heavy'] }),
new controlsNS.Toggle({ title: 'Smooth Landmarks', field: 'smoothLandmarks' }),
new controlsNS.Toggle({ title: 'Enable Segmentation', field: 'enableSegmentation' }),
new controlsNS.Toggle({ title: 'Smooth Segmentation', field: 'smoothSegmentation' }),
new controlsNS.Slider({ title: 'Min Detection Confidence', field: 'minDetectionConfidence', range: [0, 1], step: 0.01 }),
new controlsNS.Slider({ title: 'Min Tracking Confidence', field: 'minTrackingConfidence', range: [0, 1], step: 0.01 }),
new controlsNS.Slider({ title: 'Effect', field: 'effect', discrete: { background: 'Background', mask: 'Foreground' } }),
])
.on(options => {
videoElement.classList.toggle('selfie', !!options.selfieMode);
activeEffect = options.effect;
holistic.setOptions(options);
});
</script>
</body>
</html>