Interaction: Webcams

Capture live video and sample pixel color for interaction feedback.

Example 1: Webcam Mirror

Code

let cam;

// Create the canvas and start capturing from the webcam.
function setup() {
  createCanvas(400, 400);

  // Start the webcam stream. hide() removes the default <video>
  // element from the page; the feed is drawn onto the canvas
  // instead via image() in draw().
  cam = createCapture(VIDEO);
  cam.hide();
  cam.size(400, 400);
}

// Draw the mirrored webcam feed, then overlay a ring at the mouse.
function draw() {
  background(0);

  // Mirror horizontally: flip the x-axis, then draw the frame at
  // x = -width so the flipped image lands back on the visible canvas.
  // (Equivalent to translate(width, 0) followed by scale(-1, 1).)
  push();
  scale(-1, 1);
  image(cam, -width, 0, width, height);
  pop();

  // Cursor-following overlay ring on top of the feed.
  noFill();
  stroke(255, 60, 145);
  strokeWeight(3);
  circle(mouseX, mouseY, 50);
}

Try this: Draw other simple overlays (lines, text, shapes) on top of the mirrored feed.

Open sketch in new tab

Example 2: Sample Webcam Pixel

Code

let cam;

// Set up the canvas and the webcam capture used for pixel sampling.
function setup() {
  createCanvas(400, 400);

  // Capture the webcam at the same resolution as the canvas so the
  // mouse position maps cleanly onto the capture's pixel grid.
  cam = createCapture(VIDEO);
  cam.hide();
  cam.size(400, 400);
}

// Draw the webcam feed and show the color under the mouse in a swatch.
function draw() {
  background(20);
  image(cam, 0, 0, width, height);

  // Copy the current frame into cam.pixels (flat RGBA, 4 bytes/pixel).
  cam.loadPixels();

  // Until the camera delivers its first frame, cam.pixels is empty and
  // indexing into it yields undefined channel values — skip sampling.
  if (cam.pixels.length === 0) {
    return;
  }

  // Map the mouse position into the capture's own pixel coordinates
  // (clamped via map's constrain flag so idx stays in bounds).
  const sx = int(map(mouseX, 0, width, 0, cam.width - 1, true));
  const sy = int(map(mouseY, 0, height, 0, cam.height - 1, true));
  const idx = (sy * cam.width + sx) * 4;
  const c = color(cam.pixels[idx], cam.pixels[idx + 1], cam.pixels[idx + 2]);

  // Swatch showing the sampled color.
  fill(c);
  noStroke();
  rect(10, 10, 70, 70, 8);
}

Try this: Average a 5x5 region instead of a single pixel sample.

Open sketch in new tab

Example 3: Webcam + Hand Tracking (ml5)

Webcam required. This example demonstrates how to combine the webcam with machine learning to track hands and their landmarks in real time using ml5.js and p5.js.

Example Code (ml5 Hands)

This example uses ml5.js (built on TensorFlow.js).

let video;
let handPose;
let hands = [];

// Load the ml5 hand-pose model before setup() runs.
// flipped: true mirrors detections so they line up with the
// mirrored video created in setup(); maxHands caps detection at 2.
function preload() {
  handPose = ml5.handPose({ flipped: true, maxHands: 2 });
}

// Create the canvas, start the (mirrored) webcam, and begin detection.
function setup() {
  createCanvas(400, 400);

  // flipped: true mirrors the video element so it matches the
  // flipped detections configured in preload().
  video = createCapture(VIDEO, { flipped: true });
  video.hide();
  video.size(400, 400);

  // Run the model continuously; gotHands receives each new result set.
  handPose.detectStart(video, gotHands);
}

// Each frame: draw the video feed, then the latest detected landmarks.
function draw() {
  image(video, 0, 0, width, height);
  drawHandLandmarks();
}

// Detection callback: stash the latest results for draw() to render.
function gotHands(results) {
  hands = results;
}

// Render every detected hand: a dot per keypoint plus a wrist label.
function drawHandLandmarks() {
  noStroke();
  for (const hand of hands) {
    // The dot color is loop-invariant, so set it once per hand instead
    // of once per keypoint. (It must be reset each hand because the
    // label below switches the fill to dark.)
    fill(255, 45, 140, 190);
    for (const kp of hand.keypoints) {
      circle(kp.x, kp.y, 10);
    }

    // keypoints[0] is treated as the wrist landmark here; guard in
    // case the model returns no keypoints for this hand.
    const wrist = hand.keypoints[0];
    if (wrist) {
      fill(20);
      textSize(14);
      text('hand', wrist.x + 10, wrist.y - 8);
    }
  }
}

Open in new tab

How it works

createCapture(VIDEO) gets the webcam stream. The ml5 Hands model (hand pose / hand landmarks) detects points each frame. Those keypoints are drawn as circles, and you can map their positions to control graphics, audio, or UI interactions in real time.

Try this

  • Draw circles only on fingertips.
  • Measure distance between thumb and index finger and map it to brush size.
  • Use wrist position to control color hue or shape scale.

Resources