useUserMedia
A hook for accessing camera and microphone using getUserMedia
A React hook for accessing the user's camera and microphone using the getUserMedia API. Provides controls for starting, stopping, and switching devices.
Source Code
View the full hook implementation in the Hook Source Code section below.
Related Hooks
Use with useMediaDevices to let users select specific cameras/microphones.
Features
- Stream Management - Start/stop media streams with proper cleanup
- Device Switching - Switch between cameras and microphones on the fly
- Constraint Support - Pass custom MediaTrackConstraints for resolution, frame rate, etc.
- SSR Safe - No issues with server-side rendering
- Auto Cleanup - Tracks are automatically stopped on unmount
Basic Usage
Simple camera preview with start/stop controls:
getUserMedia is not supported in this browser
"use client";
import { useRef, useEffect } from "react";
import { useUserMedia } from "@repo/hooks/webrtc/use-user-media";
import { Button } from "@repo/ui/components/button";
import { Video, VideoOff, Loader2 } from "lucide-react";
/* BASIC CAMERA PREVIEW - Start/Stop Video Stream */
// Demo 1: basic camera preview — start/stop a stream with the hook's default
// constraints, with a loading overlay and inline error display.
// NOTE(review): Button receives `isDisabled` rather than the DOM `disabled`
// attribute — presumably the project Button's prop API; confirm against
// @repo/ui/components/button.
export const Example1 = () => {
const videoRef = useRef<HTMLVideoElement>(null);
const { stream, isActive, isLoading, error, start, stop, isSupported } =
useUserMedia();
// Attach stream to video element. Effects run after commit, and the <video>
// mounts in the same render in which `stream` becomes non-null (isActive is
// derived from stream), so videoRef.current is set by the time this runs.
useEffect(() => {
if (videoRef.current && stream) {
videoRef.current.srcObject = stream;
}
}, [stream]);
// Static fallback when the getUserMedia API is not available at all.
if (!isSupported) {
return (
<div className="text-destructive rounded-lg border border-red-500/50 bg-red-500/10 p-4 text-center text-sm">
getUserMedia is not supported in this browser
</div>
);
}
return (
<div className="flex w-full max-w-md flex-col gap-4">
{/* Video Preview */}
<div className="bg-muted relative aspect-video overflow-hidden rounded-lg">
{isActive ? (
<video
ref={videoRef}
autoPlay
playsInline
muted
className="h-full w-full object-cover"
/>
) : (
<div className="text-muted-foreground flex h-full items-center justify-center">
<VideoOff className="h-12 w-12" />
</div>
)}
{isLoading && (
<div className="absolute inset-0 flex items-center justify-center bg-black/50">
<Loader2 className="h-8 w-8 animate-spin text-white" />
</div>
)}
</div>
{/* Error Message */}
{error && (
<div className="text-destructive rounded-md bg-red-500/10 p-3 text-sm">
{error.message}
</div>
)}
{/* Controls */}
<div className="flex justify-center gap-2">
{isActive ? (
<Button
variant="destructive"
onClick={stop}
className="gap-2"
>
<VideoOff className="h-4 w-4" />
Stop Camera
</Button>
) : (
<Button
onClick={() => start()}
isDisabled={isLoading}
className="gap-2"
>
<Video className="h-4 w-4" />
Start Camera
</Button>
)}
</div>
</div>
);
};
Device Selection
Combine with useMediaDevices to let users pick their camera and microphone:
No camera active
"use client";
import { useRef, useEffect, useState } from "react";
import { useUserMedia } from "@repo/hooks/webrtc/use-user-media";
import { useMediaDevices } from "@repo/hooks/webrtc/use-media-devices";
import { Button } from "@repo/ui/components/button";
import { Camera, Mic, RefreshCw } from "lucide-react";
/* DEVICE SELECTION - Choose Camera/Microphone from Dropdown */
// Demo 2: device selection — pairs useUserMedia with useMediaDevices so the
// user can pick a camera/microphone; switching applies live when a stream
// is active.
// NOTE(review): handleStart ignores selectedCamera/selectedMic and starts
// with default constraints; device choice only takes effect via the switch
// handlers once active. Possibly intentional for the demo — confirm.
export const Example2 = () => {
const videoRef = useRef<HTMLVideoElement>(null);
const [selectedCamera, setSelectedCamera] = useState<string>("");
const [selectedMic, setSelectedMic] = useState<string>("");
// Get device list
const {
devices,
hasPermission,
requestPermission,
refetch: refetchDevices,
} = useMediaDevices();
// Get media stream
const {
stream,
isActive,
start,
stop,
switchVideoDevice,
switchAudioDevice,
} = useUserMedia();
// Filter devices by type
const cameras = devices.filter((d) => d.kind === "videoinput");
const microphones = devices.filter((d) => d.kind === "audioinput");
// Attach stream to video
useEffect(() => {
if (videoRef.current && stream) {
videoRef.current.srcObject = stream;
}
}, [stream]);
// Request permissions and start. The refetch after granting permission
// re-reads the device list (labels may be empty until permission is granted
// — presumably why the `camera.label ||` fallbacks below exist; verify).
const handleStart = async () => {
if (!hasPermission) {
await requestPermission({ audio: true, video: true });
await refetchDevices();
}
await start();
};
// Handle camera change: remember the selection, and switch the live stream
// only when one is active.
const handleCameraChange = async (deviceId: string) => {
setSelectedCamera(deviceId);
if (isActive) {
await switchVideoDevice(deviceId);
}
};
// Handle mic change (same pattern as the camera handler).
const handleMicChange = async (deviceId: string) => {
setSelectedMic(deviceId);
if (isActive) {
await switchAudioDevice(deviceId);
}
};
return (
<div className="flex w-full max-w-md flex-col gap-4">
{/* Video Preview */}
<div className="bg-muted relative aspect-video overflow-hidden rounded-lg">
{isActive ? (
<video
ref={videoRef}
autoPlay
playsInline
muted
className="h-full w-full scale-x-[-1] object-cover"
/>
) : (
<div className="text-muted-foreground flex h-full flex-col items-center justify-center gap-2">
<Camera className="h-10 w-10" />
<span className="text-sm">No camera active</span>
</div>
)}
</div>
{/* Device Selectors */}
<div className="space-y-3">
{/* Camera Select */}
<div>
<label className="text-muted-foreground mb-1.5 flex items-center gap-2 text-sm font-medium">
<Camera className="h-4 w-4" />
Camera
</label>
<select
className="bg-background w-full rounded-md border px-3 py-2 text-sm"
value={selectedCamera}
onChange={(e) => handleCameraChange(e.target.value)}
disabled={cameras.length === 0}
>
<option value="">
{cameras.length === 0
? "No cameras found"
: "Select camera..."}
</option>
{cameras.map((camera) => (
<option
key={camera.deviceId}
value={camera.deviceId}
>
{camera.label ||
`Camera ${cameras.indexOf(camera) + 1}`}
</option>
))}
</select>
</div>
{/* Microphone Select */}
<div>
<label className="text-muted-foreground mb-1.5 flex items-center gap-2 text-sm font-medium">
<Mic className="h-4 w-4" />
Microphone
</label>
<select
className="bg-background w-full rounded-md border px-3 py-2 text-sm"
value={selectedMic}
onChange={(e) => handleMicChange(e.target.value)}
disabled={microphones.length === 0}
>
<option value="">
{microphones.length === 0
? "No microphones found"
: "Select microphone..."}
</option>
{microphones.map((mic) => (
<option key={mic.deviceId} value={mic.deviceId}>
{mic.label ||
`Microphone ${microphones.indexOf(mic) + 1}`}
</option>
))}
</select>
</div>
</div>
{/* Controls */}
<div className="flex gap-2">
{isActive ? (
<Button
variant="destructive"
onClick={stop}
className="flex-1"
>
Stop
</Button>
) : (
<Button onClick={handleStart} className="flex-1">
Start Camera
</Button>
)}
<Button variant="outline" size="icon" onClick={refetchDevices}>
<RefreshCw className="h-4 w-4" />
</Button>
</div>
</div>
);
};
Mute Controls
Use with useTrackToggle for mute/unmute functionality:
Not Started
"use client";
import { useRef, useEffect } from "react";
import { useUserMedia } from "@repo/hooks/webrtc/use-user-media";
import { useTrackToggle } from "@repo/hooks/webrtc/use-track-toggle";
import { Button } from "@repo/ui/components/button";
import { Mic, MicOff, Video, VideoOff } from "lucide-react";
/* MUTE CONTROLS - Toggle Audio & Video Tracks */
// Demo 3: mute controls — toggles audio/video tracks on a live stream via
// useTrackToggle, with mute badges and a status line.
export const Example3 = () => {
const videoRef = useRef<HTMLVideoElement>(null);
const { stream, isActive, start, stop } = useUserMedia();
const { isAudioEnabled, isVideoEnabled, toggleAudio, toggleVideo } =
useTrackToggle(stream);
// Attach stream to video.
// FIX: isVideoEnabled is a dependency because the <video> element below is
// unmounted while the camera is toggled off; when it remounts, the fresh
// element needs srcObject assigned again — a [stream]-only effect would
// skip that and leave the preview black.
useEffect(() => {
if (videoRef.current && stream) {
videoRef.current.srcObject = stream;
}
}, [stream, isVideoEnabled]);
return (
<div className="flex w-full max-w-md flex-col gap-4">
{/* Video Preview */}
<div className="bg-muted relative aspect-video overflow-hidden rounded-lg">
{isActive && isVideoEnabled ? (
<video
ref={videoRef}
autoPlay
playsInline
muted
className="h-full w-full scale-x-[-1] object-cover"
/>
) : (
<div className="text-muted-foreground flex h-full flex-col items-center justify-center gap-2 bg-zinc-900">
<VideoOff className="h-12 w-12 text-zinc-600" />
<span className="text-sm text-zinc-500">
{isActive ? "Camera Off" : "Not Started"}
</span>
</div>
)}
{/* Mute Indicators */}
{isActive && (
<div className="absolute bottom-3 left-3 flex gap-2">
{!isAudioEnabled && (
<div className="flex items-center gap-1 rounded-full bg-red-500 px-2 py-1 text-xs text-white">
<MicOff className="h-3 w-3" />
Muted
</div>
)}
{!isVideoEnabled && (
<div className="flex items-center gap-1 rounded-full bg-red-500 px-2 py-1 text-xs text-white">
<VideoOff className="h-3 w-3" />
Off
</div>
)}
</div>
)}
</div>
{/* Controls */}
<div className="flex justify-center gap-2">
{!isActive ? (
<Button onClick={() => start()}>Start Camera</Button>
) : (
<>
{/* Audio Toggle */}
<Button
variant={isAudioEnabled ? "outline" : "destructive"}
size="icon"
onClick={toggleAudio}
aria-label={isAudioEnabled ? "Mute" : "Unmute"}
>
{isAudioEnabled ? (
<Mic className="h-5 w-5" />
) : (
<MicOff className="h-5 w-5" />
)}
</Button>
{/* Video Toggle */}
<Button
variant={isVideoEnabled ? "outline" : "destructive"}
size="icon"
onClick={toggleVideo}
aria-label={
isVideoEnabled
? "Turn off camera"
: "Turn on camera"
}
>
{isVideoEnabled ? (
<Video className="h-5 w-5" />
) : (
<VideoOff className="h-5 w-5" />
)}
</Button>
{/* Stop */}
<Button variant="ghost" onClick={stop}>
End
</Button>
</>
)}
</div>
{/* Status */}
{isActive && (
<div className="text-muted-foreground text-center text-xs">
Audio: {isAudioEnabled ? "On" : "Off"} • Video:{" "}
{isVideoEnabled ? "On" : "Off"}
</div>
)}
</div>
);
};
Audio Level Detection
Combine with useAudioLevel for speaking indicators:
"use client";
import { useRef, useEffect } from "react";
import { useUserMedia } from "@repo/hooks/webrtc/use-user-media";
import { useAudioLevel } from "@repo/hooks/webrtc/use-audio-level";
import { Button } from "@repo/ui/components/button";
import { Mic, Video, Activity } from "lucide-react";
/* AUDIO LEVEL INDICATOR - Speaking Detection */
// Demo 4: audio level detection — pairs useUserMedia with useAudioLevel to
// show a speaking ring, a live volume meter, and a resettable peak marker.
export const Example4 = () => {
const videoRef = useRef<HTMLVideoElement>(null);
const { stream, isActive, start, stop } = useUserMedia();
const { level, isSpeaking, peak, resetPeak } = useAudioLevel(stream);
// Attach stream to video
useEffect(() => {
if (videoRef.current && stream) {
videoRef.current.srcObject = stream;
}
}, [stream]);
// Volume bar width. `level`/`peak` are scaled to whole percentages here;
// assumes both are normalized to 0..1 by useAudioLevel — TODO confirm.
const volumePercent = Math.round(level * 100);
const peakPercent = Math.round(peak * 100);
return (
<div className="flex w-full max-w-md flex-col gap-4">
{/* Video with Speaking Ring */}
<div
className={`relative aspect-video overflow-hidden rounded-lg transition-all duration-150 ${
isSpeaking
? "ring-offset-background ring-4 ring-green-500 ring-offset-2"
: "ring-0"
}`}
>
{isActive ? (
<video
ref={videoRef}
autoPlay
playsInline
muted
className="h-full w-full scale-x-[-1] object-cover"
/>
) : (
<div className="text-muted-foreground flex h-full items-center justify-center bg-zinc-900">
<Video className="h-12 w-12 text-zinc-600" />
</div>
)}
{/* Speaking Indicator Badge */}
{isActive && isSpeaking && (
<div className="absolute right-3 top-3 flex items-center gap-1.5 rounded-full bg-green-500 px-2.5 py-1 text-xs font-medium text-white">
<Activity className="h-3 w-3 animate-pulse" />
Speaking
</div>
)}
</div>
{/* Volume Meter */}
{isActive && (
<div className="space-y-2">
<div className="flex items-center gap-2">
<Mic className="text-muted-foreground h-4 w-4" />
<div className="relative h-3 flex-1 overflow-hidden rounded-full bg-zinc-200 dark:bg-zinc-800">
{/* Current Level */}
<div
className={`absolute inset-y-0 left-0 transition-all duration-75 ${
isSpeaking ? "bg-green-500" : "bg-zinc-400"
}`}
style={{ width: `${volumePercent}%` }}
/>
{/* Peak Marker */}
<div
className="absolute inset-y-0 w-0.5 bg-red-500"
style={{ left: `${peakPercent}%` }}
/>
</div>
<span className="text-muted-foreground w-12 text-right font-mono text-xs">
{volumePercent}%
</span>
</div>
{/* Stats */}
<div className="flex justify-between text-xs">
<span className="text-muted-foreground">
Peak: {peakPercent}%
</span>
<button
onClick={resetPeak}
className="text-muted-foreground hover:text-foreground underline"
>
Reset Peak
</button>
</div>
</div>
)}
{/* Controls */}
<div className="flex justify-center">
{isActive ? (
<Button variant="destructive" onClick={stop}>
Stop
</Button>
) : (
<Button onClick={() => start()}>Start with Mic</Button>
)}
</div>
</div>
);
};
API Reference
Hook Signature
function useUserMedia(options?: UseUserMediaOptions): UseUserMediaReturn;

Options
| Property | Type | Default | Description |
|---|---|---|---|
| constraints | UseUserMediaConstraints | {audio: true, video: true} | Initial media constraints |
| autoStart | boolean | false | Start stream automatically on mount |
Return Value
| Property | Type | Description |
|---|---|---|
| stream | MediaStream \| null | The active media stream |
| isLoading | boolean | Whether stream is being acquired |
| isActive | boolean | Whether a stream is currently active |
| error | Error \| null | Error if stream acquisition failed |
| isSupported | boolean | Whether getUserMedia is supported |
| start | (constraints?) => Promise<boolean> | Start the stream |
| stop | () => void | Stop all tracks |
| switchAudioDevice | (deviceId) => Promise<boolean> | Switch microphone |
| switchVideoDevice | (deviceId) => Promise<boolean> | Switch camera |
| audioTrack | MediaStreamTrack \| null | Current audio track |
| videoTrack | MediaStreamTrack \| null | Current video track |
Hook Source Code
import { useState, useEffect, useCallback, useRef } from "react";
/**
 * Constraints for getUserMedia.
 * A narrowed form of the DOM's MediaStreamConstraints: each side may be a
 * boolean toggle or a full MediaTrackConstraints object (deviceId,
 * resolution, frame rate, ...).
 */
export interface UseUserMediaConstraints {
/** Audio constraints (true, false, or MediaTrackConstraints) */
audio?: boolean | MediaTrackConstraints;
/** Video constraints (true, false, or MediaTrackConstraints) */
video?: boolean | MediaTrackConstraints;
}
/**
 * Options for the useUserMedia hook
 */
export interface UseUserMediaOptions {
/** Initial constraints (default: { audio: true, video: true }) */
constraints?: UseUserMediaConstraints;
/** Whether to start automatically on mount (default: false) */
autoStart?: boolean;
}
/**
 * Return type for the useUserMedia hook
 */
export interface UseUserMediaReturn {
/** The active media stream */
stream: MediaStream | null;
/** Whether the stream is being acquired */
isLoading: boolean;
/** Whether a stream is currently active (derived: stream !== null) */
isActive: boolean;
/** Error if stream acquisition failed */
error: Error | null;
/** Whether the API is supported */
isSupported: boolean;
/** Start the media stream with optional constraints; resolves true on success, false on failure or when superseded */
start: (constraints?: UseUserMediaConstraints) => Promise<boolean>;
/** Stop all tracks and release the stream */
stop: () => void;
/** Switch to a different audio device (restarts the stream) */
switchAudioDevice: (deviceId: string) => Promise<boolean>;
/** Switch to a different video device (restarts the stream) */
switchVideoDevice: (deviceId: string) => Promise<boolean>;
/** Get the current audio track (first audio track of the stream) */
audioTrack: MediaStreamTrack | null;
/** Get the current video track (first video track of the stream) */
videoTrack: MediaStreamTrack | null;
}
/**
 * A React hook for accessing the user's camera and microphone using getUserMedia.
 * Provides controls for starting, stopping, and switching devices.
 *
 * Concurrency model: every start() (including device switches) claims a
 * monotonically increasing request id. A getUserMedia response that arrives
 * after a newer start(), after stop(), or after unmount is treated as stale
 * and its tracks are stopped immediately so the device indicator turns off.
 *
 * @param options - Configuration options for the hook
 * @returns UseUserMediaReturn object with stream, states, and control functions
 *
 * @example
 * ```tsx
 * const { stream, start, stop, isActive } = useUserMedia();
 *
 * // Start with default constraints
 * await start();
 *
 * // Use stream in a video element
 * videoRef.current.srcObject = stream;
 *
 * // Stop when done
 * stop();
 * ```
 */
export function useUserMedia(
  options: UseUserMediaOptions = {},
): UseUserMediaReturn {
  const { constraints: initialConstraints, autoStart = false } = options;
  const [stream, setStream] = useState<MediaStream | null>(null);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<Error | null>(null);
  // Mirror of `stream` readable from stable callbacks and the unmount cleanup.
  const streamRef = useRef<MediaStream | null>(null);
  // Id of the latest start() request; bumped to invalidate pending requests.
  const requestRef = useRef<number>(0);
  // Last-used constraints, preserved across device switches.
  const constraintsRef = useRef<UseUserMediaConstraints>(
    initialConstraints ?? { audio: true, video: true },
  );
  // Check if API is supported (guarded for SSR, where `navigator` is absent).
  const isSupported =
    typeof navigator !== "undefined" &&
    !!navigator.mediaDevices?.getUserMedia;
  // Convenience accessors: first track of each kind from the current stream.
  const audioTrack = stream?.getAudioTracks()[0] ?? null;
  const videoTrack = stream?.getVideoTracks()[0] ?? null;
  // Stop a single track and disable it as well.
  const stopTrack = useCallback((track: MediaStreamTrack) => {
    track.stop();
    track.enabled = false;
  }, []);
  // Stop all tracks in a stream (no-op for null).
  // FIX: `stopTrack` added to the dependency array (it is stable, so behavior
  // is unchanged, but the memoization contract is now honest).
  const stopTracks = useCallback(
    (mediaStream: MediaStream | null) => {
      if (mediaStream) {
        mediaStream.getTracks().forEach(stopTrack);
      }
    },
    [stopTrack],
  );
  // Stop the current stream and release the devices.
  const stop = useCallback(() => {
    // FIX: invalidate any in-flight getUserMedia request so a pending start()
    // cannot re-activate the stream after stop() was called; the stale
    // response is stopped by start()'s staleness check.
    requestRef.current += 1;
    stopTracks(streamRef.current);
    streamRef.current = null;
    setStream(null);
    setError(null);
    // FIX: clear the loading flag when stop() interrupts an acquisition.
    setIsLoading(false);
  }, [stopTracks]);
  // Start (or restart) the media stream. Resolves true on success; false when
  // unsupported, on error, or when superseded by a newer request / stop().
  const start = useCallback(
    async (newConstraints?: UseUserMediaConstraints): Promise<boolean> => {
      if (!isSupported) {
        setError(new Error("getUserMedia is not supported"));
        return false;
      }
      // Remember new constraints so device switches keep the other side.
      if (newConstraints) {
        constraintsRef.current = newConstraints;
      }
      // Claim a fresh request id; any older pending request becomes stale.
      const requestId = requestRef.current + 1;
      requestRef.current = requestId;
      setIsLoading(true);
      setError(null);
      try {
        // Release the previous devices before acquiring new ones.
        stopTracks(streamRef.current);
        const mediaStream = await navigator.mediaDevices.getUserMedia(
          constraintsRef.current,
        );
        // Check if this is still the latest request.
        if (requestRef.current !== requestId) {
          // Superseded (or stopped/unmounted) while awaiting: stop the tracks
          // we just acquired so the camera/mic indicator turns off.
          stopTracks(mediaStream);
          return false;
        }
        streamRef.current = mediaStream;
        setStream(mediaStream);
        setIsLoading(false);
        return true;
      } catch (err) {
        // Only surface the error if we are still the active request.
        if (requestRef.current === requestId) {
          const error =
            err instanceof Error
              ? err
              : new Error("Failed to access media devices");
          setError(error);
          setStream(null);
          setIsLoading(false);
        }
        return false;
      }
    },
    [isSupported, stopTracks],
  );
  // Switch microphone, preserving the current video constraints.
  const switchAudioDevice = useCallback(
    async (deviceId: string): Promise<boolean> => {
      const currentVideo = constraintsRef.current.video;
      const newConstraints: UseUserMediaConstraints = {
        audio: { deviceId: { exact: deviceId } },
        video: currentVideo,
      };
      return start(newConstraints);
    },
    [start],
  );
  // Switch camera, preserving the current audio constraints.
  const switchVideoDevice = useCallback(
    async (deviceId: string): Promise<boolean> => {
      const currentAudio = constraintsRef.current.audio;
      const newConstraints: UseUserMediaConstraints = {
        audio: currentAudio,
        video: { deviceId: { exact: deviceId } },
      };
      return start(newConstraints);
    },
    [start],
  );
  // Auto-start on mount if enabled (deliberately not keyed on `start`'s
  // identity, to avoid restart loops).
  useEffect(() => {
    if (autoStart && isSupported) {
      start();
    }
  }, [autoStart, isSupported]); // eslint-disable-line react-hooks/exhaustive-deps
  // Cleanup on unmount: release devices and invalidate pending requests.
  useEffect(() => {
    return () => {
      // FIX: mark any in-flight request stale so a stream that resolves after
      // unmount is stopped by start()'s staleness branch instead of leaking
      // (camera light stuck on with no way to turn it off).
      requestRef.current += 1;
      if (streamRef.current) {
        streamRef.current.getTracks().forEach(stopTrack);
      }
    };
  }, []);
  return {
    stream,
    isLoading,
    isActive: stream !== null,
    error,
    isSupported,
    start,
    stop,
    switchAudioDevice,
    switchVideoDevice,
    audioTrack,
    videoTrack,
  };
}
export default useUserMedia;