useBreakpoint
Installation
npx sse-hooks add use-breakpoint
yarn dlx sse-hooks add use-breakpoint
pnpm dlx sse-hooks add use-breakpoint
deno run -A npm:sse-hooks add use-breakpoint
bunx sse-hooks add use-breakpoint
Usage
import React from "react";
import { useBreakpoint } from "@/hooks/useBreakpoint";

// Named breakpoints mapped to their minimum viewport width in pixels.
const BREAKPOINTS = {
  mobile: 0,
  tablet: 768,
  desktop: 1024,
  large: 1440,
};

// useBreakpoint is called once at module scope and returns a set of
// breakpoint-bound helpers: useGreater/useSmaller/useBetween are hooks
// (reactive), while isGreater is a plain check usable outside render.
const { useGreater, useSmaller, useBetween, isGreater } =
  useBreakpoint(BREAKPOINTS);

export function ResponsiveComponent() {
  // Reactive flags — the component re-renders when the viewport
  // crosses the corresponding breakpoint.
  const isTabletOrAbove = useGreater("tablet");
  const isMobileOnly = useSmaller("tablet");
  const isStrictlyTablet = useBetween("tablet", "desktop");

  // isGreater is evaluated at call time (not a hook), so it is safe to
  // use inside event handlers like this one.
  const checkInitialSize = () => {
    if (isGreater("large")) {
      console.log("Started on a very large screen");
    }
  };

  return (
    <div style={{ padding: "20px", border: "1px solid #ccc" }}>
      <h2>Breakpoint Tracker</h2>
      <div style={{ display: "flex", flexDirection: "column", gap: "10px" }}>
        <p>
          <strong>Current Viewport Logic:</strong>
        </p>
        {isMobileOnly && (
          <div style={{ color: "red" }}>📱 You are on a Mobile device.</div>
        )}
        {isStrictlyTablet && (
          <div style={{ color: "blue" }}>
            {" "}
            tablet Viewport (Between 768px and 1024px)
          </div>
        )}
        {isTabletOrAbove && (
          <div style={{ color: "green" }}>
            ✅ Desktop or Tablet features enabled
          </div>
        )}
      </div>
      <button onClick={checkInitialSize} style={{ marginTop: "20px" }}>
        Run Manual Logic Check
      </button>
    </div>
  );
}
// JavaScript example is not available right now.
API
Parameters
| Parameter | Default | Type |
|---|---|---|
| options | `{}` | `UseAudioRecorderOptions` — Configuration options for audio recording. |
Returns
| Return Value | Default | Type |
|---|---|---|
| isSupported | - | `boolean` — Whether audio recording is supported in the current browser. |
| isRecording | - | `boolean` — Whether recording is currently active. |
| isPaused | - | `boolean` — Whether recording is currently paused. |
| stream | - | `MediaStream \| null` — Active media stream. |
| mediaRecorder | - | `MediaRecorder \| null` — MediaRecorder instance. |
| audioBlob | - | `Blob \| null` — Final recorded audio blob. |
| audioUrl | - | `string \| null` — Object URL for the recorded audio. |
| duration | - | `number` — Duration of the recording in seconds. |
| error | - | `string \| null` — Error message if recording fails. |
| analysisData | - | `AudioAnalysisData \| null` — Live audio analysis data. |
| startRecording | - | `() => Promise<void>` — Starts audio recording. |
| stopRecording | - | `() => void` — Stops audio recording. |
| pauseRecording | - | `() => void` — Pauses the recording. |
| resumeRecording | - | `() => void` — Resumes a paused recording. |
| clearRecording | - | `() => void` — Clears the current recording state. |
| downloadRecording | - | `(filename?: string) => void` — Downloads the recording as a file. |
Type Aliases
AudioMimeType
Union type — supported audio MIME types for recording.
type AudioMimeType =
| "audio/webm"
| "audio/webm;codecs=opus"
| "audio/webm;codecs=vorbis"
| "audio/ogg"
| "audio/ogg;codecs=opus"
| "audio/ogg;codecs=vorbis"
| "application/ogg"
| "audio/mp4"
| "audio/mp4;codecs=mp4a.40.2"
| "audio/aac"
| "audio/x-m4a"
| "audio/mpeg"
| "audio/mp3"
| "audio/wav"
| "audio/x-wav"
| "audio/wave"
| "audio/flac"
| "audio/3gpp"
| "audio/3gpp2";
Code
import { useState, useEffect, useRef, useCallback } from "react";
import { withDefaults } from "./with-defaults";
/** Supported audio MIME types for recording. */
type AudioMimeType =
  // WebM container (Opus / Vorbis codecs)
  | "audio/webm"
  | "audio/webm;codecs=opus"
  | "audio/webm;codecs=vorbis"
  // Ogg container (Opus / Vorbis codecs)
  | "audio/ogg"
  | "audio/ogg;codecs=opus"
  | "audio/ogg;codecs=vorbis"
  | "application/ogg"
  // MP4 / AAC family
  | "audio/mp4"
  | "audio/mp4;codecs=mp4a.40.2"
  | "audio/aac"
  | "audio/x-m4a"
  // MPEG audio
  | "audio/mpeg"
  | "audio/mp3"
  // Uncompressed / lossless
  | "audio/wav"
  | "audio/x-wav"
  | "audio/wave"
  | "audio/flac"
  // 3GPP mobile containers
  | "audio/3gpp"
  | "audio/3gpp2";
/** Options for configuring the useAudioRecorder hook. */
export interface UseAudioRecorderOptions {
  /**
   * Audio bitrate in bits per second.
   * @default 128000
   */
  audioBitsPerSecond?: number;
  /**
   * MIME type for the recorded audio. If the requested type is not
   * supported by MediaRecorder, the hook falls back to "audio/webm".
   * @default "audio/webm"
   */
  mimeType?: AudioMimeType;
  /**
   * Timeslice (ms) for MediaRecorder data chunks. When omitted, the
   * recorder delivers its data in a single chunk once stopped.
   */
  timeslice?: number;
  /**
   * If set, enables real-time audio analysis during recording.
   * @default false
   */
  enableAnalysis?: boolean;
  /**
   * FFT size for audio analysis. The Web Audio AnalyserNode requires a
   * power of two between 32 and 32768.
   * @default 2048
   */
  fftSize?: number;
}
/** Audio analysis data returned when `enableAnalysis` is true. */
export interface AudioAnalysisData {
  /** Frequency domain data (FFT), one byte per frequency bin. */
  frequencyData: Uint8Array;
  /** Time domain waveform data (unsigned bytes centered at 128). */
  timeData: Uint8Array;
  /** Calculated RMS volume level, normalized to the range [0, 1]. */
  volume: number;
}
/** The useAudioRecorder return type. */
export interface UseAudioRecorderReturn {
  /** Whether audio recording is supported in the current browser. */
  isSupported: boolean;
  /** Whether recording is currently active. */
  isRecording: boolean;
  /** Whether recording is currently paused. */
  isPaused: boolean;
  /** Active media stream. */
  stream: MediaStream | null;
  /** MediaRecorder instance. */
  mediaRecorder: MediaRecorder | null;
  /** Final recorded audio blob. */
  audioBlob: Blob | null;
  /** Object URL for the recorded audio; released by clearRecording. */
  audioUrl: string | null;
  /** Duration of the recording in whole seconds (paused time excluded). */
  duration: number;
  /** Error message if recording fails. */
  error: string | null;
  /** Live audio analysis data (null unless `enableAnalysis` is true). */
  analysisData: AudioAnalysisData | null;
  /** Starts audio recording (requests microphone permission). */
  startRecording: () => Promise<void>;
  /** Stops audio recording and releases the media stream. */
  stopRecording: () => void;
  /** Pauses the recording. */
  pauseRecording: () => void;
  /** Resumes a paused recording. */
  resumeRecording: () => void;
  /** Clears the current recording state. */
  clearRecording: () => void;
  /** Downloads the recording as a file. */
  downloadRecording: (filename?: string) => void;
}
/**
 * A comprehensive hook for audio recording with real-time analysis using getUserMedia, MediaRecorder, and Web Audio APIs
 *
 * @category sensors
 * @param {UseAudioRecorderOptions} [options] - Configuration options for audio recording.
 * @returns {UseAudioRecorderReturn} Object containing recording state, audio data, and control methods.
 * @throws Will set an error if audio recording is not supported or permission is denied.
 * @see [Documentation](https://sse-hooks.vercel.app/docs/hooks/use-audio-recorder)
 * @public
 */
export const useAudioRecorder = (
  options: UseAudioRecorderOptions = {},
): UseAudioRecorderReturn => {
  // Merge caller options with defaults. `timeslice` has no default: when
  // undefined, MediaRecorder.start() delivers one chunk at stop time.
  const { audioBitsPerSecond, mimeType, timeslice, enableAnalysis, fftSize } =
    withDefaults<UseAudioRecorderOptions>(options, {
      audioBitsPerSecond: 128000,
      mimeType: "audio/webm",
      enableAnalysis: false,
      fftSize: 2048,
    });
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  const [stream, setStream] = useState<MediaStream | null>(null);
  const [mediaRecorder, setMediaRecorder] = useState<MediaRecorder | null>(
    null,
  );
  const [audioBlob, setAudioBlob] = useState<Blob | null>(null);
  const [audioUrl, setAudioUrl] = useState<string | null>(null);
  const [duration, setDuration] = useState(0);
  const [error, setError] = useState<string | null>(null);
  const [analysisData, setAnalysisData] = useState<AudioAnalysisData | null>(
    null,
  );
  // Blob chunks collected from the recorder's dataavailable events.
  const chunksRef = useRef<Blob[]>([]);
  // Epoch ms when recording started (or last resumed).
  const startTimeRef = useRef<number>(0);
  // Accumulated paused time in ms, subtracted from the duration.
  const pausedTimeRef = useRef<number>(0);
  // NOTE(review): NodeJS.Timeout in browser-targeted code — consider
  // ReturnType<typeof setInterval> to avoid a dependency on @types/node.
  const intervalRef = useRef<NodeJS.Timeout | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
  // Handle of the pending requestAnimationFrame for the analysis loop.
  const animationFrameRef = useRef<number | null>(null);
  // Feature detection: both getUserMedia and MediaRecorder must exist.
  const isSupported =
    typeof navigator !== "undefined" &&
    !!navigator.mediaDevices &&
    !!navigator.mediaDevices.getUserMedia &&
    !!window.MediaRecorder;
  // Recompute elapsed duration in whole seconds, excluding paused time.
  const updateDuration = useCallback(() => {
    if (startTimeRef.current) {
      const elapsed = Date.now() - startTimeRef.current - pausedTimeRef.current;
      setDuration(Math.floor(elapsed / 1000));
    }
  }, []);
  // One analysis frame: read FFT + waveform bytes, derive RMS volume,
  // publish to state, then schedule the next frame while recording.
  const analyzeAudio = useCallback(() => {
    if (!analyserRef.current || !enableAnalysis) return;
    const frequencyData = new Uint8Array(analyserRef.current.frequencyBinCount);
    const timeData = new Uint8Array(analyserRef.current.fftSize);
    analyserRef.current.getByteFrequencyData(frequencyData);
    analyserRef.current.getByteTimeDomainData(timeData);
    // Calculate volume (RMS): bytes are centered at 128; normalizing each
    // sample to [-1, 1] keeps the resulting RMS in [0, 1].
    let sum = 0;
    for (let i = 0; i < timeData.length; i++) {
      const sample = ((timeData[i] ?? 0) - 128) / 128;
      sum += sample * sample;
    }
    const volume = Math.sqrt(sum / timeData.length);
    setAnalysisData({
      frequencyData: frequencyData.slice(),
      timeData: timeData.slice(),
      volume,
    });
    // NOTE(review): isRecording/isPaused are read from this closure. On the
    // first invocation (from startRecording via setupAudioAnalysis),
    // isRecording is still false, so the loop may stop after one frame until
    // a re-render recreates this callback — confirm intended behavior.
    if (isRecording && !isPaused) {
      animationFrameRef.current = requestAnimationFrame(analyzeAudio);
    }
  }, [isRecording, isPaused, enableAnalysis]);
  // Wire the media stream into a Web Audio analyser and kick off the
  // analysis loop. Failure here is non-fatal: recording still works.
  const setupAudioAnalysis = useCallback(
    (mediaStream: MediaStream) => {
      if (!enableAnalysis) return;
      try {
        // webkitAudioContext covers older Safari.
        audioContextRef.current = new (
          window.AudioContext || (window as any).webkitAudioContext
        )();
        analyserRef.current = audioContextRef.current.createAnalyser();
        sourceRef.current =
          audioContextRef.current.createMediaStreamSource(mediaStream);
        analyserRef.current.fftSize = fftSize;
        analyserRef.current.smoothingTimeConstant = 0.8;
        sourceRef.current.connect(analyserRef.current);
        analyzeAudio();
      } catch (err) {
        console.warn("Failed to setup audio analysis:", err);
      }
    },
    [enableAnalysis, fftSize, analyzeAudio],
  );
  // Request the microphone, build the MediaRecorder, attach lifecycle
  // handlers, and start recording plus the 1-second duration ticker.
  const startRecording = useCallback(async () => {
    if (!isSupported) {
      setError("Audio recording is not supported in this browser");
      return;
    }
    try {
      setError(null);
      const mediaStream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true,
        },
      });
      setStream(mediaStream);
      setupAudioAnalysis(mediaStream);
      const recorder = new MediaRecorder(mediaStream, {
        audioBitsPerSecond,
        // Fall back to webm when the requested MIME type is unsupported.
        mimeType: MediaRecorder.isTypeSupported(mimeType)
          ? mimeType
          : "audio/webm",
      });
      chunksRef.current = [];
      recorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          chunksRef.current.push(event.data);
        }
      };
      recorder.onstop = () => {
        // Assemble the final blob using the type the recorder actually used.
        const blob = new Blob(chunksRef.current, { type: recorder.mimeType });
        setAudioBlob(blob);
        // NOTE(review): a previous audioUrl is not revoked here directly;
        // cleanup relies on the unmount effect / clearRecording — verify no
        // object-URL leak across repeated recordings.
        setAudioUrl(URL.createObjectURL(blob));
        setIsRecording(false);
        setIsPaused(false);
        if (intervalRef.current) {
          clearInterval(intervalRef.current);
          intervalRef.current = null;
        }
        if (animationFrameRef.current) {
          cancelAnimationFrame(animationFrameRef.current);
          animationFrameRef.current = null;
        }
      };
      recorder.onpause = () => {
        setIsPaused(true);
        // Bank the elapsed segment so duration excludes the paused gap.
        pausedTimeRef.current += Date.now() - startTimeRef.current;
        if (animationFrameRef.current) {
          cancelAnimationFrame(animationFrameRef.current);
          animationFrameRef.current = null;
        }
      };
      recorder.onresume = () => {
        setIsPaused(false);
        // Restart the segment clock; banked pausedTime keeps totals correct.
        startTimeRef.current = Date.now();
        if (enableAnalysis) {
          analyzeAudio();
        }
      };
      recorder.onerror = (event) => {
        setError(`Recording error: ${event.error?.message || "Unknown error"}`);
        setIsRecording(false);
        setIsPaused(false);
      };
      setMediaRecorder(recorder);
      recorder.start(timeslice);
      setIsRecording(true);
      startTimeRef.current = Date.now();
      pausedTimeRef.current = 0;
      setDuration(0);
      intervalRef.current = setInterval(updateDuration, 1000);
    } catch (err) {
      // Typically a NotAllowedError (permission denied) or NotFoundError.
      const errorMessage =
        err instanceof Error ? err.message : "Failed to start recording";
      setError(errorMessage);
    }
  }, [
    isSupported,
    audioBitsPerSecond,
    mimeType,
    timeslice,
    setupAudioAnalysis,
    updateDuration,
    enableAnalysis,
    analyzeAudio,
  ]);
  // Stop the recorder (which fires onstop and finalizes the blob), release
  // the microphone tracks, and close the analysis AudioContext.
  const stopRecording = useCallback(() => {
    if (mediaRecorder && mediaRecorder.state !== "inactive") {
      mediaRecorder.stop();
    }
    if (stream) {
      stream.getTracks().forEach((track) => track.stop());
      setStream(null);
    }
    if (audioContextRef.current) {
      audioContextRef.current.close();
      audioContextRef.current = null;
    }
  }, [mediaRecorder, stream]);
  // Pause only when actively recording; state updates happen in onpause.
  const pauseRecording = useCallback(() => {
    if (mediaRecorder && mediaRecorder.state === "recording") {
      mediaRecorder.pause();
    }
  }, [mediaRecorder]);
  // Resume only from the paused state; state updates happen in onresume.
  const resumeRecording = useCallback(() => {
    if (mediaRecorder && mediaRecorder.state === "paused") {
      mediaRecorder.resume();
    }
  }, [mediaRecorder]);
  // Drop the captured recording and release its object URL.
  const clearRecording = useCallback(() => {
    if (audioUrl) {
      URL.revokeObjectURL(audioUrl);
    }
    setAudioBlob(null);
    setAudioUrl(null);
    setDuration(0);
    setAnalysisData(null);
    setError(null);
  }, [audioUrl]);
  // Trigger a browser download by clicking a transient anchor element.
  // NOTE(review): the default ".webm" extension may not match the actual
  // recorder mimeType when a fallback or non-webm type was used — confirm.
  const downloadRecording = useCallback(
    (filename = "recording.webm") => {
      if (!audioUrl) return;
      const link = document.createElement("a");
      link.href = audioUrl;
      link.download = filename;
      document.body.appendChild(link);
      link.click();
      document.body.removeChild(link);
    },
    [audioUrl],
  );
  // Cleanup on unmount
  // NOTE(review): with [stream, audioUrl] as deps, this cleanup also runs on
  // every stream/audioUrl change (stopping tracks, closing the context, and
  // revoking the previous URL), not only at unmount — confirm intended.
  useEffect(() => {
    return () => {
      if (intervalRef.current) {
        clearInterval(intervalRef.current);
      }
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
      if (audioContextRef.current) {
        audioContextRef.current.close();
      }
      if (audioUrl) {
        URL.revokeObjectURL(audioUrl);
      }
    };
  }, [stream, audioUrl]);
  return {
    isSupported,
    isRecording,
    isPaused,
    stream,
    mediaRecorder,
    audioBlob,
    audioUrl,
    duration,
    error,
    analysisData,
    startRecording,
    stopRecording,
    pauseRecording,
    resumeRecording,
    clearRecording,
    downloadRecording,
  };
};
import { useState, useEffect, useRef, useCallback } from "react";
import { withDefaults } from "./with-defaults";
// Plain JavaScript build of the useAudioRecorder hook. It mirrors the
// TypeScript implementation in the "Code" section above with types erased;
// behavior is identical.
export const useAudioRecorder = (options = {}) => {
  // Merge caller options with defaults; timeslice stays undefined unless set.
  const { audioBitsPerSecond, mimeType, timeslice, enableAnalysis, fftSize } =
    withDefaults(options, {
      audioBitsPerSecond: 128000,
      mimeType: "audio/webm",
      enableAnalysis: false,
      fftSize: 2048,
    });
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  const [stream, setStream] = useState(null);
  const [mediaRecorder, setMediaRecorder] = useState(null);
  const [audioBlob, setAudioBlob] = useState(null);
  const [audioUrl, setAudioUrl] = useState(null);
  const [duration, setDuration] = useState(0);
  const [error, setError] = useState(null);
  const [analysisData, setAnalysisData] = useState(null);
  // Recorded blob chunks, plus timing refs for duration bookkeeping.
  const chunksRef = useRef([]);
  const startTimeRef = useRef(0);
  const pausedTimeRef = useRef(0);
  const intervalRef = useRef(null);
  // Web Audio objects used only when enableAnalysis is true.
  const audioContextRef = useRef(null);
  const analyserRef = useRef(null);
  const sourceRef = useRef(null);
  const animationFrameRef = useRef(null);
  // Feature detection: getUserMedia and MediaRecorder must both exist.
  const isSupported =
    typeof navigator !== "undefined" &&
    !!navigator.mediaDevices &&
    !!navigator.mediaDevices.getUserMedia &&
    !!window.MediaRecorder;
  // Recompute elapsed duration in whole seconds, excluding paused time.
  const updateDuration = useCallback(() => {
    if (startTimeRef.current) {
      const elapsed = Date.now() - startTimeRef.current - pausedTimeRef.current;
      setDuration(Math.floor(elapsed / 1000));
    }
  }, []);
  // One analysis frame: FFT + waveform read, RMS volume, then reschedule.
  const analyzeAudio = useCallback(() => {
    if (!analyserRef.current || !enableAnalysis) return;
    const frequencyData = new Uint8Array(analyserRef.current.frequencyBinCount);
    const timeData = new Uint8Array(analyserRef.current.fftSize);
    analyserRef.current.getByteFrequencyData(frequencyData);
    analyserRef.current.getByteTimeDomainData(timeData);
    // RMS volume: bytes are centered at 128, normalized to [-1, 1].
    let sum = 0;
    for (let i = 0; i < timeData.length; i++) {
      const sample = ((timeData[i] ?? 0) - 128) / 128;
      sum += sample * sample;
    }
    const volume = Math.sqrt(sum / timeData.length);
    setAnalysisData({
      frequencyData: frequencyData.slice(),
      timeData: timeData.slice(),
      volume,
    });
    // NOTE(review): isRecording/isPaused come from this closure; on the
    // first call from startRecording, isRecording is still false — confirm
    // the loop keeps running as intended.
    if (isRecording && !isPaused) {
      animationFrameRef.current = requestAnimationFrame(analyzeAudio);
    }
  }, [isRecording, isPaused, enableAnalysis]);
  // Attach an AnalyserNode to the stream and start the analysis loop.
  const setupAudioAnalysis = useCallback(
    (mediaStream) => {
      if (!enableAnalysis) return;
      try {
        // webkitAudioContext covers older Safari.
        audioContextRef.current = new (
          window.AudioContext || window.webkitAudioContext
        )();
        analyserRef.current = audioContextRef.current.createAnalyser();
        sourceRef.current =
          audioContextRef.current.createMediaStreamSource(mediaStream);
        analyserRef.current.fftSize = fftSize;
        analyserRef.current.smoothingTimeConstant = 0.8;
        sourceRef.current.connect(analyserRef.current);
        analyzeAudio();
      } catch (err) {
        // Analysis failure is non-fatal; recording proceeds without it.
        console.warn("Failed to setup audio analysis:", err);
      }
    },
    [enableAnalysis, fftSize, analyzeAudio],
  );
  // Request the microphone, configure MediaRecorder, and begin recording.
  const startRecording = useCallback(async () => {
    if (!isSupported) {
      setError("Audio recording is not supported in this browser");
      return;
    }
    try {
      setError(null);
      const mediaStream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true,
        },
      });
      setStream(mediaStream);
      setupAudioAnalysis(mediaStream);
      const recorder = new MediaRecorder(mediaStream, {
        audioBitsPerSecond,
        // Fall back to webm when the requested MIME type is unsupported.
        mimeType: MediaRecorder.isTypeSupported(mimeType)
          ? mimeType
          : "audio/webm",
      });
      chunksRef.current = [];
      recorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          chunksRef.current.push(event.data);
        }
      };
      recorder.onstop = () => {
        // Finalize the blob and expose it plus an object URL.
        const blob = new Blob(chunksRef.current, { type: recorder.mimeType });
        setAudioBlob(blob);
        setAudioUrl(URL.createObjectURL(blob));
        setIsRecording(false);
        setIsPaused(false);
        if (intervalRef.current) {
          clearInterval(intervalRef.current);
          intervalRef.current = null;
        }
        if (animationFrameRef.current) {
          cancelAnimationFrame(animationFrameRef.current);
          animationFrameRef.current = null;
        }
      };
      recorder.onpause = () => {
        setIsPaused(true);
        // Bank elapsed time so paused gaps are excluded from duration.
        pausedTimeRef.current += Date.now() - startTimeRef.current;
        if (animationFrameRef.current) {
          cancelAnimationFrame(animationFrameRef.current);
          animationFrameRef.current = null;
        }
      };
      recorder.onresume = () => {
        setIsPaused(false);
        startTimeRef.current = Date.now();
        if (enableAnalysis) {
          analyzeAudio();
        }
      };
      recorder.onerror = (event) => {
        setError(`Recording error: ${event.error?.message || "Unknown error"}`);
        setIsRecording(false);
        setIsPaused(false);
      };
      setMediaRecorder(recorder);
      recorder.start(timeslice);
      setIsRecording(true);
      startTimeRef.current = Date.now();
      pausedTimeRef.current = 0;
      setDuration(0);
      intervalRef.current = setInterval(updateDuration, 1000);
    } catch (err) {
      // Typically permission denial or no microphone available.
      const errorMessage =
        err instanceof Error ? err.message : "Failed to start recording";
      setError(errorMessage);
    }
  }, [
    isSupported,
    audioBitsPerSecond,
    mimeType,
    timeslice,
    setupAudioAnalysis,
    updateDuration,
    enableAnalysis,
    analyzeAudio,
  ]);
  // Stop recording, release the microphone, and close the AudioContext.
  const stopRecording = useCallback(() => {
    if (mediaRecorder && mediaRecorder.state !== "inactive") {
      mediaRecorder.stop();
    }
    if (stream) {
      stream.getTracks().forEach((track) => track.stop());
      setStream(null);
    }
    if (audioContextRef.current) {
      audioContextRef.current.close();
      audioContextRef.current = null;
    }
  }, [mediaRecorder, stream]);
  // Pause only while actively recording (state updates in onpause).
  const pauseRecording = useCallback(() => {
    if (mediaRecorder && mediaRecorder.state === "recording") {
      mediaRecorder.pause();
    }
  }, [mediaRecorder]);
  // Resume only from the paused state (state updates in onresume).
  const resumeRecording = useCallback(() => {
    if (mediaRecorder && mediaRecorder.state === "paused") {
      mediaRecorder.resume();
    }
  }, [mediaRecorder]);
  // Drop the captured recording and release its object URL.
  const clearRecording = useCallback(() => {
    if (audioUrl) {
      URL.revokeObjectURL(audioUrl);
    }
    setAudioBlob(null);
    setAudioUrl(null);
    setDuration(0);
    setAnalysisData(null);
    setError(null);
  }, [audioUrl]);
  // Download via a transient anchor element click.
  const downloadRecording = useCallback(
    (filename = "recording.webm") => {
      if (!audioUrl) return;
      const link = document.createElement("a");
      link.href = audioUrl;
      link.download = filename;
      document.body.appendChild(link);
      link.click();
      document.body.removeChild(link);
    },
    [audioUrl],
  );
  // Cleanup of timers, animation frames, stream, context, and object URL.
  // NOTE(review): deps [stream, audioUrl] mean this cleanup also runs on
  // every change of those values, not only at unmount — confirm intended.
  useEffect(() => {
    return () => {
      if (intervalRef.current) {
        clearInterval(intervalRef.current);
      }
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
      if (stream) {
        stream.getTracks().forEach((track) => track.stop());
      }
      if (audioContextRef.current) {
        audioContextRef.current.close();
      }
      if (audioUrl) {
        URL.revokeObjectURL(audioUrl);
      }
    };
  }, [stream, audioUrl]);
  return {
    isSupported,
    isRecording,
    isPaused,
    stream,
    mediaRecorder,
    audioBlob,
    audioUrl,
    duration,
    error,
    analysisData,
    startRecording,
    stopRecording,
    pauseRecording,
    resumeRecording,
    clearRecording,
    downloadRecording,
  };
};
Changelog
useMediaSession
Custom hook that interacts with the Media Session API. It allows you to customize media notifications and handle media control events (like play, pause, next track) from the system's notification area or lock screen.
useResizeObserver
Custom hook that observes the size of an element using the `ResizeObserver API`.