Commit 58dc2978a9 — mirror of https://github.com/streamich/react-use.git
@@ -45,6 +45,10 @@
  - [`useWindowSize`](./docs/useWindowSize.md) — tracks `Window` dimensions.
<br/>
<br/>
- [__UI__](./docs/UI.md)
  - [`useAudio`](./docs/useAudio.md) — plays audio and exposes its controls.
<br/>
<br/>
- [__Animations__](./docs/Animations.md)
  - [`useRaf`](./docs/useRaf.md) — re-renders component on each `requestAnimationFrame`.
  - [`useSpring`](./docs/useSpring.md) — interpolates number over time according to spring dynamics.

docs/useAudio.md — new file (89 lines)

# `useAudio`

Creates an `<audio>` element, tracks its state, and exposes playback controls.

## Usage

```jsx
import {useAudio} from 'react-use';

const src = 'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-2.mp3';

const Demo = () => {
  const [audio, state, controls, ref] = useAudio({
    src,
    autoPlay: true,
  });

  return (
    <div>
      {audio}
      <pre>{JSON.stringify(state, null, 2)}</pre>
      <button onClick={controls.pause}>Pause</button>
      <button onClick={controls.play}>Play</button>
      <br/>
      <button onClick={controls.mute}>Mute</button>
      <button onClick={controls.unmute}>Un-mute</button>
      <br/>
      <button onClick={() => controls.volume(.1)}>Volume: 10%</button>
      <button onClick={() => controls.volume(.5)}>Volume: 50%</button>
      <button onClick={() => controls.volume(1)}>Volume: 100%</button>
      <br/>
      <button onClick={() => controls.seek(state.time - 5)}>-5 sec</button>
      <button onClick={() => controls.seek(state.time + 5)}>+5 sec</button>
    </div>
  );
};
```

## Reference

```ts
const [audio, state, controls, ref] = useAudio(props);
```

`audio` is React's `<audio>` element that you have to insert somewhere in your render tree, for example:

```jsx
<div>{audio}</div>
```

`state` tracks the state of the audio and has the following shape:

```json
{
  "buffered": [
    {
      "start": 0,
      "end": 425.952625
    }
  ],
  "time": 5.244996,
  "duration": 425.952625,
  "isPlaying": false,
  "muted": false,
  "volume": 1
}
```
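
For instance, `time` and `duration` can drive a simple progress readout. A rough sketch (the `Progress` component below is hypothetical, not part of the library):

```jsx
// Hypothetical helper — derives a progress bar from `state.time` and `state.duration`.
const Progress = ({state}) => {
  const percent = state.duration ? (state.time / state.duration) * 100 : 0;
  return <progress value={percent} max={100} />;
};
```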

`controls` is a collection of methods that allow you to control playback of the audio; it has the following interface:

```ts
interface AudioControls {
  play: () => Promise<void> | void;
  pause: () => void;
  mute: () => void;
  unmute: () => void;
  volume: (volume: number) => void;
  seek: (time: number) => void;
}
```
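
As a sketch, `state.isPlaying` pairs naturally with `play`/`pause` for a toggle (the `PlayPauseButton` component below is hypothetical):

```jsx
// Hypothetical toggle built from `state.isPlaying` and `controls`.
const PlayPauseButton = ({state, controls}) => (
  <button onClick={state.isPlaying ? controls.pause : controls.play}>
    {state.isPlaying ? 'Pause' : 'Play'}
  </button>
);
```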

`ref` is a React reference to the HTML `<audio>` element; you can access the element via `ref.current`. Note that it may be `null`.
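
For example, the ref gives access to `HTMLMediaElement` properties that `controls` does not wrap, such as `playbackRate`. A sketch (the `doubleSpeed` handler is hypothetical and assumes the `ref` from the Usage example):

```jsx
// Hypothetical handler — guards against `ref.current` being null.
const doubleSpeed = () => {
  if (ref.current) {
    ref.current.playbackRate = 2;
  }
};
```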

And finally, `props` — all props that `<audio>` accepts.
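
For instance, a sketch passing standard `<audio>` attributes straight through (assuming the `src` from the Usage example; `loop` and `preload` are standard attributes, not options added by the hook):

```jsx
// Hypothetical props — `loop` and `preload` are standard <audio> attributes.
const [audio, state, controls, ref] = useAudio({
  src,
  autoPlay: true,
  loop: true,
  preload: 'auto',
});
```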

package.json
@@ -1,6 +1,6 @@
 {
   "name": "react-use",
-  "version": "1.5.0",
+  "version": "1.6.0",
   "description": "Collection of React Hooks",
   "main": "lib/index.js",
   "files": [

src/__stories__/useAudio.story.tsx — new file (36 lines)

import * as React from 'react';
import {storiesOf} from '@storybook/react';
import {useAudio} from '..';

const src = 'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-2.mp3';

const Demo = () => {
  const [audio, state, controls] = useAudio({
    src,
    autoPlay: true,
  });

  return (
    <div>
      {audio}
      <pre>{JSON.stringify(state, null, 2)}</pre>
      <button onClick={controls.pause}>Pause</button>
      <button onClick={controls.play}>Play</button>
      <br/>
      <button onClick={controls.mute}>Mute</button>
      <button onClick={controls.unmute}>Un-mute</button>
      <br/>
      <button onClick={() => controls.volume(.1)}>Volume: 10%</button>
      <button onClick={() => controls.volume(.5)}>Volume: 50%</button>
      <button onClick={() => controls.volume(1)}>Volume: 100%</button>
      <br/>
      <button onClick={() => controls.seek(state.time - 5)}>-5 sec</button>
      <button onClick={() => controls.seek(state.time + 5)}>+5 sec</button>
    </div>
  );
};

storiesOf('useAudio', module)
  .add('Example', () =>
    <Demo/>
  )

src/index.ts
@@ -1,4 +1,5 @@
 import useAsync from './useAsync';
+import useAudio from './useAudio';
 import useBattery from './useBattery';
 import useCounter from './useCounter';
 import useCss from './useCss';
@@ -31,6 +32,7 @@ import useWindowSize from './useWindowSize';

 export {
   useAsync,
+  useAudio,
   useBattery,
   useCounter,
   useCss,

src/react.ts
@@ -6,7 +6,8 @@ export const useState: UseState = (React as any).useState;
 export type UseEffect = (didUpdate: () => ((() => void) | void), params?: any[]) => void;
 export const useEffect: UseEffect = (React as any).useEffect;

-export type UseRef = <T>(initialValue: T) => {current: T};
+export interface ReactRef<T> {current: T};
+export type UseRef = <T>(initialValue: T) => ReactRef<T>;
 export const useRef: UseRef = (React as any).useRef;

 export type UseCallback = <T extends ((...args: any[]) => any)>(callback: T, args: any[]) => T;

src/useAudio.ts — new file (177 lines)

import * as React from 'react';
import {useEffect, useRef, ReactRef} from './react';
import useSetState from './useSetState';
import parseTimeRanges from './util/parseTimeRanges';

export interface AudioProps extends React.AudioHTMLAttributes<any> {
  src: string;
}

export interface AudioState {
  buffered: any[];
  duration: number;
  isPlaying: boolean;
  muted: boolean;
  time: number;
  volume: number;
}

export interface AudioControls {
  play: () => Promise<void> | void;
  pause: () => void;
  mute: () => void;
  unmute: () => void;
  volume: (volume: number) => void;
  seek: (time: number) => void;
}

const useAudio = (props: AudioProps): [React.ReactElement<AudioProps>, AudioState, AudioControls, ReactRef<HTMLAudioElement | null>] => {
  const [state, setState] = useSetState<AudioState>({
    buffered: [],
    time: 0,
    duration: 0,
    isPlaying: false,
    muted: false,
    volume: 1,
  });
  const ref = useRef<HTMLAudioElement | null>(null);

  // Runs the hook's internal handler first, then the user-supplied one,
  // so the user's handler is still called even if the internal one throws.
  const wrapEvent = (userEvent, proxyEvent?) => {
    return (event) => {
      try {
        proxyEvent && proxyEvent(event);
      } finally {
        userEvent && userEvent(event);
      }
    };
  };

  const onPlay = () => setState({isPlaying: true});
  const onPause = () => setState({isPlaying: false});
  const onVolumeChange = (event) => {
    const el = ref.current;
    if (!el) return;
    setState({
      muted: el.muted,
      volume: el.volume,
    });
  };
  const onDurationChange = (event) => {
    const el = ref.current;
    if (!el) return;
    const {duration, buffered} = el;
    setState({
      duration,
      buffered: parseTimeRanges(buffered),
    });
  };
  const onTimeUpdate = () => {
    const el = ref.current;
    if (!el) return;
    setState({time: el.currentTime});
  };
  const onProgress = (event) => {
    const el = ref.current;
    if (!el) return;
    setState({buffered: parseTimeRanges(el.buffered)});
  };

  const element = React.createElement('audio', {
    controls: false,
    ...props,
    ref,
    onPlay: wrapEvent(props.onPlay, onPlay),
    onPause: wrapEvent(props.onPause, onPause),
    onVolumeChange: wrapEvent(props.onVolumeChange, onVolumeChange),
    onDurationChange: wrapEvent(props.onDurationChange, onDurationChange),
    onTimeUpdate: wrapEvent(props.onTimeUpdate, onTimeUpdate),
    onProgress: wrapEvent(props.onProgress, onProgress),
  });

  // Some browsers return a `Promise` from `.play()` and may throw errors
  // if one tries to execute another `.play()` or `.pause()` while that
  // promise is resolving. So we prevent that with this lock.
  // See: https://bugs.chromium.org/p/chromium/issues/detail?id=593273
  let lockPlay: boolean = false;

  const controls = {
    play: () => {
      const el = ref.current;
      if (!el) return undefined;

      if (!lockPlay) {
        const promise = el.play();
        const isPromise = typeof promise === 'object';

        if (isPromise) {
          lockPlay = true;
          const resetLock = () => {
            lockPlay = false;
          };
          promise.then(resetLock, resetLock);
        }

        return promise;
      }
      return undefined;
    },
    pause: () => {
      const el = ref.current;
      if (el && !lockPlay) {
        return el.pause();
      }
    },
    seek: (time: number) => {
      const el = ref.current;
      if (!el || (state.duration === undefined)) return;
      // Clamp the target time into the [0, duration] range.
      time = Math.min(state.duration, Math.max(0, time));
      el.currentTime = time;
    },
    volume: (volume: number) => {
      const el = ref.current;
      if (!el) return;
      // Clamp the volume into the [0, 1] range.
      volume = Math.min(1, Math.max(0, volume));
      el.volume = volume;
      setState({volume});
    },
    mute: () => {
      const el = ref.current;
      if (!el) return;
      el.muted = true;
    },
    unmute: () => {
      const el = ref.current;
      if (!el) return;
      el.muted = false;
    },
  };

  useEffect(() => {
    const el = ref.current!;

    if (!el) {
      if (process.env.NODE_ENV !== 'production') {
        console.error(
          'useAudio() ref to <audio> element is empty at mount. ' +
          'It seems you have not rendered the audio element, which is ' +
          'returned as the first element: const [audio] = useAudio(...).'
        );
      }
      return;
    }

    // Start media, if autoPlay requested.
    if (props.autoPlay && el.paused) {
      controls.play();
    }

    setState({
      volume: el.volume,
      muted: el.muted,
    });
  }, [props.src]);

  return [element, state, controls, ref];
};

export default useAudio;

src/useSetState.ts
@@ -2,10 +2,10 @@ import {useState} from './react';

 const useSetState = <T extends object>(initialState: T = {} as T): [T, (patch: Partial<T>) => void] => {
   const [state, set] = useState<T>(initialState);
-  const setState = (patch) => set({
-    ...(state as object),
-    ...patch,
-  });
+  const setState = (patch) => {
+    Object.assign(state, patch);
+    set(state);
+  };

   return [state, setState];
 };

src/util/parseTimeRanges.ts — new file (14 lines)

const parseTimeRanges = (ranges) => {
  const result: ({start: number, end: number})[] = [];

  for (let i = 0; i < ranges.length; i++) {
    result.push({
      start: ranges.start(i),
      end: ranges.end(i)
    });
  }

  return result;
};

export default parseTimeRanges;
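
A minimal usage sketch, assuming a browser environment (this example is not part of the commit): the helper flattens a DOM `TimeRanges` object, such as a media element's `buffered` property, into a plain array.

```ts
// Hypothetical usage — assumes an <audio> element is present on the page.
import parseTimeRanges from './util/parseTimeRanges';

const el = document.querySelector('audio');
if (el) {
  // TimeRanges exposes length/start(i)/end(i); this yields e.g. [{start: 0, end: 425.95}].
  console.log(parseTimeRanges(el.buffered));
}
```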