Merge pull request #33 from streamich/video-2

Video 2
Va Da 2018-10-29 21:24:04 +01:00 committed by GitHub
commit bb2c91396f
8 changed files with 342 additions and 176 deletions

README.md

@@ -49,6 +49,7 @@
- [**UI**](./docs/UI.md)
- [`useAudio`](./docs/useAudio.md) — plays audio and exposes its controls. [![][img-demo]](https://codesandbox.io/s/5v7q47knwl)
- [`useSpeech`](./docs/useSpeech.md) — synthesizes speech from a text string. [![][img-demo]](https://codesandbox.io/s/n090mqz69m)
- [`useVideo`](./docs/useVideo.md) — plays video, tracks its state, and exposes playback controls.
<br/>
<br/>
- [**Animations**](./docs/Animations.md)

docs/useAudio.md

@@ -1,6 +1,6 @@
# `useAudio`
Creates `<audio>` element, tracks its state and exposes playback conrols.
Creates `<audio>` element, tracks its state and exposes playback controls.
## Usage
@@ -38,8 +38,9 @@ const Demo = () => {
## Reference
```ts
```jsx
const [audio, state, controls, ref] = useAudio(props);
const [audio, state, controls, ref] = useAudio(<audio {...props}/>);
```
`audio` is React's `<audio>` element that you have to insert somewhere in your

docs/useVideo.md (new file, +87 lines)

@@ -0,0 +1,87 @@
# `useVideo`
Creates `<video>` element, tracks its state and exposes playback controls.
## Usage
```jsx
import {useVideo} from 'react-use';

const Demo = () => {
  const [video, state, controls, ref] = useVideo(
    <video src="http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4" autoPlay />
  );

  return (
    <div>
      {video}
      <pre>{JSON.stringify(state, null, 2)}</pre>
      <button onClick={controls.pause}>Pause</button>
      <button onClick={controls.play}>Play</button>
      <br/>
      <button onClick={controls.mute}>Mute</button>
      <button onClick={controls.unmute}>Un-mute</button>
      <br/>
      <button onClick={() => controls.volume(.1)}>Volume: 10%</button>
      <button onClick={() => controls.volume(.5)}>Volume: 50%</button>
      <button onClick={() => controls.volume(1)}>Volume: 100%</button>
      <br/>
      <button onClick={() => controls.seek(state.time - 5)}>-5 sec</button>
      <button onClick={() => controls.seek(state.time + 5)}>+5 sec</button>
    </div>
  );
};
```
## Reference
```jsx
const [video, state, controls, ref] = useVideo(props);
const [video, state, controls, ref] = useVideo(<video {...props}/>);
```
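Both call signatures shown above should be interchangeable: the hook either clones the element you pass in (reusing its props) or creates a `<video>` element from a plain props object. A minimal sketch of the two forms (the `video.mp4` URL is just a placeholder):
```jsx
import * as React from 'react';
import {useVideo} from 'react-use';

// Passing a plain props object; the hook creates the <video> element itself.
const FromProps = () => {
  const [video] = useVideo({src: 'video.mp4', autoPlay: false});
  return video;
};

// Passing a ready-made element; the hook clones it and injects its own
// ref and event handlers.
const FromElement = () => {
  const [video] = useVideo(<video src="video.mp4" autoPlay={false} />);
  return video;
};
```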
`video` is the React `<video>` element that you have to insert somewhere in your render tree, for example:
```jsx
<div>{video}</div>
```
`state` tracks the state of the video and has the following shape:
```json
{
  "buffered": [
    {
      "start": 0,
      "end": 425.952625
    }
  ],
  "time": 5.244996,
  "duration": 425.952625,
  "isPlaying": false,
  "muted": false,
  "volume": 1
}
```
`controls` is a collection of methods that allow you to control playback of the video; it has the following interface:
```ts
interface HTMLMediaControls {
  play: () => Promise<void> | void;
  pause: () => void;
  mute: () => void;
  unmute: () => void;
  volume: (volume: number) => void;
  seek: (time: number) => void;
}
```
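For instance (a sketch, not part of the library docs), `controls` can be combined with `state.isPlaying` to drive a single play/pause toggle:
```jsx
import * as React from 'react';
import {useVideo} from 'react-use';

const PlayPauseToggle = () => {
  const [video, state, controls] = useVideo(
    <video src="http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4" />
  );

  return (
    <div>
      {video}
      {/* Flip between play() and pause() based on the tracked state. */}
      <button onClick={state.isPlaying ? controls.pause : controls.play}>
        {state.isPlaying ? 'Pause' : 'Play'}
      </button>
    </div>
  );
};
```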
`ref` is a React reference to the HTML `<video>` element; you can access the element via `ref.current`. Note that it may be `null`.
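For features the `controls` object does not cover, you could fall back to the raw element through `ref`. For example, `playbackRate` is a standard `HTMLMediaElement` property rather than part of this hook's API; the component below is a hypothetical sketch:
```jsx
import * as React from 'react';
import {useVideo} from 'react-use';

const DoubleSpeedButton = () => {
  const [video, , , ref] = useVideo(
    <video src="http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4" />
  );

  const doubleSpeed = () => {
    // The ref may still be null before the element mounts, so guard the access.
    if (ref.current) {
      ref.current.playbackRate = 2;
    }
  };

  return (
    <div>
      {video}
      <button onClick={doubleSpeed}>Double speed</button>
    </div>
  );
};
```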
And finally, `props` &mdash; all props that `<video>` accepts.

useVideo Storybook story (new file, +35 lines)

@@ -0,0 +1,35 @@
import * as React from 'react';
import {storiesOf} from '@storybook/react';
import {useVideo} from '..';
import ShowDocs from '../util/ShowDocs';

const Demo = () => {
  const [video, state, controls, ref] = useVideo(
    <video src="http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4" autoPlay />
  );

  return (
    <div>
      {video}
      <pre>{JSON.stringify(state, null, 2)}</pre>
      <button onClick={controls.pause}>Pause</button>
      <button onClick={controls.play}>Play</button>
      <br/>
      <button onClick={controls.mute}>Mute</button>
      <button onClick={controls.unmute}>Un-mute</button>
      <br/>
      <button onClick={() => controls.volume(.1)}>Volume: 10%</button>
      <button onClick={() => controls.volume(.5)}>Volume: 50%</button>
      <button onClick={() => controls.volume(1)}>Volume: 100%</button>
      <br/>
      <button onClick={() => controls.seek(state.time - 5)}>-5 sec</button>
      <button onClick={() => controls.seek(state.time + 5)}>+5 sec</button>
    </div>
  );
};

storiesOf('useVideo', module)
  .add('Docs', () => <ShowDocs md={require('../../docs/useVideo.md')} />)
  .add('Demo', () =>
    <Demo/>
  )

src/index.ts

@@ -36,6 +36,7 @@ import useToggle from './useToggle';
import useTween from './useTween';
import useUnmount from './useUnmount';
import useUpdate from './useUpdate';
import useVideo from './useVideo';
import useWindowSize from './useWindowSize';
export {
@@ -77,5 +78,6 @@ export {
useTween,
useUnmount,
useUpdate,
useVideo,
useWindowSize,
};

src/useAudio.ts

@@ -1,177 +1,5 @@
import * as React from 'react';
import {useEffect, useRef, ReactRef} from './react';
import useSetState from './useSetState';
import parseTimeRanges from './util/parseTimeRanges';
import createHTMLMediaHook from './util/createHTMLMediaHook';
export interface AudioProps extends React.AudioHTMLAttributes<any> {
src: string;
}
export interface AudioState {
buffered: any[];
duration: number;
isPlaying: boolean;
muted: boolean;
time: number;
volume: number;
}
export interface AudioControls {
play: () => Promise<void> | void;
pause: () => void;
mute: () => void;
unmute: () => void;
volume: (volume: number) => void;
seek: (time: number) => void;
}
const useAudio = (props: AudioProps): [React.ReactElement<AudioProps>, AudioState, AudioControls, ReactRef<HTMLAudioElement | null>] => {
const [state, setState] = useSetState<AudioState>({
buffered: [],
time: 0,
duration: 0,
isPlaying: false,
muted: false,
volume: 1,
});
const ref = useRef<HTMLAudioElement | null>(null);
const wrapEvent = (userEvent, proxyEvent?) => {
return (event) => {
try {
proxyEvent && proxyEvent(event);
} finally {
userEvent && userEvent(event);
}
};
};
const onPlay = () => setState({isPlaying: true});
const onPause = () => setState({isPlaying: false});
const onVolumeChange = () => {
const el = ref.current;
if (!el) return;
setState({
muted: el.muted,
volume: el.volume,
});
};
const onDurationChange = () => {
const el = ref.current;
if (!el) return;
const {duration, buffered} = el;
setState({
duration,
buffered: parseTimeRanges(buffered),
});
};
const onTimeUpdate = () => {
const el = ref.current;
if (!el) return;
setState({time: el.currentTime});
};
const onProgress = () => {
const el = ref.current;
if (!el) return;
setState({buffered: parseTimeRanges(el.buffered)});
};
const element = React.createElement('audio', {
controls: false,
...props,
ref,
onPlay: wrapEvent(props.onPlay, onPlay),
onPause: wrapEvent(props.onPause, onPause),
onVolumeChange: wrapEvent(props.onVolumeChange, onVolumeChange),
onDurationChange: wrapEvent(props.onDurationChange, onDurationChange),
onTimeUpdate: wrapEvent(props.onTimeUpdate, onTimeUpdate),
onProgress: wrapEvent(props.onProgress, onProgress),
});
// Some browsers return `Promise` on `.play()` and may throw errors
// if one tries to execute another `.play()` or `.pause()` while that
// promise is resolving. So we prevent that with this lock.
// See: https://bugs.chromium.org/p/chromium/issues/detail?id=593273
let lockPlay: boolean = false;
const controls = {
play: () => {
const el = ref.current;
if (!el) return undefined;
if (!lockPlay) {
const promise = el.play();
const isPromise = typeof promise === 'object';
if (isPromise) {
lockPlay = true;
const resetLock = () => {
lockPlay = false;
};
promise.then(resetLock, resetLock);
}
return promise;
}
return undefined;
},
pause: () => {
const el = ref.current;
if (el && !lockPlay) {
return el.pause();
}
},
seek: (time: number) => {
const el = ref.current;
if (!el || (state.duration === undefined)) return;
time = Math.min(state.duration, Math.max(0, time));
el.currentTime = time;
},
volume: (volume: number) => {
const el = ref.current;
if (!el) return;
volume = Math.min(1, Math.max(0, volume));
el.volume = volume;
setState({volume});
},
mute: () => {
const el = ref.current;
if (!el) return;
el.muted = true;
},
unmute: () => {
const el = ref.current;
if (!el) return;
el.muted = false;
},
};
useEffect(() => {
const el = ref.current!;
if (!el) {
if (process.env.NODE_ENV !== 'production') {
console.error(
'useAudio() ref to <audio> element is empty at mount. ' +
'It seem you have not rendered the audio element, which is ' +
'returns as the first argument const [audio] = useAudio(...).'
);
}
return;
}
// Start media, if autoPlay requested.
if (props.autoPlay && el.paused) {
controls.play();
}
setState({
volume: el.volume,
muted: el.muted,
});
}, [props.src]);
return [element, state, controls, ref];
};
const useAudio = createHTMLMediaHook('audio');
export default useAudio;

src/useVideo.ts (new file, +5 lines)

@@ -0,0 +1,5 @@
import createHTMLMediaHook from './util/createHTMLMediaHook';
const useVideo = createHTMLMediaHook('video');
export default useVideo;

src/util/createHTMLMediaHook.ts (new file, +207 lines)

@@ -0,0 +1,207 @@
import * as React from 'react';
import {useEffect, useRef, ReactRef} from '../react';
import useSetState from '../useSetState';
import parseTimeRanges from './parseTimeRanges';

export interface HTMLMediaProps extends React.AudioHTMLAttributes<any>, React.VideoHTMLAttributes<any> {
  src: string;
}

export interface HTMLMediaState {
  buffered: any[];
  duration: number;
  isPlaying: boolean;
  muted: boolean;
  time: number;
  volume: number;
}

export interface HTMLMediaControls {
  play: () => Promise<void> | void;
  pause: () => void;
  mute: () => void;
  unmute: () => void;
  volume: (volume: number) => void;
  seek: (time: number) => void;
}

const createHTMLMediaHook = (tag: 'audio' | 'video') => {
  const hook = (elOrProps: HTMLMediaProps | React.ReactElement<HTMLMediaProps>): [React.ReactElement<HTMLMediaProps>, HTMLMediaState, HTMLMediaControls, ReactRef<HTMLAudioElement | null>] => {
    let element: React.ReactElement<any> | undefined;
    let props: HTMLMediaProps;

    if (React.isValidElement(elOrProps)) {
      element = elOrProps;
      props = element.props;
    } else {
      props = elOrProps as HTMLMediaProps;
    }

    const [state, setState] = useSetState<HTMLMediaState>({
      buffered: [],
      time: 0,
      duration: 0,
      isPlaying: false,
      muted: false,
      volume: 1,
    });
    const ref = useRef<HTMLAudioElement | null>(null);

    const wrapEvent = (userEvent, proxyEvent?) => {
      return (event) => {
        try {
          proxyEvent && proxyEvent(event);
        } finally {
          userEvent && userEvent(event);
        }
      };
    };

    const onPlay = () => setState({isPlaying: true});
    const onPause = () => setState({isPlaying: false});
    const onVolumeChange = () => {
      const el = ref.current;
      if (!el) return;
      setState({
        muted: el.muted,
        volume: el.volume,
      });
    };
    const onDurationChange = () => {
      const el = ref.current;
      if (!el) return;
      const {duration, buffered} = el;
      setState({
        duration,
        buffered: parseTimeRanges(buffered),
      });
    };
    const onTimeUpdate = () => {
      const el = ref.current;
      if (!el) return;
      setState({time: el.currentTime});
    };
    const onProgress = () => {
      const el = ref.current;
      if (!el) return;
      setState({buffered: parseTimeRanges(el.buffered)});
    };

    if (element) {
      element = React.cloneElement(element, {
        controls: false,
        ...props,
        ref,
        onPlay: wrapEvent(props.onPlay, onPlay),
        onPause: wrapEvent(props.onPause, onPause),
        onVolumeChange: wrapEvent(props.onVolumeChange, onVolumeChange),
        onDurationChange: wrapEvent(props.onDurationChange, onDurationChange),
        onTimeUpdate: wrapEvent(props.onTimeUpdate, onTimeUpdate),
        onProgress: wrapEvent(props.onProgress, onProgress),
      });
    } else {
      element = React.createElement(tag, {
        controls: false,
        ...props,
        ref,
        onPlay: wrapEvent(props.onPlay, onPlay),
        onPause: wrapEvent(props.onPause, onPause),
        onVolumeChange: wrapEvent(props.onVolumeChange, onVolumeChange),
        onDurationChange: wrapEvent(props.onDurationChange, onDurationChange),
        onTimeUpdate: wrapEvent(props.onTimeUpdate, onTimeUpdate),
        onProgress: wrapEvent(props.onProgress, onProgress),
      } as any); // TODO: fix this typing.
    }

    // Some browsers return `Promise` on `.play()` and may throw errors
    // if one tries to execute another `.play()` or `.pause()` while that
    // promise is resolving. So we prevent that with this lock.
    // See: https://bugs.chromium.org/p/chromium/issues/detail?id=593273
    let lockPlay: boolean = false;

    const controls = {
      play: () => {
        const el = ref.current;
        if (!el) return undefined;

        if (!lockPlay) {
          const promise = el.play();
          const isPromise = typeof promise === 'object';

          if (isPromise) {
            lockPlay = true;
            const resetLock = () => {
              lockPlay = false;
            };
            promise.then(resetLock, resetLock);
          }

          return promise;
        }
        return undefined;
      },
      pause: () => {
        const el = ref.current;
        if (el && !lockPlay) {
          return el.pause();
        }
      },
      seek: (time: number) => {
        const el = ref.current;
        if (!el || (state.duration === undefined)) return;
        time = Math.min(state.duration, Math.max(0, time));
        el.currentTime = time;
      },
      volume: (volume: number) => {
        const el = ref.current;
        if (!el) return;
        volume = Math.min(1, Math.max(0, volume));
        el.volume = volume;
        setState({volume});
      },
      mute: () => {
        const el = ref.current;
        if (!el) return;
        el.muted = true;
      },
      unmute: () => {
        const el = ref.current;
        if (!el) return;
        el.muted = false;
      },
    };

    useEffect(() => {
      const el = ref.current!;

      if (!el) {
        if (process.env.NODE_ENV !== 'production') {
          console.error(
            `use${tag === 'audio' ? 'Audio' : 'Video'}() ref to <${tag}> element is empty at mount. ` +
            `It seems you have not rendered the <${tag}> element, which is ` +
            `returned as the first item of the hook result.`
          );
        }
        return;
      }

      // Start media, if autoPlay requested.
      if (props.autoPlay && el.paused) {
        controls.play();
      }

      setState({
        volume: el.volume,
        muted: el.muted,
      });
    }, [props.src]);

    return [element, state, controls, ref];
  };

  return hook;
};
export default createHTMLMediaHook;