React Integration
Learn how to integrate the tm-videoface-plus
library into your React project by building a reusable component for liveness detection, step by step.
Basic Implementation
import React, { useEffect, useRef, useState } from 'react';
import { VideoFace, CameraHelper, Envs, ChallengeMoves, ChallengeType, EventType } from 'tm-videoface-plus';
/**
 * Reusable liveness-detection component backed by the tm-videoface-plus SDK.
 *
 * Lifecycle: on mount it configures the challenges, opens the camera into the
 * <video> element, subscribes to SDK events, and starts the stream; on unmount
 * it closes the SDK session and releases the camera.
 */
const LivenessDetectionComponent: React.FC = () => {
  const videoRef = useRef<HTMLVideoElement>(null);
  // Refs (not plain `let` locals) so the SDK instance and stream survive
  // re-renders — a `let` in the component body is re-initialized to null on
  // every render, which would orphan the camera and the SDK session.
  const videoFaceRef = useRef<VideoFace | null>(null);
  const mediaStreamRef = useRef<MediaStream | null>(null);
  // Drives the "Start Camera" button's enabled state.
  const [cameraPaused, setCameraPaused] = useState(false);

  // Pause without releasing: disabling tracks keeps the camera open so it can
  // be resumed instantly.
  const pauseCamera = () => {
    if (mediaStreamRef.current) {
      mediaStreamRef.current.getTracks().forEach((track) => (track.enabled = false));
      setCameraPaused(true);
    }
  };

  const resumeCamera = () => {
    if (mediaStreamRef.current) {
      mediaStreamRef.current.getTracks().forEach((track) => (track.enabled = true));
      setCameraPaused(false);
    }
  };

  // Fully release the camera (stops every track and drops the stream).
  const stopCamera = () => {
    if (mediaStreamRef.current) {
      mediaStreamRef.current.getTracks().forEach((track) => track.stop());
      mediaStreamRef.current = null;
    }
  };

  useEffect(() => {
    const initializeVideoFace = async () => {
      // Challenges the user must pass: blink, smile, and two head turns.
      const config = [
        { type: ChallengeType.CH_BLINK, enabled: true },
        { type: ChallengeType.CH_SMILE, enabled: true },
        {
          type: ChallengeType.CH_HEAD_POSE,
          enabled: true,
          moves: [ChallengeMoves.MV_HEAD_TURN_LEFT, ChallengeMoves.MV_HEAD_TURN_RIGHT]
        }
      ];
      const videoFace = new VideoFace('YOUR_KEY', Envs.DEV, config);
      videoFaceRef.current = videoFace;
      try {
        // Open the camera and attach the stream to the <video> element.
        const stream = await videoFace.openCamera();
        mediaStreamRef.current = stream;
        if (videoRef.current) {
          videoRef.current.srcObject = stream;
        }
        // Start paused; the user explicitly starts via the button.
        pauseCamera();

        // Subscribe to SDK events.
        videoFace.events(EventType.CONNECTED).subscribe(() => {
          console.log('Connected');
        });
        videoFace.events(EventType.MOVE).subscribe((move) => {
          // Handle move events e.g. ChallengeMoves.MV_HEAD_TURN_LEFT
          console.log('Move:', move);
        });
        videoFace.events(EventType.RESULT).subscribe((result) => {
          console.log('Result:', result);
          // Session finished — release the camera.
          stopCamera();
        });
        videoFace.events(EventType.ERROR).subscribe((error) => {
          console.error('Error:', error);
        });

        // Start the stream process.
        await videoFace.startStream();
      } catch (error) {
        console.error('Error initializing VideoFace:', error);
      }
    };

    initializeVideoFace();

    // Cleanup on unmount: close the SDK session AND release the camera so the
    // hardware indicator light goes off.
    return () => {
      if (videoFaceRef.current) {
        videoFaceRef.current.close();
      }
      stopCamera();
    };
  }, []);

  return (
    <div>
      <video ref={videoRef} autoPlay playsInline style={{ width: '100%' }} />
      <div>
        <button onClick={resumeCamera} disabled={!cameraPaused}>
          Start Camera
        </button>
      </div>
    </div>
  );
};
export default LivenessDetectionComponent;
Using the Component
Integrate the LivenessDetectionComponent into your React application:
import React from 'react';
import ReactDOM from 'react-dom';
import LivenessDetectionComponent from './LivenessDetectionComponent';

// Root application component hosting the liveness-detection example.
const App: React.FC = () => (
  <div>
    {/* Heading fixed: this example demonstrates liveness detection, not OCR. */}
    <h1>Liveness Detection Integration Example</h1>
    <LivenessDetectionComponent />
  </div>
);

// NOTE(review): ReactDOM.render is deprecated as of React 18 — on React 18+
// use createRoot(document.getElementById('root')!).render(<App />) from
// 'react-dom/client' instead.
ReactDOM.render(<App />, document.getElementById('root'));