App.tsx
import React, { useState, useEffect } from 'react';
import { Button, Text, View, NativeModules } from 'react-native';
import AudioRecord from 'react-native-audio-record';
import { check, PERMISSIONS, request, RESULTS } from 'react-native-permissions';

const App = () => {
  const [result, setResult] = useState('');
  const [startTime, setStartTime] = useState(0);
  const { InferenceModule } = NativeModules;

  useEffect(() => {
    // Configure 16 kHz, mono, 16-bit WAV recording (a common input format for speech models).
    const options = {
      sampleRate: 16000,
      channels: 1,
      bitsPerSample: 16,
      audioSource: 6, // Android-only setting: 6 = VOICE_RECOGNITION
      wavFile: 'test.wav',
    };
    AudioRecord.init(options);
  }, []);

  const startRecording = async () => {
    // Check the iOS microphone permission and request it if it has not been granted yet.
    let permission = await check(PERMISSIONS.IOS.MICROPHONE);
    if (permission !== RESULTS.GRANTED) {
      permission = await request(PERMISSIONS.IOS.MICROPHONE);
    }
    if (permission !== RESULTS.GRANTED) {
      return; // Still not granted; don't start recording.
    }
    setStartTime(Date.now()); // Record the start time
    AudioRecord.start();
  };

  const stopRecording = async () => {
    const audioFilePath = await AudioRecord.stop();
    const durationInSeconds = (Date.now() - startTime) / 1000;
    // Hand the recorded WAV file to the native module for recognition.
    InferenceModule.recognizeFromFilePath(
      audioFilePath,
      durationInSeconds,
      (error: unknown, inferenceResult: string) => {
        if (error) {
          console.error(error);
          return;
        }
        setResult(inferenceResult);
      },
    );
  };

  return (
    <View style={{ flex: 1, justifyContent: 'center', alignItems: 'center' }}>
      <Button title="Start Recording" onPress={startRecording} />
      <Button title="Stop Recording" onPress={stopRecording} />
      <Text>{result}</Text>
    </View>
  );
};

export default App;
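
// Note: App.tsx calls a native InferenceModule through NativeModules, passing the
// recorded file path, the duration in seconds, and a Node-style callback. The native
// implementation is not part of this file; the sketch below is only a hypothetical
// TypeScript typing (e.g. a companion InferenceModule.ts) that mirrors the call shape
// assumed above, not the actual bridge code.

import { NativeModules } from 'react-native';

// Callback shape used by App.tsx: an error (if any) and the recognized text.
type InferenceCallback = (error: unknown, inferenceResult: string) => void;

interface InferenceModuleType {
  // audioFilePath: path returned by AudioRecord.stop()
  // durationInSeconds: recording length measured in App.tsx
  recognizeFromFilePath(
    audioFilePath: string,
    durationInSeconds: number,
    callback: InferenceCallback,
  ): void;
}

// Typed re-export of the native bridge module (name assumed to match the native side).
export const InferenceModule = NativeModules.InferenceModule as InferenceModuleType;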