Below is the code that plays the voice clip passed in from the previous screen.
import * as Progress from 'react-native-progress';
import { FontFamily, Fonts } from '../../common/GConstants';
import { Image, SafeAreaView, StyleSheet, Text, TouchableOpacity, View } from 'react-native';
import React, { Component } from 'react';
import { heightPercentageToDP as hp, widthPercentageToDP as wp } from 'react-native-responsive-screen';
import AudioRecorderPlayer from 'react-native-audio-recorder-player';
import Colors from '../../common/GColors';
import KeepAwake from 'react-native-keep-awake';
import WaveForm from 'react-native-audiowaveform';
import images from '../../assets/images/index';
import { secondsToTime } from '../../common/GFunction';
export default class ConfirmRecording extends Component {
  constructor() {
    super();
    this.state = {
      recordTime: '',
      duration: '',
      isPlaying: false,
      totalDuration: '',
      audioPath: '',
      currentPositionSec: '',
      currentDurationSec: '',
    };
    this.audioRecorderPlayer = new AudioRecorderPlayer();
  }
  // Playback progress as a fraction between 0 and 1 (e.g. for a progress bar).
  showProgress = () => {
    const ratio = this.state.currentPositionSec / this.state.totalDuration;
    if (!this.state.totalDuration || Number.isNaN(ratio)) {
      return 0;
    }
    // Clamp so the bar never overshoots the end of the clip.
    return Math.min(ratio, 1);
  };
  onStartPlay = async () => {
    console.log('START and is playing', this.state.audioPath + '--------' + this.state.isPlaying);
    if (this.state.isPlaying) {
      // Already playing: stop and detach the listener.
      this.setState({ isPlaying: false });
      this.audioRecorderPlayer.stopPlayer();
      this.audioRecorderPlayer.removePlayBackListener();
    } else {
      const msg = await this.audioRecorderPlayer.startPlayer(
        this.state.audioPath,
        // 'http://podcasts.cnn.net/cnn/services/podcasting/specials/audio/2007/05/milesobrien.mp3',
      );
      console.log('Play MSG', msg);
      this.audioRecorderPlayer.addPlayBackListener((e) => {
        this.setState({
          isPlaying: true,
          // Positions come back in milliseconds; convert to whole seconds.
          currentPositionSec: Math.floor(e.current_position / 1000),
          currentDurationSec: Math.floor(e.duration / 1000),
          playTime: e.current_position,
          duration: e.duration,
        });
        // Stop once the clip has played through to the end.
        if (e.current_position >= e.duration) {
          this.setState({ isPlaying: false });
          console.log('Stopped');
          this.audioRecorderPlayer.stopPlayer();
          this.audioRecorderPlayer.removePlayBackListener();
        }
      });
    }
  };
  componentDidMount = () => {
    const audioPath = this.props.navigation.getParam('audioPath');
    const duration = this.props.navigation.getParam('duration');
    console.warn('Data from previous screen', audioPath + '--------' + duration);
    this.setState({
      audioPath: audioPath,
      duration: duration,
    });
  };
  componentWillUnmount = () => {
    // Only tear down the player here; calling setState in componentWillUnmount
    // is a no-op and triggers a React warning.
    this.audioRecorderPlayer.stopPlayer();
    this.audioRecorderPlayer.removePlayBackListener();
  };
  render() {
    return (
      <SafeAreaView style={style.mainContainer}>
        <View style={style.audioView}>
          <Text style={style.audioViewText}>Confirm Recording</Text>
          <View style={{ marginTop: hp(2) }}>
            <View style={style.secondWaveView}>
              <WaveForm
                style={style.WaveForm}
                source={{ uri: this.state.audioPath }} // does not work
                stop={this.state.isPlaying}
                play={this.state.isPlaying}
                // autoPlay={true}
                waveFormStyle={{ waveColor: Colors.gray, scrubColor: Colors.darkBlue }}
              />
              <Text>
                {secondsToTime(this.state.currentPositionSec).m.toString().padStart(2, '0') +
                  ':' +
                  secondsToTime(this.state.currentPositionSec).s.toString().padStart(2, '0')}
              </Text>
            </View>
            <View style={style.secondAudioView}>
              <TouchableOpacity
                onPress={() => {
                  // Stop playback before sending and leaving this screen.
                  this.audioRecorderPlayer.stopPlayer();
                  this.audioRecorderPlayer.removePlayBackListener();
                  this.setState({ audioPath: '', isPlaying: false }, () => {
                    // add Impression
                    // this.props.navigation.state.params.a(true),
                    this.props.navigation.navigate('ImpressionPro');
                  });
                }}
                activeOpacity={0.9}>
                <Image source={images.sendIcon} />
              </TouchableOpacity>
              <View style={{ flex: 1 }} />
              <TouchableOpacity
                style={style.icon}
                onPress={() => {
                  // Discard the recording and go back two screens.
                  this.audioRecorderPlayer.stopPlayer();
                  this.audioRecorderPlayer.removePlayBackListener();
                  this.setState({ audioPath: '', isPlaying: false }, () => {
                    this.props.navigation.pop(2);
                  });
                }}
                activeOpacity={0.9}>
                <Image source={images.deleteCancelBtn} />
              </TouchableOpacity>
              <TouchableOpacity
                onPress={() => {
                  this.onStartPlay()
                    .then(() => {
                      console.log('Playing');
                    })
                    .catch((err) => {
                      console.log('Play error', err);
                    });
                }}
                style={style.icon}
                activeOpacity={0.9}>
                <Image source={images.playBtn} />
              </TouchableOpacity>
            </View>
          </View>
        </View>
        <KeepAwake />
      </SafeAreaView>
    );
  }
}
const style = StyleSheet.create({
  mainContainer: {
    flex: 1,
    backgroundColor: Colors.darkBlue,
    justifyContent: 'center',
    alignItems: 'center',
  },
  secondWaveView: {
    marginTop: hp(2),
    marginHorizontal: wp(5.5),
    flexDirection: 'row',
    justifyContent: 'space-between',
  },
  secondAudioView: {
    flexDirection: 'row',
    marginTop: hp(2),
    marginStart: wp(5),
  },
  WaveForm: {
    height: 25,
    flex: 1,
  },
  icon: {
    marginEnd: wp(5),
  },
  audioView: {
    backgroundColor: Colors.white,
    height: '25%',
    width: '88%',
    alignSelf: 'center',
    borderRadius: hp(2),
  },
  audioViewText: {
    textAlign: 'center',
    marginTop: hp(2),
    fontSize: Fonts.fontsize20,
    marginHorizontal: wp(6),
    fontFamily: FontFamily.medium,
    color: Colors.textCoffeeColor,
  },
});
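As a side note, react-native-progress is imported and showProgress() returns a 0-1 ratio, but nothing in render() consumes it. A minimal sketch of how it could be wired to a Progress.Bar, in case that was the intent (the width and colors below are placeholders, not values from the original screen):

// Hypothetical snippet for render(), assuming showProgress() should feed a bar:
<Progress.Bar
  progress={this.showProgress()} // fraction between 0 and 1
  width={wp(60)}
  color={Colors.darkBlue}
  unfilledColor={Colors.gray}
  borderWidth={0}
/>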
The problem is an echo: the WaveForm component and the AudioRecorderPlayer both play the clip at the same time, and that overlap is what causes the issue.
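If the waveform is only needed as a visualization, one way to avoid the double playback is to stop driving its play prop from isPlaying and keep it off, so that only the AudioRecorderPlayer produces sound. A sketch using only the props already shown above (whether react-native-audiowaveform still renders the static waveform in this mode is an assumption worth verifying):

<WaveForm
  style={style.WaveForm}
  source={{ uri: this.state.audioPath }}
  play={false} // never let the waveform component output audio itself
  autoPlay={false}
  waveFormStyle={{ waveColor: Colors.gray, scrubColor: Colors.darkBlue }}
/>

Playback is then driven solely by this.audioRecorderPlayer in onStartPlay(), so only one audio stream exists at a time and the echo should go away.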