I'm writing an Android program that streams the microphone straight to the phone speaker. The code runs, but the UI hangs and the app freezes. Even while the app is frozen, the audio keeps streaming. Where is the error..?
RecordBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
TrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
Record record = new Record();
record.run();
}

public class Record extends Thread
{
    final short[] buffer = new short[RecordBufferSize];
    short[] readBuffer = new short[TrackBufferSize];

    public void run() {
        isRecording = true;
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
        AudioRecord arec = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, RecordBufferSize);
        AudioTrack atrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, TrackBufferSize, AudioTrack.MODE_STREAM);
        //am.setRouting(AudioManager.MODE_NORMAL, AudioManager.ROUTE_EARPIECE, AudioManager.ROUTE_ALL);
        atrack.setPlaybackRate(sampleRateInHz);
        byte[] buffer = new byte[RecordBufferSize];
        arec.startRecording();
        atrack.play();
        while (isRecording) {
            AudioLenght = arec.read(buffer, 0, RecordBufferSize);
            atrack.write(buffer, 0, AudioLenght);
        }
        arec.stop();
        atrack.stop();
        isRecording = false;
    }
}
Here's my code.
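A likely culprit for the frozen UI: `record.run()` executes the capture loop directly on the calling (UI) thread, while `Thread.start()` is what spawns a new thread and keeps the UI responsive. A minimal sketch of the difference:

Record record = new Record();
// record.run();   // run() executes the while(isRecording) loop on the current (UI) thread
record.start();    // start() spawns a new thread and calls run() there, so the UI keeps drawing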
I gave it a try and got it working. Try this:
The Java code I used:
package com.example.root.akuvo;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.PorterDuff;
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AutomaticGainControl;
import android.media.audiofx.BassBoost;
import android.media.audiofx.NoiseSuppressor;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
public class MicToSpeakerActivity extends AppCompatActivity {
//Audio
private Button mOn;
private boolean isOn;
private boolean isRecording;
private AudioRecord record;
private AudioTrack player;
private AudioManager manager;
private int recordState, playerState;
private int minBuffer;
//Audio Settings
private final int source = MediaRecorder.AudioSource.CAMCORDER;
private final int channel_in = AudioFormat.CHANNEL_IN_MONO;
private final int channel_out = AudioFormat.CHANNEL_OUT_MONO;
private final int format = AudioFormat.ENCODING_PCM_16BIT;
private final static int REQUEST_ENABLE_BT = 1;
private boolean IS_HEADPHONE_AVAILBLE=false;
@RequiresApi(api = Build.VERSION_CODES.M)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mic_to_speaker);
//Reduce latency; volume keys should control the media stream
setVolumeControlStream(AudioManager.STREAM_MUSIC);
mOn = (Button) findViewById(R.id.button);
isOn = false;
isRecording = false;
manager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
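// MODE_IN_COMMUNICATION tells the platform this is two-way, call-like (VoIP-style) audio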
manager.setMode(AudioManager.MODE_IN_COMMUNICATION);
//Check for headset availability
AudioDeviceInfo[] audioDevices = manager.getDevices(AudioManager.GET_DEVICES_ALL);
for(AudioDeviceInfo deviceInfo : audioDevices) {
if (deviceInfo.getType() == AudioDeviceInfo.TYPE_WIRED_HEADPHONES || deviceInfo.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET || deviceInfo.getType() == AudioDeviceInfo.TYPE_USB_HEADSET) {
IS_HEADPHONE_AVAILBLE = true;
}
}
if (!IS_HEADPHONE_AVAILBLE){
// get delete_audio_dialog.xml view
LayoutInflater layoutInflater = LayoutInflater.from(MicToSpeakerActivity.this);
View promptView = layoutInflater.inflate(R.layout.insert_headphone_dialog, null);
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(MicToSpeakerActivity.this);
alertDialogBuilder.setView(promptView);
// setup a dialog window
alertDialogBuilder.setCancelable(false)
.setPositiveButton("Try Again", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
startActivity(new Intent(getIntent()));
}
})
.setNegativeButton("Cancel",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
startActivity(new Intent(MicToSpeakerActivity.this,MainActivity.class));
dialog.cancel();
}
});
// create an alert dialog
AlertDialog alert = alertDialogBuilder.create();
alert.show();
}
initAudio();
mOn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mOn.getBackground().setColorFilter(getResources().getColor(!isOn ? R.color.colorOn : R.color.colorOff), PorterDuff.Mode.SRC_ATOP);
isOn = !isOn;
if(isOn) {
(new Thread() {
@Override
public void run()
{
startAudio();
}
}).start();
} else {
endAudio();
}
}
});
}
public void initAudio() {
//Tests all sample rates before selecting one that works
int sample_rate = getSampleRate();
minBuffer = AudioRecord.getMinBufferSize(sample_rate, channel_in, format);
record = new AudioRecord(source, sample_rate, channel_in, format, minBuffer);
recordState = record.getState();
int id = record.getAudioSessionId();
Log.d("Record", "ID: " + id);
playerState = 0;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
player = new AudioTrack(
new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).setContentType(AudioAttributes.CONTENT_TYPE_MUSIC).build(),
new AudioFormat.Builder().setEncoding(format).setSampleRate(sample_rate).setChannelMask(channel_out).build(),
minBuffer,
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
playerState = player.getState();
// Formatting Audio
if(AcousticEchoCanceler.isAvailable()) {
AcousticEchoCanceler echo = AcousticEchoCanceler.create(id);
echo.setEnabled(true);
Log.d("Echo", "Off");
}
if(NoiseSuppressor.isAvailable()) {
NoiseSuppressor noise = NoiseSuppressor.create(id);
noise.setEnabled(true);
Log.d("Noise", "Off");
}
if(AutomaticGainControl.isAvailable()) {
AutomaticGainControl gain = AutomaticGainControl.create(id);
gain.setEnabled(false);
Log.d("Gain", "Off");
}
BassBoost base = new BassBoost(1, player.getAudioSessionId());
base.setStrength((short) 1000);
}
}
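// startAudio() blocks in the read/write loop until isRecording is cleared, so it is run on a background thread (see the onClick handler above).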
public void startAudio() {
int read = 0, write = 0;
if(recordState == AudioRecord.STATE_INITIALIZED && playerState == AudioTrack.STATE_INITIALIZED) {
record.startRecording();
player.play();
isRecording = true;
Log.d("Record", "Recording...");
}
while(isRecording) {
short[] audioData = new short[minBuffer];
if(record != null)
read = record.read(audioData, 0, minBuffer);
else
break;
Log.d("Record", "Read: " + read);
if(player != null)
write = player.write(audioData, 0, read);
else
break;
Log.d("Record", "Write: " + write);
}
}
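// endAudio() is called from the UI thread; clearing isRecording lets the loop in startAudio() fall through.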
public void endAudio() {
if(record != null) {
if(record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING)
record.stop();
isRecording = false;
Log.d("Record", "Stopping...");
}
if(player != null) {
if(player.getPlayState() == AudioTrack.PLAYSTATE_PLAYING)
player.stop();
isRecording = false;
Log.d("Player", "Stopping...");
}
}
public int getSampleRate() {
//Find a sample rate that works with the device
for (int rate : new int[] {8000, 11025, 16000, 22050, 44100, 48000}) {
int buffer = AudioRecord.getMinBufferSize(rate, channel_in, format);
if (buffer > 0)
return rate;
}
return -1;
}
}
The XML code I used:
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.example.root.akuvo.MicToSpeakerActivity">
<Button
android:id="@+id/button"
android:layout_width="104dp"
android:layout_height="102dp"
android:layout_marginBottom="8dp"
android:layout_marginEnd="8dp"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:background="@android:drawable/ic_lock_power_off"
android:backgroundTint="@color/colorOff"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintVertical_bias="0.396" />
</android.support.constraint.ConstraintLayout>
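One thing the code above depends on: capturing from the microphone needs the android.permission.RECORD_AUDIO permission in the manifest, and on Android 6.0+ it must also be granted at runtime, otherwise AudioRecord never reaches STATE_INITIALIZED. A minimal sketch of a runtime check that could be called from onCreate() before initAudio() (the request code and helper name are illustrative, not part of the code above):

private static final int REQUEST_RECORD_AUDIO = 2; // hypothetical request code

private boolean hasMicPermission() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M)
        return true; // granted at install time before runtime permissions existed
    if (checkSelfPermission(android.Manifest.permission.RECORD_AUDIO)
            == android.content.pm.PackageManager.PERMISSION_GRANTED)
        return true;
    // ask the user; initAudio() can be retried from onRequestPermissionsResult()
    requestPermissions(new String[]{android.Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO);
    return false;
}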