Can someone please give me sample code for real-time audio processing with an Android accessibility service? I need to process call audio, but I don't know how to implement this. Please share your thoughts on it.
Please see the manifest below:
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="mycalltest">

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
    <uses-permission android:name="android.permission.READ_CONTACTS" />
    <uses-permission android:name="android.permission.WRITE_CONTACTS" />
    <uses-permission android:name="android.permission.BLUETOOTH" />
    <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
    <uses-permission android:name="android.permission.READ_PHONE_STATE" />
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
    <uses-permission android:name="android.permission.READ_CALL_LOG" />
    <uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:requestLegacyExternalStorage="true"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme"
        android:usesCleartextTraffic="true">

        <!-- android:exported must be declared explicitly when targeting Android 12+;
             binding is still restricted to the system by BIND_ACCESSIBILITY_SERVICE. -->
        <service
            android:name=".MyAccessibilityService"
            android:exported="true"
            android:label="@string/accessibility_service_label"
            android:permission="android.permission.BIND_ACCESSIBILITY_SERVICE">
            <intent-filter>
                <action android:name="android.accessibilityservice.AccessibilityService" />
            </intent-filter>
            <meta-data
                android:name="android.accessibilityservice"
                android:resource="@xml/accessibility_service_config" />
        </service>

        <activity
            android:name=".MainActivity"
            android:exported="true">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
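One thing not shown in the manifest: RECORD_AUDIO and READ_PHONE_STATE are dangerous permissions, so on API 23+ they also have to be granted at runtime before anything can be recorded. A minimal sketch of that check, assuming it runs inside MainActivity (the method name and request code are just illustrative):

import android.Manifest;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import java.util.ArrayList;
import java.util.List;

// Inside MainActivity: ask for the dangerous permissions before enabling the service.
private static final int REQUEST_AUDIO_PERMISSIONS = 42; // arbitrary request code

private void ensureRecordPermissions() {
    String[] needed = {
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.READ_PHONE_STATE
    };
    List<String> missing = new ArrayList<>();
    for (String permission : needed) {
        if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
            missing.add(permission);
        }
    }
    if (!missing.isEmpty()) {
        ActivityCompat.requestPermissions(this, missing.toArray(new String[0]), REQUEST_AUDIO_PERMISSIONS);
    }
}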
Please find the accessibility XML below:
<accessibility-service xmlns:android="http://schemas.android.com/apk/res/android"
    android:description="@string/accessibility_service_description"
    android:accessibilityEventTypes="typeWindowContentChanged|typeWindowStateChanged"
    android:accessibilityFeedbackType="feedbackGeneric"
    android:notificationTimeout="100"
    android:accessibilityFlags="flagReportViewIds|flagRetrieveInteractiveWindows"
    android:canRetrieveWindowContent="true" />
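Also note that the service only runs once the user enables it under Settings > Accessibility; the app cannot switch it on programmatically. A small helper, as a sketch assuming the MyAccessibilityService class from this question, to check whether it is enabled and to open that settings screen:

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.provider.Settings;
import android.text.TextUtils;

// Returns true if MyAccessibilityService appears in the system's enabled-services list.
public static boolean isAccessibilityServiceEnabled(Context context) {
    ComponentName expected = new ComponentName(context, MyAccessibilityService.class);
    String enabled = Settings.Secure.getString(context.getContentResolver(),
            Settings.Secure.ENABLED_ACCESSIBILITY_SERVICES);
    if (TextUtils.isEmpty(enabled)) {
        return false;
    }
    for (String flattened : enabled.split(":")) {
        ComponentName name = ComponentName.unflattenFromString(flattened);
        if (expected.equals(name)) {
            return true;
        }
    }
    return false;
}

// Sends the user to the accessibility settings screen to enable the service manually.
public static void openAccessibilitySettings(Context context) {
    Intent intent = new Intent(Settings.ACTION_ACCESSIBILITY_SETTINGS);
    intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    context.startActivity(intent);
}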
Please find the service below:
import android.accessibilityservice.AccessibilityService;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import android.view.accessibility.AccessibilityEvent;
import androidx.annotation.RequiresApi;
import androidx.core.app.NotificationCompat;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
public class MyAccessibilityService extends AccessibilityService {
    private static final String TAG = "MyAccessibilityService";
    private Context context;
    public static final String CHANNEL_ID = "MyAccessibilityService";
    MediaRecorder mRecorder;
    private boolean isStarted;
    byte buffer[] = new byte[8916]; // currently unused
    private MediaSaver mediaSaver;

    @RequiresApi(api = Build.VERSION_CODES.M)
    @Override
    public void onCreate() {
        super.onCreate();
        Log.d(TAG, "MyAccessibilityService Salesken Started ...");
        context = this;
        startForegroundService();
    }
    private void startForegroundService() {
        createNotificationChannel();
        Intent notificationIntent = new Intent(this, MainActivity.class);
        // FLAG_IMMUTABLE (API 23+) is mandatory for PendingIntents when targeting Android 12+
        PendingIntent pendingIntent = PendingIntent.getActivity(this,
                0, notificationIntent,
                Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ? PendingIntent.FLAG_IMMUTABLE : 0);
        Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)
                .setContentTitle("Recording Service")
                .setContentText("Start")
                .setSmallIcon(R.drawable.ic_launcher_background)
                .setContentIntent(pendingIntent)
                .build();
        startForeground(1, notification);
    }
    private void createNotificationChannel() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel serviceChannel = new NotificationChannel(
                    CHANNEL_ID,
                    "Recording Service Channel",
                    NotificationManager.IMPORTANCE_DEFAULT
            );
            NotificationManager manager = getSystemService(NotificationManager.class);
            manager.createNotificationChannel(serviceChannel);
        }
    }
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent != null && intent.getAction() != null) { // guard against a null action
            String action = intent.getAction();
            switch (action) {
                case SaleskenIntent.START_RECORDING:
                    Log.d(TAG, "Start Recording");
                    //startRecorder();
                    String contact = intent.getStringExtra("contact");
                    startRecording(contact);
                    break;
                case SaleskenIntent.STOP_RECORDING:
                    Log.d(TAG, "Stop Recording");
                    stopRecording();
                    break;
            }
        }
        return super.onStartCommand(intent, flags, startId);
    }
    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) {
    }

    @Override
    public void onInterrupt() {
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }
    public void startRecording(String contact) {
        try {
            String timestamp = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss", Locale.US).format(new Date());
            String fileName = timestamp + ".3gp";
            mediaSaver = new MediaSaver(context)
                    .setParentDirectoryName("Accessibility")
                    .setFileNameKeepOriginalExtension(fileName)
                    .setExternal(MediaSaver.isExternalStorageReadable());
            //String selectedPath = Environment.getExternalStorageDirectory() + "/Testing";
            //String selectedPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Android/data/" + packageName + "/system_sound";

            mRecorder = new MediaRecorder();
            mRecorder.reset();

            //android.permission.MODIFY_AUDIO_SETTINGS
            AudioManager mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); // turn on speaker
            if (mAudioManager != null) {
                mAudioManager.setMode(AudioManager.MODE_IN_COMMUNICATION); // MODE_IN_COMMUNICATION | MODE_IN_CALL
                // mAudioManager.setSpeakerphoneOn(true);
                // mAudioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, mAudioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL), 0); // increase volume
                hasWiredHeadset(mAudioManager);
            }

            //android.permission.RECORD_AUDIO
            String manufacturer = Build.MANUFACTURER;
            Log.d(TAG, manufacturer);
            /* if (manufacturer.toLowerCase().contains("samsung")) {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
            } else {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
            } */
            /*
               VOICE_CALL is the actual call data being sent in a call, up and down (your side and their side).
               VOICE_COMMUNICATION is just the microphone, but with codecs and echo cancellation turned on
               for good voice quality.
            */
            mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION); // MIC | VOICE_COMMUNICATION (Android 10 release) | VOICE_RECOGNITION | (VOICE_CALL = VOICE_UPLINK + VOICE_DOWNLINK)
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); // THREE_GPP | MPEG_4
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); // AMR_NB | AAC
            mRecorder.setOutputFile(mediaSaver.pathFile().getAbsolutePath());
            mRecorder.prepare();
            mRecorder.start();
            isStarted = true;
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public void stopRecording() {
        if (isStarted && mRecorder != null) {
            mRecorder.stop();
            mRecorder.reset(); // you can reuse the object by going back to setAudioSource()
            mRecorder.release();
            mRecorder = null;
            isStarted = false;
        }
    }
    // Detects whether another audio device is connected (wired headset, USB audio device, etc.)
    private boolean hasWiredHeadset(AudioManager mAudioManager) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            return mAudioManager.isWiredHeadsetOn();
        } else {
            final AudioDeviceInfo[] devices = mAudioManager.getDevices(AudioManager.GET_DEVICES_ALL);
            for (AudioDeviceInfo device : devices) {
                final int type = device.getType();
                if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
                    Log.d(TAG, "hasWiredHeadset: found wired headset");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
                    Log.d(TAG, "hasWiredHeadset: found USB audio device");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_TELEPHONY) {
                    Log.d(TAG, "hasWiredHeadset: found audio signals over the telephony network");
                    return true;
                }
            }
            return false;
        }
    }
}
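The code above only records to a .3gp file, but what I am really after is processing the audio in real time while the call is running. My rough idea (an untested sketch) is to swap MediaRecorder for AudioRecord so I get raw PCM buffers on a worker thread; processPcm() below is just a placeholder for whatever processing is applied. As far as I understand, on Android 10+ a normal app cannot capture the remote side of the call, so VOICE_COMMUNICATION effectively gives the microphone path only:

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class CallAudioProcessor {
    private static final int SAMPLE_RATE = 16000; // 16 kHz mono is enough for speech
    private volatile boolean running;

    public void start() {
        final int minBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        final AudioRecord recorder = new AudioRecord(
                MediaRecorder.AudioSource.VOICE_COMMUNICATION,
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBuf * 2);
        if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
            recorder.release(); // RECORD_AUDIO missing or source unavailable on this device
            return;
        }
        running = true;
        new Thread(() -> {
            short[] buffer = new short[minBuf / 2];
            recorder.startRecording();
            while (running) {
                int read = recorder.read(buffer, 0, buffer.length); // blocking read of PCM samples
                if (read > 0) {
                    processPcm(buffer, read); // real-time processing hook
                }
            }
            recorder.stop();
            recorder.release();
        }, "call-audio").start();
    }

    public void stop() {
        running = false;
    }

    private void processPcm(short[] pcm, int length) {
        // Placeholder: compute levels, feed a recognizer, stream over the network, etc.
    }
}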
You need to change the audio source input.
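If it helps, the source could be picked with a heuristic like the sketch below, mirroring the commented-out manufacturer check in your startRecording(). Treat it as an illustration only: whether VOICE_CALL works at all depends on the OEM build, and since Android 10 third-party apps are generally limited to VOICE_COMMUNICATION or MIC, which capture only the local side of the call.

import android.media.MediaRecorder;
import android.os.Build;
import java.util.Locale;

// Rough source-selection heuristic; VOICE_CALL is usually blocked for normal apps on Android 10+.
private int pickAudioSource() {
    String manufacturer = Build.MANUFACTURER.toLowerCase(Locale.US);
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q && !manufacturer.contains("samsung")) {
        return MediaRecorder.AudioSource.VOICE_CALL;          // uplink + downlink, if the OEM allows it
    }
    return MediaRecorder.AudioSource.VOICE_COMMUNICATION;     // mic tuned for calls (local side only)
}

// Usage: mRecorder.setAudioSource(pickAudioSource());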