Question:

AccessibilityService with MediaRecorder to record 30 seconds of audio on Android Q (10) not working while a call is received (answered)

伍宝
2023-03-14

Here is the reference from the developer site.

MyService.java

package nisarg.app.demo;

import android.accessibilityservice.AccessibilityButtonController;
import android.accessibilityservice.AccessibilityService;
import android.accessibilityservice.AccessibilityServiceInfo;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.Log;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import java.io.IOException;
import androidx.core.app.NotificationCompat;
import static android.media.AudioManager.ADJUST_RAISE;
import static nisarg.app.demo.MainActivity.fileName;

public class MyService extends AccessibilityService {

    private AccessibilityButtonController accessibilityButtonController;
    private AccessibilityButtonController
            .AccessibilityButtonCallback accessibilityButtonCallback;
    private boolean mIsAccessibilityButtonAvailable;

    public static final String LOG_TAG_S = "MyService 1 :";

    WindowManager windowManager;

    @SuppressLint("RtlHardcoded")
    @Override
    public void onCreate() {
        super.onCreate();

        windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);

        Log.d(LOG_TAG_S,"MyService");

        try {
            startForegroundService();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) {

        Log.d(LOG_TAG_S, "Event :"+event.getEventType());

        AccessibilityNodeInfo interactedNodeInfo = event.getSource();
        // Guard against a null source/text and compare the CharSequence content safely.
        if (interactedNodeInfo != null && mAudioManager != null
                && TextUtils.equals(interactedNodeInfo.getText(), "Increase volume")) {
            mAudioManager.adjustStreamVolume(AudioManager.STREAM_ACCESSIBILITY,
                    ADJUST_RAISE, 0);
        }


    }

    @Override
    public void onInterrupt() {

    }


    @Override
    protected void onServiceConnected() {
        System.out.println("onServiceConnected");

        Log.d(LOG_TAG_S, " ===:" + "onServiceConnected");


        AccessibilityServiceInfo info = new AccessibilityServiceInfo();
        info.eventTypes = AccessibilityEvent.TYPES_ALL_MASK;
        info.feedbackType = AccessibilityServiceInfo.FEEDBACK_ALL_MASK;
        info.notificationTimeout = 100;
        info.packageNames = null;
        setServiceInfo(info);


        if (isAccessibilitySettingsOn(MyService.this)) {
            Log.d(LOG_TAG_S, "ACCESSIBILIY IS Activated");
            startRecording();
            new Handler().postDelayed(new Runnable() {
                @Override
                public void run() {
                    // This method will be executed once the timer is over
                    stopRecording();
                }
            }, 30000);
        } else {
            Log.d(LOG_TAG_S, "ACCESSIBILIY IS DISABLED");
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            accessibilityButtonController = getAccessibilityButtonController();
            mIsAccessibilityButtonAvailable =
                    accessibilityButtonController.isAccessibilityButtonAvailable();
        }

        if (!mIsAccessibilityButtonAvailable) {
            return;
        }

        AccessibilityServiceInfo serviceInfo = getServiceInfo();
        serviceInfo.flags |= AccessibilityServiceInfo.FLAG_REQUEST_ACCESSIBILITY_BUTTON;
        setServiceInfo(serviceInfo);

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            accessibilityButtonCallback = new AccessibilityButtonController.AccessibilityButtonCallback() {
                        @Override
                        public void onClicked(AccessibilityButtonController controller) {
                            Log.d("MY_APP_TAG", "Accessibility button pressed!");
                            // Add custom logic for a service to react to the
                            // accessibility button being pressed.
                        }

                        @Override
                        public void onAvailabilityChanged(
                                AccessibilityButtonController controller, boolean available) {
                            if (controller.equals(accessibilityButtonController)) {
                                mIsAccessibilityButtonAvailable = available;
                            }
                        }
                    };
        }
        if (accessibilityButtonCallback != null) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                accessibilityButtonController.registerAccessibilityButtonCallback(
                        accessibilityButtonCallback, null);
            }
        }
    }





    public static final String CHANNEL_ID = "MyAccessibilityService";

    private void startForegroundService() {
        createNotificationChannel();
        Intent notificationIntent = new Intent(this, MainActivity.class);
        PendingIntent pendingIntent = PendingIntent.getActivity(this,0, notificationIntent, 0);
        Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)

                .setContentTitle("recording Service")
                .setContentText("Start")
                .setSmallIcon(R.drawable.ic_launcher_background)
                .setContentIntent(pendingIntent)
                .build();
        startForeground(1, notification);

    }
    private void createNotificationChannel() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel serviceChannel = new NotificationChannel(
                    CHANNEL_ID,
                    "Recording Service Channel",
                    NotificationManager.IMPORTANCE_DEFAULT
            );
            NotificationManager manager = getSystemService(NotificationManager.class);
            manager.createNotificationChannel(serviceChannel);
        }
    }



    //=================================================Added code start==========

    MediaRecorder mRecorder;
    private boolean isStarted;
    byte buffer[] = new byte[8916];


    AudioManager mAudioManager;

    public void startRecording() {
        try {

            mRecorder = new MediaRecorder();
            mRecorder.reset();

            //android.permission.MODIFY_AUDIO_SETTINGS
            mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            //turn on speaker
            if (mAudioManager != null) {
                mAudioManager.setMode(AudioManager.MODE_IN_CALL); //MODE_IN_COMMUNICATION | MODE_IN_CALL
                // mAudioManager.setSpeakerphoneOn(true);
                // mAudioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, mAudioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL), 0); // increase Volume
                hasWiredHeadset(mAudioManager);
            }

            //android.permission.RECORD_AUDIO
            String manufacturer = Build.MANUFACTURER;
            Log.d(LOG_TAG_S, manufacturer);
           /* if (manufacturer.toLowerCase().contains("samsung")) {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
            } else {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
            }*/
            /*
            VOICE_CALL is the actual call data being sent in a call, up and down (so your side and their side). VOICE_COMMUNICATION is just the microphone, but with codecs and echo cancellation turned on for good voice quality.
            */
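            // Note (editorial addition): VOICE_CALL, VOICE_UPLINK and VOICE_DOWNLINK require the
            // system permission android.permission.CAPTURE_AUDIO_OUTPUT, which ordinary apps cannot
            // hold, so on Android 10 this source typically fails on start() or records silence.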
            mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL); //MIC | VOICE_COMMUNICATION (Android 10 release) | VOICE_RECOGNITION | (VOICE_CALL = VOICE_UPLINK + VOICE_DOWNLINK)
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); //THREE_GPP | MPEG_4
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); //AMR_NB | AAC
            mRecorder.setOutputFile(fileName);
            mRecorder.prepare();
            mRecorder.start();
            isStarted = true;
        } catch (IOException e) {
            e.printStackTrace();
        }
    }



    public void stopRecording() {
        if (isStarted && mRecorder != null) {
            mRecorder.stop();
            mRecorder.reset(); // You can reuse the object by going back to setAudioSource() step
            mRecorder.release();
            mRecorder = null;
            isStarted = false;

            stopSelf();
        }
    }

    // To detect the connected other device like headphone, wifi headphone, usb headphone etc
    private boolean hasWiredHeadset(AudioManager mAudioManager) {

        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            return mAudioManager.isWiredHeadsetOn();
        } else {
            final AudioDeviceInfo[] devices = mAudioManager.getDevices(AudioManager.GET_DEVICES_ALL);
            for (AudioDeviceInfo device : devices) {
                final int type = device.getType();
                if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
                    Log.d(LOG_TAG_S, "hasWiredHeadset: found wired headset");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
                    Log.d(LOG_TAG_S, "hasWiredHeadset: found USB audio device");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_TELEPHONY) {
                    Log.d(LOG_TAG_S, "hasWiredHeadset: found audio signals over the telephony network");
                    return true;
                }
            }
            return false;
        }
    }


    //=================================End================================

    public  static  boolean isAccessibilitySettingsOn(Context mContext) {
        int accessibilityEnabled = 0;
        // your package / accessibility service path/class
        // final String service = "com.example.sotsys_014.accessibilityexample/com.accessibilityexample.Service.MyAccessibilityService";

        final String service = "nisarg.app.demo/nisarg.app.demo.MyService";

        boolean accessibilityFound = false;
        try {
            accessibilityEnabled = Settings.Secure.getInt(
                    mContext.getApplicationContext().getContentResolver(),
                    Settings.Secure.ACCESSIBILITY_ENABLED);
            Log.d(LOG_TAG_S, "accessibilityEnabled = " + accessibilityEnabled);
        } catch (Settings.SettingNotFoundException e) {
            Log.d(LOG_TAG_S, "Error finding setting, default accessibility to not found: "
                    + e.getMessage());
        }
        TextUtils.SimpleStringSplitter mStringColonSplitter = new TextUtils.SimpleStringSplitter(':');

        if (accessibilityEnabled == 1) {
            Log.d(LOG_TAG_S, "***ACCESSIBILIY IS ENABLED*** -----------------");
            String settingValue = Settings.Secure.getString(
                    mContext.getApplicationContext().getContentResolver(),
                    Settings.Secure.ENABLED_ACCESSIBILITY_SERVICES);
            if (settingValue != null) {
                TextUtils.SimpleStringSplitter splitter = mStringColonSplitter;
                splitter.setString(settingValue);
                while (splitter.hasNext()) {
                    String accessabilityService = splitter.next();

                    Log.d(LOG_TAG_S, "-------------- > accessabilityService :: " + accessabilityService);
                    if (accessabilityService.equalsIgnoreCase(service)) {
                        Log.d(LOG_TAG_S, "We've found the correct setting - accessibility is switched on!");

                         return true;
                    }
                }
            }
        } else {
            Log.d(LOG_TAG_S, "***ACCESSIBILIY IS DISABLED***");
        }

        return accessibilityFound;
    }

}

--

It works fine for normal recording, but when a call comes in and it tries to record the call audio, it cannot record any audio.

AndroidManifest.xml


<uses-permission android:name="android.permission.RECORD_AUDIO"></uses-permission>
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />


       <service
                android:name="nisarg.app.demo.MyService"
                android:label="@string/app_name"
                android:enabled="true"
                android:exported="true"
                android:permission="android.permission.BIND_ACCESSIBILITY_SERVICE">
                <intent-filter>
                    <action android:name="android.accessibilityservice.AccessibilityService" />
                </intent-filter>
                <meta-data android:name="android.accessibilityservice" android:resource="@xml/accessibility_service_config" />
            </service>
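
Side note: RECORD_AUDIO is a dangerous permission, so declaring it in the manifest is not enough on Android 6+; it also has to be granted at runtime. A minimal sketch of such a check, assumed to run in MainActivity before the accessibility service is enabled (the request code is arbitrary):

    // Hypothetical runtime check using androidx.core (ContextCompat / ActivityCompat).
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.RECORD_AUDIO}, /* requestCode */ 1);
    }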

In the xml folder:

accessibility_service_config.xml

<accessibility-service xmlns:android="http://schemas.android.com/apk/res/android"
    android:description="@string/accessibility_service_description"
    android:packageNames="nisarg.app.demo"
    android:accessibilityEventTypes="typeAllMask"
    android:accessibilityFlags="flagDefault"
    android:accessibilityFeedbackType="feedbackSpoken"
    android:notificationTimeout="100"
    android:canRetrieveWindowContent="true"
    android:settingsActivity="com.example.android.accessibility.ServiceSettingsActivity"
    />
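
For completeness, the service still has to be enabled manually under Settings > Accessibility; opening that screen from the app is typically done with a plain intent, for example (a minimal sketch, assumed to be called from an Activity such as MainActivity):

    // Open the system accessibility settings so the user can toggle the service on.
    Intent intent = new Intent(Settings.ACTION_ACCESSIBILITY_SETTINGS);
    intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    startActivity(intent);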

1 Answer

微生毅
2023-03-14

I have solved the problem as follows:

package nisarg.app.demo;

import android.accessibilityservice.AccessibilityService;
import android.accessibilityservice.AccessibilityServiceInfo;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.graphics.PixelFormat;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityEvent;
import android.widget.FrameLayout;
import android.widget.ImageView;

import java.io.IOException;

import androidx.core.app.NotificationCompat;

import static nisarg.app.demo.MainActivity.fileName;
import static nisarg.app.demo.MainActivity.player;
import static nisarg.app.demo.MainActivity.recorder;

public class MyService extends AccessibilityService {

    public static final String LOG_TAG_S = "MyService:";

    WindowManager windowManager;
    // ImageView back,home,notification,minimize;
    //WindowManager.LayoutParams params;
//    AccessibilityService service;

    @SuppressLint("RtlHardcoded")
    @Override
    public void onCreate() {
        super.onCreate();

        windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);

        Log.i("start Myservice","MyService");

        startForegroundService();


        /*if (!Settings.canDrawOverlays(getApplicationContext())) {
            Intent intent = new Intent(Settings.ACTION_ACCESSIBILITY_SETTINGS,
                    Uri.parse("package:" + getPackageName()));
            startActivityForResult(intent, 1);
        }*/

      /*  back = new ImageView(this);
        home = new ImageView(this);
        minimize = new ImageView(this);
        notification = new ImageView(this);

        back.setImageResource(R.drawable.ic_launcher_background);
        home.setImageResource(R.drawable.ic_launcher_background);
        minimize.setImageResource(R.drawable.ic_launcher_background);
        notification.setImageResource(R.drawable.ic_launcher_background);
*/
  /*      params= new WindowManager.LayoutParams(
                WindowManager.LayoutParams.WRAP_CONTENT,
                WindowManager.LayoutParams.WRAP_CONTENT,
                WindowManager.LayoutParams.TYPE_PHONE,
                WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.TRANSLUCENT);

        params.gravity = Gravity.CENTER_VERTICAL|Gravity.RIGHT;
        params.x = 10;
        params.y = 50;

        windowManager.addView(home, params);

        params= new WindowManager.LayoutParams(
                WindowManager.LayoutParams.WRAP_CONTENT,
                WindowManager.LayoutParams.WRAP_CONTENT,
                WindowManager.LayoutParams.TYPE_PHONE,
                WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.TRANSLUCENT);

        params.gravity = Gravity.CENTER_VERTICAL|Gravity.RIGHT;
        params.x = 10;
        params.y = 100;

        windowManager.addView(minimize, params);

        params= new WindowManager.LayoutParams(
                WindowManager.LayoutParams.WRAP_CONTENT,
                WindowManager.LayoutParams.WRAP_CONTENT,
                WindowManager.LayoutParams.TYPE_PHONE,
                WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.TRANSLUCENT);

        params.gravity = Gravity.CENTER_VERTICAL|Gravity.RIGHT;
        params.x = 10;
        params.y = 150;

        windowManager.addView(notification, params);

        back.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    performGlobalAction(AccessibilityService.GLOBAL_ACTION_BACK);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });

        home.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    performGlobalAction(AccessibilityService.GLOBAL_ACTION_HOME);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });

        notification.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    performGlobalAction(AccessibilityService.GLOBAL_ACTION_NOTIFICATIONS);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });

        minimize.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    performGlobalAction(AccessibilityService.GLOBAL_ACTION_RECENTS);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
*/


    }

    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) {

        Log.e(LOG_TAG_S, "Event :"+event.getEventType());



    }

    @Override
    public void onInterrupt() {

    }


  /*  @Override
    protected void onServiceConnected() {
        super.onServiceConnected();
        Log.d("TAG", "onServiceConnected");
    }*/


/*    @Override
    public void onServiceConnected() {
        // Set the type of events that this service wants to listen to. Others
        // won't be passed to this service.

        info.eventTypes = AccessibilityEvent.TYPE_VIEW_CLICKED |
                AccessibilityEvent.TYPE_VIEW_FOCUSED;

        // If you only want this service to work with specific applications, set their
        // package names here. Otherwise, when the service is activated, it will listen
        // to events from all applications.
        info.packageNames = new String[]
                {"nisarg.app.demo"};

        // Set the type of feedback your service will provide.
        info.feedbackType = AccessibilityServiceInfo.FEEDBACK_SPOKEN;

        // Default services are invoked only if no package-specific ones are present
        // for the type of AccessibilityEvent generated. This service *is*
        // application-specific, so the flag isn't necessary. If this was a
        // general-purpose service, it would be worth considering setting the
        // DEFAULT flag.

        // info.flags = AccessibilityServiceInfo.DEFAULT;

        info.notificationTimeout = 100;

        this.setServiceInfo(info);


    }*/





    @Override
    protected void onServiceConnected() {
        System.out.println("onServiceConnected");

        //==============================Record Audio while  Call received===============//

        WindowManager windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        FrameLayout layout = new FrameLayout(this);

        WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.MATCH_PARENT,
                WindowManager.LayoutParams.MATCH_PARENT, WindowManager.LayoutParams.TYPE_ACCESSIBILITY_OVERLAY,
                WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE| WindowManager.LayoutParams.FLAG_FULLSCREEN |
                        WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE|
                        WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS|
                        WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
                PixelFormat.TRANSLUCENT);
        params.gravity = Gravity.TOP;

        windowManager.addView(layout, params);
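        // Note (editorial addition): TYPE_ACCESSIBILITY_OVERLAY (API 22+) may only be used by an
        // accessibility service and does not require the SYSTEM_ALERT_WINDOW ("draw over other
        // apps") permission, so no overlay-permission prompt is needed here.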
        layout.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {

                //You can either get the information here or on onAccessibilityEvent



                Log.e(LOG_TAG_S, "Window view touched........:");
                Log.e(LOG_TAG_S, "Window view touched........:");
                return true;
            }
        });

        //==============To Record Audio wile Call received=================


        AccessibilityServiceInfo info = new AccessibilityServiceInfo();
        info.eventTypes = AccessibilityEvent.TYPES_ALL_MASK;
        info.feedbackType = AccessibilityServiceInfo.FEEDBACK_ALL_MASK;
        info.notificationTimeout = 100;
        info.packageNames = null;
        setServiceInfo(info);



        try {
            //startRecording();
            startRecordingA();
        } catch (Exception e) {
            e.printStackTrace();
        }



        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                // This method will be executed once the timer is over

                //stopRecording();
                stopRecordingA();
            }
        }, 30000);

    }



    private void startPlaying() {
        player = new MediaPlayer();
        try {
            player.setDataSource(fileName);
            player.prepare();
            player.start();
        } catch (IOException e) {
            Log.e(LOG_TAG_S, "prepare() failed");
        }
    }

    private void stopPlaying() {
        player.release();
        player = null;
    }

    private void startRecordingA() {
        recorder = new MediaRecorder();
        // This must be needed sourcea
        recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_RECOGNITION);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        recorder.setOutputFile(fileName);
        //recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            recorder.setAudioEncoder(MediaRecorder.AudioEncoder.HE_AAC);
            recorder.setAudioEncodingBitRate(48000);
        } else {
            recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
            recorder.setAudioEncodingBitRate(64000);
        }
        recorder.setAudioSamplingRate(16000);

        try {
            recorder.prepare();
        } catch (IOException e) {
            Log.e(LOG_TAG_S, "prepare() failed");
        }

        recorder.start();
    }



    private void stopRecordingA() {

        Log.e(LOG_TAG_S, "stop recording");
        if (recorder != null) {
            recorder.stop();
            recorder.release();
            recorder = null;
        }
    }


    public static final String CHANNEL_ID = "MyAccessibilityService";

    private void startForegroundService() {
        createNotificationChannel();
        Intent notificationIntent = new Intent(this, MainActivity.class);
        PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0);
        Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)

                .setContentTitle("recording Service")
                .setContentText("Start")
                .setSmallIcon(R.drawable.ic_launcher_background)
                .setContentIntent(pendingIntent)
                .build();
        startForeground(1, notification);

    }
    private void createNotificationChannel() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel serviceChannel = new NotificationChannel(
                    CHANNEL_ID,
                    "Recording Service Channel",
                    NotificationManager.IMPORTANCE_DEFAULT
            );
            NotificationManager manager = getSystemService(NotificationManager.class);
            manager.createNotificationChannel(serviceChannel);
        }
    }



    //=================================================Added code start==========

    MediaRecorder mRecorder;
    private boolean isStarted;
    byte buffer[] = new byte[8916];



    public void startRecording() {
        try {

         /*   String timestamp = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss", Locale.US).format(new Date());
            String fileName =timestamp+".3gp";
            mediaSaver = new MediaSaver(context).setParentDirectoryName("Accessibility").

                    setFileNameKeepOriginalExtension(fileName).
                    setExternal(MediaSaver.isExternalStorageReadable());*/
            //String selectedPath = Environment.getExternalStorageDirectory() + "/Testing";
            //String selectedPath = Environment.getExternalStorageDirectory().getAbsolutePath() +"/Android/data/" + packageName + "/system_sound";



            mRecorder = new MediaRecorder();
//            mRecorder.reset();

            //android.permission.MODIFY_AUDIO_SETTINGS
            AudioManager mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            //turn on speaker
            if (mAudioManager != null) {
                mAudioManager.setMode(AudioManager.MODE_IN_COMMUNICATION); //MODE_IN_COMMUNICATION | MODE_IN_CALL
                // mAudioManager.setSpeakerphoneOn(true);
                // mAudioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, mAudioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL), 0); // increase Volume
                hasWiredHeadset(mAudioManager);
            }

            //android.permission.RECORD_AUDIO
            String manufacturer = Build.MANUFACTURER;
            Log.d(LOG_TAG_S, manufacturer);
           /* if (manufacturer.toLowerCase().contains("samsung")) {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
            } else {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
            }*/
            /*
            VOICE_CALL is the actual call data being sent in a call, up and down (so your side and their side). VOICE_COMMUNICATION is just the microphone, but with codecs and echo cancellation turned on for good voice quality.
            */
            mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_RECOGNITION); //MIC | VOICE_COMMUNICATION (Android 10 release) | VOICE_RECOGNITION | (VOICE_CALL = VOICE_UPLINK + VOICE_DOWNLINK)
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); //THREE_GPP | MPEG_4
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); //AMR_NB | AAC
            mRecorder.setOutputFile(fileName);
            mRecorder.prepare();
            mRecorder.start();
            isStarted = true;
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void stopRecording() {
        if (isStarted && mRecorder != null) {
            mRecorder.stop();
            mRecorder.reset(); // You can reuse the object by going back to setAudioSource() step
            mRecorder.release();
            mRecorder = null;
            isStarted = false;
        }
    }

    // To detect the connected other device like headphone, wifi headphone, usb headphone etc
    private boolean hasWiredHeadset(AudioManager mAudioManager) {

        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            return mAudioManager.isWiredHeadsetOn();
        } else {
            final AudioDeviceInfo[] devices = mAudioManager.getDevices(AudioManager.GET_DEVICES_ALL);
            for (AudioDeviceInfo device : devices) {
                final int type = device.getType();
                if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
                    Log.d(LOG_TAG_S, "hasWiredHeadset: found wired headset");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
                    Log.d(LOG_TAG_S, "hasWiredHeadset: found USB audio device");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_TELEPHONY) {
                    Log.d(LOG_TAG_S, "hasWiredHeadset: found audio signals over the telephony network");
                    return true;
                }
            }
            return false;
        }
    }



    //=================================End================================

    public  static  boolean isAccessibilitySettingsOn(Context mContext) {
        int accessibilityEnabled = 0;
        // your package / accessibility service path/class
        // final String service = "com.example.sotsys_014.accessibilityexample/com.accessibilityexample.Service.MyAccessibilityService";

        final String service = "nisarg.app.demo/nisarg.app.demo.MyService";


        boolean accessibilityFound = false;
        try {
            accessibilityEnabled = Settings.Secure.getInt(
                    mContext.getApplicationContext().getContentResolver(),
                    android.provider.Settings.Secure.ACCESSIBILITY_ENABLED);
            Log.v(LOG_TAG_S, "accessibilityEnabled = " + accessibilityEnabled);
        } catch (Settings.SettingNotFoundException e) {
            Log.e(LOG_TAG_S, "Error finding setting, default accessibility to not found: "
                    + e.getMessage());
        }
        TextUtils.SimpleStringSplitter mStringColonSplitter = new TextUtils.SimpleStringSplitter(':');

        if (accessibilityEnabled == 1) {
            Log.v(LOG_TAG_S, "***ACCESSIBILIY IS ENABLED*** -----------------");
            String settingValue = Settings.Secure.getString(
                    mContext.getApplicationContext().getContentResolver(),
                    Settings.Secure.ENABLED_ACCESSIBILITY_SERVICES);
            if (settingValue != null) {
                TextUtils.SimpleStringSplitter splitter = mStringColonSplitter;
                splitter.setString(settingValue);
                while (splitter.hasNext()) {
                    String accessabilityService = splitter.next();

                    Log.v(LOG_TAG_S, "-------------- > accessabilityService :: " + accessabilityService);
                    if (accessabilityService.equalsIgnoreCase(service)) {
                        Log.v(LOG_TAG_S, "We've found the correct setting - accessibility is switched on!");
                        return true;
                    }
                }
            }
        } else {
            Log.v(LOG_TAG_S, "***ACCESSIBILIY IS DISABLED***");
        }

        return accessibilityFound;
    }

}