导航菜单

页面标题

页面副标题

Petal Maps v4.7.0.310001 - AudioAcquisition.java 源代码

正在查看: Petal Maps v4.7.0.310001 应用的 AudioAcquisition.java JAVA 源代码文件

本页面展示 JAVA 反编译生成的源代码文件,支持语法高亮显示。仅供安全研究与技术分析使用,严禁用于任何非法用途。请遵守相关法律法规。


package com.huawei.hiassistant.voice.dataacquisition;

import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.AudioRecord;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.text.TextUtils;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.huawei.agconnect.remoteconfig.AGConnectConfig;
import com.huawei.hiassistant.platform.base.VoiceKitSdkContext;
import com.huawei.hiassistant.platform.base.adapter.businessadapter.HmsProxyFactory;
import com.huawei.hiassistant.platform.base.bean.AudioDataMessage;
import com.huawei.hiassistant.platform.base.bean.CommonHeader;
import com.huawei.hiassistant.platform.base.bean.ErrorInfo;
import com.huawei.hiassistant.platform.base.bean.InteractionIdInfo;
import com.huawei.hiassistant.platform.base.bean.llmstatus.StreamingStatusManager;
import com.huawei.hiassistant.platform.base.bean.recognize.HeaderPayload;
import com.huawei.hiassistant.platform.base.bean.recognize.MessageConstants;
import com.huawei.hiassistant.platform.base.bean.recognize.RecognizeContext;
import com.huawei.hiassistant.platform.base.bean.recognize.Session;
import com.huawei.hiassistant.platform.base.bean.recognize.VoiceKitMessage;
import com.huawei.hiassistant.platform.base.bean.recognize.payload.Header;
import com.huawei.hiassistant.platform.base.bean.recognize.payload.Payload;
import com.huawei.hiassistant.platform.base.bean.util.GsonUtils;
import com.huawei.hiassistant.platform.base.commonrejection.CommonRejection;
import com.huawei.hiassistant.platform.base.fullduplex.FullDuplex;
import com.huawei.hiassistant.platform.base.fullduplex.FullDuplexUiStateUtil;
import com.huawei.hiassistant.platform.base.interrupt.InterruptTtsInfo;
import com.huawei.hiassistant.platform.base.module.DataAcquisitionInterface;
import com.huawei.hiassistant.platform.base.module.ModuleInstanceFactory;
import com.huawei.hiassistant.platform.base.module.RecordStartType;
import com.huawei.hiassistant.platform.base.msg.AssistantMessage;
import com.huawei.hiassistant.platform.base.msg.MessageSparse;
import com.huawei.hiassistant.platform.base.northinterface.recognize.RecognizerIntent;
import com.huawei.hiassistant.platform.base.northinterface.wakeup.WakeupIntent;
import com.huawei.hiassistant.platform.base.report.OperationReportUtils;
import com.huawei.hiassistant.platform.base.util.BaseUtils;
import com.huawei.hiassistant.platform.base.util.BusinessFlowId;
import com.huawei.hiassistant.platform.base.util.DeviceUtil;
import com.huawei.hiassistant.platform.base.util.FixedLinkedBlockingDeque;
import com.huawei.hiassistant.platform.base.util.IAssistantConfig;
import com.huawei.hiassistant.platform.base.util.KeyguardUtil;
import com.huawei.hiassistant.platform.base.util.KitLog;
import com.huawei.hiassistant.platform.base.util.LocationUtil;
import com.huawei.hiassistant.platform.base.util.NetworkUtil;
import com.huawei.hiassistant.platform.base.util.PermissionUtil;
import com.huawei.hiassistant.platform.base.util.SecureIntentUtil;
import com.huawei.hiassistant.platform.base.util.VoiceBusinessFlowCache;
import com.huawei.hiassistant.platform.base.util.common.HeadsetScoManager;
import com.huawei.hiassistant.platform.base.util.voice.AecStateUtil;
import com.huawei.hiassistant.voice.abilityconnector.recognizer.SpeechCheck;
import com.huawei.hiassistant.voice.common.util.CommonDataUtil;
import com.huawei.hiassistant.voice.common.util.CountDown;
import com.huawei.hiassistant.voice.dataacquisition.AudioAcquisition;
import defpackage.ad4;
import defpackage.bq;
import defpackage.dl;
import defpackage.hhb;
import defpackage.joa;
import defpackage.rma;
import defpackage.uq1;
import defpackage.ynb;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;

/**
 * Audio data acquisition module: records microphone audio on a dedicated handler thread,
 * buffers it in bounded deques, and feeds it to the recognizer via the registered callback.
 * Decompiled source; synthetic lambda$... helpers live elsewhere in this file.
 */
public class AudioAcquisition implements DataAcquisitionInterface {
    private static final int ACQUISITION_GAP = 10; // ms pause between reads — presumably matches the 10L sleep in acquisitionThread; confirm
    private static final int AUDIO_ACQUISITION_START = 1; // Message.what that starts the acquisition loop (see AudioAcquisitionHandler)
    private static final int AUDIO_RECORDING_TIMEOUT = 20000; // default recording timeout, ms (initial recordTimeoutTime)
    private static final int AUDIO_RECORDING_TIMEOUT_FOR_V5 = 30000; // longer timeout — presumably for the v5 protocol; confirm at the assignment site
    private static final int DATA_BUFFER_SIZE = 640; // bytes per audio read — TODO confirm against the record loop (outside this view)
    // audioState is a bitmask composed of the FLAG_* values below
    // (e.g. acquisitionThread checks (audioState & 12) == 0, i.e. FULLDUPLEX|COMMON_REJECTION).
    private static final int FLAG_IS_COMMON_REJECTION = 8;
    private static final int FLAG_IS_FULLDUPLEX = 4;
    private static final int FLAG_IS_RECOGNIZING = 1;
    private static final int FLAG_IS_WAKEUP = 2;
    private static final int GET_LOGINSTATUS_TIMEOUT = 5000; // ms latch wait in fillLoginStatus
    private static final int INVALID_VOLUME = -1; // sentinel for lastVolume before any callback
    private static final double MAX_INT = 32767.0d; // Short.MAX_VALUE as double; normalizer in calculateVolume
    private static final int MICRO_PA = 20; // dB multiplier (20*log10) used in volume math — name from decompiler; verify
    private static final String NAMESPACE_SYSTEM = "System";
    private static final String NAME_ASR_SETTINGS_PARAMETER = "ASRSettingsParameter";
    private static final String NAME_CLIENT_CONTEXT = "ClientContext";
    private static final int NO_RECORD_DATA_TIMEOUT = 3000; // ms without data before checkRecordTimeOut trips
    private static final long NO_SPEECH_TIMEOUT = 2500; // presumably full-duplex no-speech window; used outside this view
    private static final long SPEECH_CHECK_DELAY = 500; // presumably delay before speech check starts; used outside this view
    private static final String TAG = "AudioAcquisition";
    private static final long TEXT_RECOGNIZE_LEN_LIMIT = 1048576; // 1 MiB cap for text recognition input
    private static final int WAIT_RETRY_RECORD_PERIOD = 20; // ms between AudioRecord retry attempts — confirm at use site
    private static final int WAIT_RETRY_RECORD_TIMES = 10; // max AudioRecord retry attempts — confirm at use site
    private Handler audioAcquisitionHandler; // posts AUDIO_ACQUISITION_START onto audioAcquisitionThread
    private HandlerThread audioAcquisitionThread; // dedicated "AcquisitionThread"
    private volatile Session currentSession; // session the loop validates incoming sessions against
    private volatile boolean hasReadInstantCached;
    private boolean isAppRecord;
    private boolean isAppSelfWakeupRecord;
    private volatile boolean isRecognizeRecording; // true while a recognition capture is active
    private volatile boolean isRecording; // master flag driving the acquisition loop
    private volatile long startTime; // elapsedRealtime at record start; basis for timeouts
    private FixedLinkedBlockingDeque<AudioDataMessage> processCacheAudioDataQueue = new FixedLinkedBlockingDeque<>(100);
    private FixedLinkedBlockingDeque<AudioDataMessage> instantCacheAudioDataQueue = new FixedLinkedBlockingDeque<>(5);
    private int recordTimeoutTime = 20000; // current timeout, ms; defaults to AUDIO_RECORDING_TIMEOUT
    private final List<AudioDataMessage> cacheAudioLists = new CopyOnWriteArrayList();
    private DataAcquisitionInterface.CallBack acquisitionCallback = new DataAcquisitionCallBackImpl(); // log-only default until a real callback is set
    private volatile boolean hasRecordData = false;
    private SpeechCheck speechCheck = new SpeechCheck();
    private CountDown countDown = new CountDown();
    private int lastVolume = -1; // INVALID_VOLUME until first volume callback
    private int audioState = 0; // FLAG_* bitmask, 0 == idle
    private hhb audioRecordImpl = new hhb();

    /**
     * Handler bound to the acquisition thread's looper. A message with
     * {@code what == AUDIO_ACQUISITION_START} kicks off the blocking read loop,
     * carrying the target {@link Session} (or null) in {@code obj}.
     */
    public class AudioAcquisitionHandler extends Handler {
        public AudioAcquisitionHandler(Looper looper) {
            super(looper);
        }

        @Override
        public void handleMessage(Message message) {
            if (message.what == AUDIO_ACQUISITION_START) {
                Session session = (Session) BaseUtils.getTargetInstance(message.obj, Session.class).orElse(null);
                AudioAcquisition.this.acquisitionThread(session);
            }
        }
    }

    /**
     * Placeholder callback installed as the initial value of {@code acquisitionCallback}.
     * Every method merely logs an error: none of these should ever be invoked before a
     * real callback is registered, so a log line here flags a wiring problem upstream.
     */
    public class DataAcquisitionCallBackImpl implements DataAcquisitionInterface.CallBack {
        private DataAcquisitionCallBackImpl() {
        }

        @Override
        public void onCancel(Session session, boolean z) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onCancel call");
        }

        @Override
        public <T> void onDmResult(Intent intent, T t) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onDmResult call");
        }

        @Override
        public void onError(Session session, DataAcquisitionInterface.ErrorType errorType, ErrorInfo errorInfo) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onError call");
        }

        @Override
        public void onJudgeUpdateIdResult(AssistantMessage<?> assistantMessage) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onJudgeUpdateIdResult call");
        }

        @Override
        public <T> void onNluResult(Session session, T t) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onNluResult call");
        }

        @Override
        public void onRecordEnd() {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onRecordEnd call");
        }

        @Override
        public void onRecordStart() {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onRecordStart call");
        }

        @Override
        public void onResult(Session session, Object obj) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onResult call");
        }

        @Override
        public void onStart(AssistantMessage<?> assistantMessage, boolean z) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onStart call");
        }

        @Override
        public void onStop(Session session, boolean z) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onStop call");
        }

        @Override
        public <T> void onWakeupResult(T t) {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method onWakeupResult call");
        }

        @Override
        public void startFreeWakeupRecord() {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method startFreeWakeupRecord call");
        }

        @Override
        public void startRecord() {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method startRecord call");
        }

        @Override
        public void stopFreeWakeupRecord() {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method stopFreeWakeupRecord call");
        }

        @Override
        public void stopRecord() {
            KitLog.error(AudioAcquisition.TAG, "acquisitionCallback:unexpected method stopRecord call");
        }
    }

    /**
     * Starts the dedicated "AcquisitionThread" and binds an {@link AudioAcquisitionHandler}
     * to its looper. If the looper is unavailable (thread died before delivering one),
     * the handler stays null and an error is logged.
     *
     * Fix: removed the redundant {@code this.audioAcquisitionThread = null} dead store —
     * the field is null by default and is unconditionally reassigned on the next line.
     */
    public AudioAcquisition() {
        HandlerThread handlerThread = new HandlerThread("AcquisitionThread");
        this.audioAcquisitionThread = handlerThread;
        handlerThread.start();
        Looper looper = handlerThread.getLooper();
        if (looper == null) {
            KitLog.error(TAG, "Get looper returns null while init");
        } else {
            this.audioAcquisitionHandler = new AudioAcquisitionHandler(looper);
        }
    }

    /**
     * Blocking acquisition loop: while recording is active, repeatedly pulls audio via
     * {@code readRecordData()} with a 10 ms gap. Outside full-duplex / common-rejection
     * modes (bits 4|8 of audioState) the loop also bails out as soon as the supplied
     * session no longer matches {@code currentSession}.
     */
    public void acquisitionThread(Session session) {
        KitLog.info(TAG, " acquisitionThread start");
        while (this.isRecording) {
            // Session consistency is only enforced when neither the full-duplex
            // nor the common-rejection bit is set.
            if ((this.audioState & 12) == 0) {
                if (session == null) {
                    KitLog.info(TAG, "acquisitionThread session is null");
                    break;
                }
                if (this.currentSession == null) {
                    KitLog.info(TAG, "acquisitionThread currentSession is null");
                    break;
                }
                if (this.currentSession.getInteractionId() != session.getInteractionId()) {
                    KitLog.info(TAG, "acquisitionThread session is different with currentSession");
                    break;
                }
            }
            if (!readRecordData()) {
                break;
            }
            SystemClock.sleep(10L);
        }
        KitLog.debug(TAG, " acquisitionThread finish", new Object[0]);
    }

    /**
     * Reinterprets a little-endian 16-bit PCM byte buffer as short samples.
     * A trailing odd byte, if any, is dropped (length >> 1).
     */
    private short[] byteArray2ShortArray(byte[] bArr) {
        int sampleCount = bArr.length >> 1;
        short[] samples = new short[sampleCount];
        for (int i = 0; i < sampleCount; i++) {
            int lo = bArr[2 * i] & 0xFF;       // low byte first: little-endian
            int hi = bArr[2 * i + 1] << 8;
            samples[i] = (short) (hi | lo);
        }
        return samples;
    }

    // Caches the current acquisition data type into the SDK recognition context
    // under the "dataType" key, for later consumers of VoiceKitSdkContext.
    private void cacheDataType(String str) {
        VoiceKitSdkContext.getInstance().setRecognizeParam("dataType", str);
    }

    /**
     * Derives a 0..100 volume level from a PCM buffer for the volume callback.
     * Returns 0 when volume callbacks are disabled or the buffer is empty.
     * NOTE(review): the energy sum divides each squared sample by the BYTE length
     * (twice the sample count) with integer truncation — kept as-is to preserve the
     * original scaling; confirm before "fixing".
     */
    private int calculateVolume(byte[] bArr) {
        if (!IAssistantConfig.getInstance().sdkConfig().isNeedVolumeCallback()) {
            KitLog.debug(TAG, "skip calculateVolume", new Object[0]);
            return 0;
        }
        if (bArr == null || bArr.length == 0) {
            KitLog.error(TAG, "buffer is empty");
            return 0;
        }
        short[] samples = byteArray2ShortArray(bArr);
        int byteLen = bArr.length;
        int energy = 0;
        for (short sample : samples) {
            energy += (sample * sample) / byteLen; // per-sample integer division, as in the original
        }
        double db = Math.log10(energy / MAX_INT) * 20.0d;
        int volume;
        if (db < AGConnectConfig.DEFAULT.DOUBLE_VALUE) {
            volume = 0;
        } else if (db > 100.0d) {
            volume = 100;
        } else {
            volume = (int) db;
        }
        if (volume != this.lastVolume) {
            this.lastVolume = volume;
            KitLog.debug(TAG, "calculateVolume::{}", Integer.valueOf(volume));
        }
        return volume;
    }

    /** Cancels the pending countdown and any in-flight speech-presence check. */
    private void cancelSpeechCheck() {
        KitLog.debug(TAG, "speechCheck: cancel", new Object[0]);
        CountDown pendingCountDown = this.countDown;
        if (pendingCountDown != null) {
            pendingCountDown.c();
        }
        SpeechCheck pendingCheck = this.speechCheck;
        if (pendingCheck != null) {
            pendingCheck.e();
        }
    }

    /** @return true when RECORD_AUDIO is granted (0 == PackageManager.PERMISSION_GRANTED). */
    private boolean checkRecordAudioPermission() {
        int state = IAssistantConfig.getInstance().getAppContext()
                .checkSelfPermission("android.permission.RECORD_AUDIO");
        return state == 0;
    }

    /**
     * Decides whether recording has timed out. Full-duplex sessions never time out here.
     * @param z true once record data has been seen; while false, a tighter 3 s
     *          "no data" deadline applies before the general recordTimeoutTime.
     */
    private boolean checkRecordTimeOut(boolean z) {
        if ((this.audioState & FLAG_IS_FULLDUPLEX) != 0) {
            return false;
        }
        long elapsed = SystemClock.elapsedRealtime() - this.startTime;
        if (!z && elapsed > NO_RECORD_DATA_TIMEOUT) {
            KitLog.warn(TAG, "onRecordError error : no data time out.");
            return true;
        }
        if (elapsed > this.recordTimeoutTime) {
            // Message says 20s but recordTimeoutTime may be 30s for v5 — kept verbatim.
            KitLog.warn(TAG, "onRecordError AudioRecording 20s timeout");
            return true;
        }
        return false;
    }

    /**
     * Disables semantic-VAD in the recognize context when the device/engine does not
     * support it; the actual JSON mutation happens in the synthetic helpers.
     */
    private void closeSvadMode(RecognizeContext recognizeContext) {
        KitLog.info(TAG, "closeSvadMode");
        Optional.ofNullable(recognizeContext)
                .map(AudioAcquisition::lambda$closeSvadMode$13)
                .map(AudioAcquisition::lambda$closeSvadMode$14)
                .ifPresent(AudioAcquisition::lambda$closeSvadMode$16);
    }

    /**
     * In common-rejection mode with CONTINUE_FRONT_VAD set, grows the front-VAD window
     * for the next turn and records the new value in both the context and the calculator.
     */
    private void dealCommonRejectionBusiness(RecognizeContext recognizeContext, Session session, Intent intent) {
        InteractionIdInfo interactionIdInfo = (InteractionIdInfo) Optional.ofNullable(session)
                .map(AudioAcquisition::lambda$dealCommonRejectionBusiness$23)
                .orElse(null);
        // Short-circuit order matches the original: intent flag first, then mode check.
        if (!SecureIntentUtil.getSecureIntentBoolean(intent, RecognizerIntent.CONTINUE_FRONT_VAD, false)
                || !CommonRejection.Helper.stateManager().isCommonRejectionMode(interactionIdInfo)) {
            return;
        }
        long nextVadFrontTime = CommonRejection.Helper.calculator().calculateNextVadFrontTime();
        if (nextVadFrontTime > CommonRejection.Constants.MIN_VAD_FRONT_TIME.longValue()) {
            updateVadFrontTime(recognizeContext, nextVadFrontTime);
            CommonRejection.Helper.calculator().setVadFrontTime((int) nextVadFrontTime);
        }
    }

    /**
     * If semantic VAD is requested but neither "multi_adaptive_vad" nor
     * "semantical_vad" is supported by the recognizer, turns SVAD off in the context.
     */
    private void dealSvad(RecognizeContext recognizeContext) {
        boolean isSvadOn = extractSvadValue(recognizeContext);
        KitLog.info(TAG, "dealSvad isSvadOn= " + isSvadOn);
        if (!isSvadOn) {
            return;
        }
        if (ModuleInstanceFactory.Ability.recognize().checkSupportFeature("multi_adaptive_vad")) {
            KitLog.info(TAG, "support multi_adaptive_vad.");
            return;
        }
        if (!ModuleInstanceFactory.Ability.recognize().checkSupportFeature("semantical_vad")) {
            closeSvadMode(recognizeContext);
        }
    }

    /**
     * Cancels an active recognition capture: clears the recognizing/full-duplex/
     * common-rejection state bits, stops the recorder, drains both caches, and reports.
     * No-op when no recognition recording is in progress.
     */
    private void externalCancel() {
        if (!this.isRecognizeRecording) {
            return;
        }
        KitLog.debug(TAG, "externalCancel before , audioState={}", Integer.valueOf(this.audioState));
        this.audioState &= -14; // clear bits 1|4|8 (recognizing, full-duplex, common-rejection)
        this.isRecognizeRecording = false;
        if (this.audioState == 0) {
            this.isRecording = false;
        }
        this.acquisitionCallback.stopRecord();
        FullDuplexUiStateUtil.setIsAudioRecording(false);
        this.processCacheAudioDataQueue.clear();
        this.instantCacheAudioDataQueue.clear();
        OperationReportUtils.getInstance().reportRecordTypeRecord();
        OperationReportUtils.getInstance().reportEndRecord();
    }

    /**
     * Stops an active recognition capture, notifying the callback either way.
     * @return true when a recording was actually stopped, false if already stopped.
     */
    private boolean externalStop(Session session) {
        if (!this.isRecognizeRecording) {
            KitLog.info(TAG, "externalStop audio Recording has stopped.");
            this.acquisitionCallback.onStop(session, false);
            return false;
        }
        KitLog.info(TAG, "externalStop stop Recording start");
        this.acquisitionCallback.onStop(session, true);
        this.audioState &= ~FLAG_IS_RECOGNIZING; // same as the original's & (-2)
        KitLog.debug(TAG, "startAudioIfNeeded state = {}", Integer.valueOf(this.audioState));
        stopRecognizeRecord();
        if (this.audioState == 0) {
            this.isRecording = false;
            OperationReportUtils.getInstance().reportRecordTypeRecord();
            OperationReportUtils.getInstance().reportEndRecord();
        }
        return true;
    }

    /**
     * Returns the System/ClientContext payload from the context, or a fresh empty
     * {@link Payload} when the context or that entry is absent.
     */
    private Payload extractClientContextPayload(RecognizeContext recognizeContext) {
        Payload fallback = new Payload();
        if (recognizeContext == null) {
            return fallback;
        }
        HeaderPayload clientContext = recognizeContext.getContextsPayload(NAME_CLIENT_CONTEXT, NAMESPACE_SYSTEM);
        if (clientContext == null) {
            return fallback;
        }
        return clientContext.getPayload();
    }

    /**
     * Reads the common-rejection flags from the client context (skipped entirely in
     * full-duplex mode) and, when rejection mode is on, stores the mode and the
     * front-VAD time for this interaction.
     */
    private void extractCommonRejectionFields(RecognizeContext recognizeContext, Session session) {
        if (session.isFullDuplexMode()) {
            return;
        }
        Optional<Boolean> supportRejection = extractIsSupportRejection(recognizeContext);
        Boolean isRejectionMode;
        if (supportRejection.isPresent()) {
            isRejectionMode = supportRejection.get();
        } else {
            // Fall back to the string "rejectionMode" field when the boolean flag is absent.
            isRejectionMode = (Boolean) extractRejectionMode(recognizeContext)
                    .map(AudioAcquisition::lambda$extractCommonRejectionFields$17)
                    .orElse(Boolean.FALSE);
        }
        KitLog.info(TAG, "extractCommonRejectionFields, isSupportRejeciton exist = " + supportRejection.isPresent() + " ,isCommonRejectionMode=" + isRejectionMode.booleanValue());
        if (isRejectionMode.booleanValue()) {
            CommonRejection.Helper.stateManager().setRejectionMode(InteractionIdInfo.build(session.getSessionId(), session.getInteractionId()), isRejectionMode);
            CommonRejection.Helper.calculator().setVadFrontTime(extractVadFontTime(recognizeContext));
        }
    }

    /** Extracts the optional boolean "support rejection" flag from the client context JSON. */
    private Optional<Boolean> extractIsSupportRejection(RecognizeContext recognizeContext) {
        return Optional.ofNullable(extractClientContextPayload(recognizeContext))
                .map(new joa())
                .map(AudioAcquisition::lambda$extractIsSupportRejection$21)
                .map(new ad4());
    }

    /** Extracts the optional string "rejection mode" field from the client context JSON. */
    private Optional<String> extractRejectionMode(RecognizeContext recognizeContext) {
        return Optional.ofNullable(extractClientContextPayload(recognizeContext))
                .map(new joa())
                .map(AudioAcquisition::lambda$extractRejectionMode$22)
                .map(new uq1());
    }

    /** @return the SVAD switch read from the context JSON; false when absent or context is null. */
    private boolean extractSvadValue(RecognizeContext recognizeContext) {
        return ((Boolean) Optional.ofNullable(recognizeContext)
                .map(AudioAcquisition::lambda$extractSvadValue$10)
                .map(AudioAcquisition::lambda$extractSvadValue$11)
                .map(AudioAcquisition::lambda$extractSvadValue$12)
                .map(new ad4())
                .orElse(Boolean.FALSE)).booleanValue();
    }

    /** @return the front-VAD time (ms) from the context JSON, defaulting to 10000 when absent. */
    private int extractVadFontTime(RecognizeContext recognizeContext) {
        return ((Integer) Optional.ofNullable(recognizeContext)
                .map(AudioAcquisition::lambda$extractVadFontTime$18)
                .map(AudioAcquisition::lambda$extractVadFontTime$19)
                .map(AudioAcquisition::lambda$extractVadFontTime$20)
                .map(new uq1())
                .map(new bq())
                .orElse(10000)).intValue();
    }

    /**
     * Copies every entry of the cached CA-model info (EXT_NLU_CONTEXT_CA_INFO, parsed
     * into a JsonObject) into the given context object; no-op when absent or invalid.
     */
    private static void fillCaModelParams(final JsonObject jsonObject) {
        JsonObject caInfo = (JsonObject) Optional.ofNullable(VoiceKitSdkContext.getInstance().get(RecognizerIntent.EXT_NLU_CONTEXT_CA_INFO))
                .filter(AudioAcquisition::lambda$fillCaModelParams$28)
                .map(new rma())
                .filter(AudioAcquisition::lambda$fillCaModelParams$29)
                .map(AudioAcquisition::lambda$fillCaModelParams$30)
                .orElse(new JsonObject());
        caInfo.entrySet().forEach(entry -> lambda$fillCaModelParams$31(jsonObject, entry));
    }

    /**
     * Sets the "firstDialog" flag: in full-duplex mode it comes from the full-duplex
     * state manager; otherwise it is true if the context already flagged it or no
     * effective business has occurred yet in this voice flow.
     */
    private void fillFirstDialog(Session session, JsonObject jsonObject) {
        boolean firstDialog;
        if (session.isFullDuplexMode()) {
            firstDialog = FullDuplex.stateManager().isFirstDialog(InteractionIdInfo.build(session.getSessionId(), session.getInteractionId()));
        } else {
            boolean alreadyFlagged = ((Boolean) Optional.ofNullable(jsonObject.get("firstDialog")).map(new ad4()).orElse(Boolean.FALSE)).booleanValue();
            firstDialog = alreadyFlagged || !VoiceBusinessFlowCache.getInstance().isOccurredEffectiveBusiness();
        }
        jsonObject.addProperty("firstDialog", Boolean.valueOf(firstDialog));
    }

    /** Copies the cached home-id and version-type SDK context values into the JSON, when non-empty. */
    private void fillHomeIdAndVersionType(JsonObject jsonObject) {
        String homeId = (String) VoiceKitSdkContext.getInstance().get(RecognizerIntent.EXT_HOME_ID, String.class).orElse("");
        String versionType = (String) VoiceKitSdkContext.getInstance().get(RecognizerIntent.EXT_VERSION_TYPE, String.class).orElse("");
        if (!TextUtils.isEmpty(homeId)) {
            jsonObject.addProperty(RecognizerIntent.EXT_HOME_ID, homeId);
        }
        if (!TextUtils.isEmpty(versionType)) {
            jsonObject.addProperty(RecognizerIntent.EXT_VERSION_TYPE, versionType);
        }
    }

    /**
     * When the intent carries interrupt-TTS info, marks the context as an interrupt;
     * over a Bluetooth headset it also attaches the interrupted TTS text as
     * "speakerInformation" so the server can account for echo.
     */
    private void fillInterruptParam(Intent intent, JsonObject jsonObject) {
        InterruptTtsInfo interruptInfo = (InterruptTtsInfo) SecureIntentUtil.getSecureIntentParcelable(intent, RecognizerIntent.EXT_INTERRUPT_INFO, InterruptTtsInfo.class);
        if (interruptInfo == null) {
            return;
        }
        jsonObject.addProperty("interrupt", Boolean.TRUE);
        if (HeadsetScoManager.getInstance().isBluetoothConnected()) {
            JsonObject speakerInfo = new JsonObject();
            speakerInfo.addProperty("scenario", "bluetooth");
            speakerInfo.addProperty("speakerText", interruptInfo.getTts());
            jsonObject.add("speakerInformation", speakerInfo);
        }
    }

    /** Backfills "isLocationServiceOn" from the system location service if the caller didn't supply it. */
    private void fillLocationServiceOn(JsonObject jsonObject) {
        if (!jsonObject.has("isLocationServiceOn")) {
            KitLog.warn(TAG, "voice context has no isLocationServiceOn");
            boolean serviceOn = LocationUtil.getInstance().isLocationServiceOn(IAssistantConfig.getInstance().getAppContext());
            jsonObject.addProperty("isLocationServiceOn", Boolean.valueOf(serviceOn));
        }
    }

    /**
     * Adds the HMS login status to the context: taken from the intent extra when present,
     * otherwise fetched asynchronously on the SDK thread pool with a 5 s latch timeout.
     * Skipped entirely when HMS is not needed; the property is omitted when the async
     * fetch produced nothing.
     *
     * Fixes: re-interrupts the current thread after a swallowed InterruptedException
     * (so callers can observe the interrupt), collapses the redundant double
     * {@code atomicReference.get()} read, and uses GET_LOGINSTATUS_TIMEOUT instead of
     * a magic 5000L.
     */
    private void fillLoginStatus(Intent intent, JsonObject jsonObject) {
        if (!IAssistantConfig.getInstance().sdkConfig().isNeedHms()) {
            KitLog.warn(TAG, "fillLoginStatus ignore, do not need hms.");
            return;
        }
        Boolean loginStatus;
        if (intent.hasExtra(RecognizerIntent.EXT_LOGIN_STATUS)) {
            loginStatus = Boolean.valueOf(SecureIntentUtil.getSecureIntentBoolean(intent, RecognizerIntent.EXT_LOGIN_STATUS, false));
        } else {
            final AtomicReference atomicReference = new AtomicReference();
            final CountDownLatch latch = new CountDownLatch(1);
            ModuleInstanceFactory.Tools.THREAD_POOL.execute(() -> lambda$fillLoginStatus$27(atomicReference, latch));
            try {
                latch.await(GET_LOGINSTATUS_TIMEOUT, TimeUnit.MILLISECONDS);
            } catch (InterruptedException ignored) {
                KitLog.error(TAG, "fillLoginStatus await error");
                Thread.currentThread().interrupt(); // preserve the interrupt for callers
            }
            loginStatus = (Boolean) atomicReference.get(); // null when the fetch never completed
        }
        if (loginStatus != null) {
            jsonObject.addProperty(RecognizerIntent.EXT_LOGIN_STATUS, loginStatus);
        }
    }

    /**
     * Builds the JSON recognize-context string for a recognition request: parses the
     * caller-provided context (or starts fresh), adds permission list / HiAI version on
     * non-TV devices, generates the System/ClientContext payload, and serializes the result.
     */
    private String fillRecognizeContextString(Intent intent, Session session) {
        String extVoiceContext = SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.EXT_VOICE_CONTEXT);
        KitLog.debug(TAG, "extVoiceContext is {}", extVoiceContext);
        long startMillis = System.currentTimeMillis();
        RecognizeContext recognizeContext = (RecognizeContext) Optional.ofNullable(extVoiceContext)
                .map(AudioAcquisition::lambda$fillRecognizeContextString$8)
                .orElse(new RecognizeContext());
        int hiAiVersion = 0;
        if (!DeviceUtil.isTv()) {
            try {
                hiAiVersion = IAssistantConfig.getInstance().getAppContext().getPackageManager().getPackageInfo("com.huawei.hiai", 0).versionCode;
            } catch (PackageManager.NameNotFoundException | RuntimeException unused) {
                KitLog.error(TAG, "RuntimeException or NameNotFoundException");
            }
            recognizeContext.getContexts().add(PermissionUtil.getAppPermissionList());
            modifyRecognizeContext(recognizeContext);
        }
        Payload clientContextPayload = generatePayload(recognizeContext, intent, session, hiAiVersion);
        // Only attach the generated payload when the caller didn't already provide one.
        if (recognizeContext.getContextsPayload(NAME_CLIENT_CONTEXT, NAMESPACE_SYSTEM) == null) {
            HeaderPayload headerPayload = new HeaderPayload();
            headerPayload.setHeader(new Header(NAME_CLIENT_CONTEXT, NAMESPACE_SYSTEM));
            headerPayload.setPayload(clientContextPayload);
            recognizeContext.getContexts().add(headerPayload);
        }
        String json = GsonUtils.toJson(recognizeContext);
        KitLog.debug(TAG, "recognizeContext is {}", json);
        KitLog.info(TAG, "recognizeContext cost " + (System.currentTimeMillis() - startMillis));
        return json;
    }

    /**
     * Arms the full-duplex speech-presence check for the session and installs a
     * listener that fires when speech is detected; no-op when speechCheck is null.
     */
    private void fullDuplexSpeechCheckStart(final Session session) {
        SpeechCheck check = this.speechCheck;
        if (check == null) {
            return;
        }
        KitLog.info(TAG, "fullDuplexSpeechCheckStart: start, session =" + session.getSecureInfo());
        check.a();
        check.c(() -> lambda$fullDuplexSpeechCheckStart$34(session));
    }

    /**
     * Builds the system ClientContext payload: starts from the client context
     * already present in {@code recognizeContext} and enriches it with
     * device/network/session flags. {@code i} is the HiAI package versionCode
     * (0 when unavailable).
     */
    private Payload generatePayload(RecognizeContext recognizeContext, Intent intent, Session session, int i) {
        extractCommonRejectionFields(recognizeContext, session);
        dealCommonRejectionBusiness(recognizeContext, session, intent);
        dealSvad(recognizeContext);
        Payload extractClientContextPayload = extractClientContextPayload(recognizeContext);
        final JsonObject jsonObject = extractClientContextPayload.getJsonObject();
        if (!DeviceUtil.isTv()) {
            // TV devices omit location/network/interrupt/lock-state details.
            fillLocationServiceOn(jsonObject);
            jsonObject.addProperty("isNetworkAvailable", Boolean.valueOf(NetworkUtil.isNetworkAvailable(IAssistantConfig.getInstance().getAppContext())));
            fillInterruptParam(intent, jsonObject);
            if (!jsonObject.has("screenLockerStatus")) {
                // Only fill the lock state when the caller did not supply one.
                jsonObject.addProperty("screenLockerStatus", Boolean.valueOf(KeyguardUtil.isKeyguardLocked()));
            }
        }
        if (i != 0) {
            jsonObject.addProperty("HiAiVersion", String.valueOf(i));
        }
        jsonObject.addProperty("isDebugOn", Boolean.valueOf(IAssistantConfig.getInstance().isLogDebug()));
        jsonObject.addProperty("ttsStreamingSpeaking", Boolean.valueOf(StreamingStatusManager.getInstance().getTtsStreamingSpeaking()));
        if (IAssistantConfig.getInstance().sdkConfig().isNeedSsid()) {
            jsonObject.addProperty("wifiSsid", NetworkUtil.getWifiSsid());
        }
        fillLoginStatus(intent, jsonObject);
        // Propagate the optional full-scene hint from the SDK context, if set.
        Optional.ofNullable(VoiceKitSdkContext.getInstance().get(RecognizerIntent.EXT_FULL_SCENE)).ifPresent(new Consumer() {
            @Override
            public final void accept(Object obj) {
                AudioAcquisition.lambda$generatePayload$9(jsonObject, obj);
            }
        });
        fillCaModelParams(jsonObject);
        fillFirstDialog(session, jsonObject);
        fillHomeIdAndVersionType(jsonObject);
        jsonObject.addProperty("isTtsSpeaking", Boolean.valueOf(ModuleInstanceFactory.Ability.tts().isSpeaking()));
        extractClientContextPayload.setJsonObject(jsonObject);
        return extractClientContextPayload;
    }

    private void handleInstantCachedIfNeeded(AudioDataMessage audioDataMessage) {
        // Mirror the frame into the instant cache only while both the
        // instant-cache bit (4) and the recognize bit (1) are set.
        boolean instantCaching = (this.audioState & 4) != 0;
        boolean recognizing = (this.audioState & 1) != 0;
        if (instantCaching && recognizing) {
            this.instantCacheAudioDataQueue.offer(audioDataMessage);
        }
    }

    private void handleProcessCachedIfNeeded() {
        // Drain the process cache when a cache bit (4 or 8) and the recognize
        // bit (1) are both set and there is something queued.
        int state = this.audioState;
        boolean cacheBitSet = (state & 12) != 0;
        boolean recognizeBitSet = (state & 1) != 0;
        if (cacheBitSet && recognizeBitSet && !this.processCacheAudioDataQueue.isEmpty()) {
            processCachedAudioData();
        }
    }

    /**
     * Processes one recorded frame: drops all-zero frames (when the filter is
     * enabled), tracks volume, buffers the frame, and forwards buffered frames
     * once the recognize engine is ready.
     *
     * @return false only when recording has timed out; true otherwise.
     */
    private boolean handleRecordData(AudioDataMessage audioDataMessage) {
        if (IAssistantConfig.getInstance().sdkConfig().isFilterZeroData() && isZeroData(audioDataMessage.getBuffers())) {
            KitLog.info(TAG, "zero data");
        } else {
            this.hasRecordData = true;
            int calculateVolume = calculateVolume(audioDataMessage.getBuffers());
            audioDataMessage.setVolume(calculateVolume);
            SpeechCheck speechCheck = this.speechCheck;
            if (speechCheck != null) {
                // Feed the frame volume into the energy/speech detector.
                speechCheck.b(calculateVolume);
            }
            this.cacheAudioLists.add(audioDataMessage);
        }
        if (checkRecordTimeOut(this.hasRecordData)) {
            this.acquisitionCallback.onError(this.currentSession, DataAcquisitionInterface.ErrorType.ACQUISITION_TIMEOUT, new ErrorInfo(3, "record timeout"));
            return false;
        }
        if (ModuleInstanceFactory.Ability.recognize().isInitEngineFinished()) {
            // Flush frames buffered while the engine was still initializing.
            processRecordBuffData();
        }
        return true;
    }

    private boolean handleRecordDataToCache(AudioDataMessage audioDataMessage) {
        // Queue the frame for later processing; all-zero frames are dropped
        // when the SDK is configured to filter them. Always returns true.
        boolean filterZero = IAssistantConfig.getInstance().sdkConfig().isFilterZeroData();
        if (filterZero && isZeroData(audioDataMessage.getBuffers())) {
            KitLog.info(TAG, "handleRecordDataToCache zero data");
        } else {
            this.processCacheAudioDataQueue.offer(audioDataMessage);
        }
        return true;
    }

    private boolean handleRecordDataToWakeup(AudioDataMessage audioDataMessage) {
        // Forward the frame to the wakeup pipeline; all-zero frames are
        // dropped when the filter is enabled. Always returns true.
        boolean filterZero = IAssistantConfig.getInstance().sdkConfig().isFilterZeroData();
        if (filterZero && isZeroData(audioDataMessage.getBuffers())) {
            KitLog.info(TAG, "handleRecordDataToWakeup zero data");
        } else {
            this.acquisitionCallback.onWakeupResult(audioDataMessage);
        }
        return true;
    }

    /**
     * Drains the instant cache into the normal record-data path, once per
     * recording (guarded by hasReadInstantCached). Active only while both the
     * instant-cache bit (4) and the recognize bit (1) are set.
     */
    private void handleVadCachedIfNeeded() {
        int i = this.audioState;
        if ((i & 4) == 0 || (i & 1) == 0 || this.hasReadInstantCached) {
            return;
        }
        // Fix: the original log lines said "handleInstantCachedIfNeeded",
        // which is a different method; log this method's name so traces match.
        KitLog.info(TAG, "handleVadCachedIfNeeded size = " + this.instantCacheAudioDataQueue.size());
        while (!this.instantCacheAudioDataQueue.isEmpty()) {
            AudioDataMessage poll = this.instantCacheAudioDataQueue.poll();
            if (poll != null) {
                handleRecordData(poll);
            }
        }
        this.hasReadInstantCached = true;
        KitLog.debug(TAG, "handleVadCachedIfNeeded read finish", new Object[0]);
    }

    /**
     * Cancels an in-flight recognize recording: stops the audio source,
     * resets AEC and full-duplex UI state, notifies the callback, and clears
     * both audio caches. No-op when no recording is active.
     */
    private void innerCancel() {
        if (!this.isRecognizeRecording) {
            KitLog.info(TAG, "audio Recording has canceled.");
            return;
        }
        KitLog.info(TAG, "cancel Recording start");
        this.isRecognizeRecording = false;
        // 13 == bits 1|4|8 — presumably the recognize/cache bits seen in
        // readRecordData; stopAudioIfNeeded's contract is not visible here.
        stopAudioIfNeeded(13);
        AecStateUtil.tryUpdateAecState(false);
        FullDuplexUiStateUtil.setIsAudioRecording(false);
        this.acquisitionCallback.onRecordEnd();
        this.processCacheAudioDataQueue.clear();
        this.instantCacheAudioDataQueue.clear();
        KitLog.debug(TAG, "cancel Recording end", new Object[0]);
    }

    /**
     * Stops an active recognize recording.
     *
     * @return true when a recording was actually stopped, false when nothing
     *         was running (the callback is notified either way).
     */
    private boolean innerStop(Session session) {
        boolean wasRecording = this.isRecognizeRecording;
        if (!wasRecording) {
            KitLog.info(TAG, "innerStop audio Recording has stopped.");
            this.acquisitionCallback.onStop(session, false);
            return false;
        }
        KitLog.info(TAG, "innerStop stop Recording start");
        this.acquisitionCallback.onStop(session, true);
        stopAudioIfNeeded(1);
        stopRecognizeRecord();
        KitLog.debug(TAG, "innerStop stop Recording end", new Object[0]);
        return true;
    }

    private boolean isAudioFullDuplexRecognize(Session session, Intent intent) {
        // Audio-driven full-duplex recognize requires a full-duplex session
        // and an intent carrying neither inline text nor an explicit dataType.
        if (session == null || intent == null || !session.isFullDuplexMode()) {
            return false;
        }
        return !intent.hasExtra("text") && !intent.hasExtra("dataType");
    }

    private boolean isCmdOrScenarioType(Intent intent) {
        // True when the intent's dataType marks a command or scenario request.
        String dataType = SecureIntentUtil.getSecureIntentString(intent, "dataType");
        if (TextUtils.equals(dataType, RecognizerIntent.COMMAND_DATA_TYPE)) {
            return true;
        }
        return TextUtils.equals(dataType, RecognizerIntent.SCENARIO_DATA_TYPE);
    }

    /**
     * Decides whether AEC (echo cancellation) must be enabled for this start:
     * never for app self-recording; always for full-duplex, common-rejection
     * mode, or an interrupt restart; otherwise per the "isOpenAsrAec" flag in
     * the supplied EXT_VOICE_CONTEXT (default false).
     */
    private boolean isNeedTurnOnAec(Intent intent, Session session) {
        if (intent != null && SecureIntentUtil.getSecureIntentBoolean(intent, RecognizerIntent.EXT_SELF_RECORDING, false)) {
            return false;
        }
        if (session != null && session.isFullDuplexMode()) {
            return true;
        }
        boolean commonRejection = session != null && CommonRejection.Helper.stateManager().isCommonRejectionMode(InteractionIdInfo.build(session.getSessionId(), session.getInteractionId()));
        // Fix: the original dereferenced intent here without a null check and
        // could throw NullPointerException when intent == null.
        if (commonRejection || (intent != null && intent.hasExtra(RecognizerIntent.EXT_INTERRUPT_INFO))) {
            return true;
        }
        // Hoisted: the original read EXT_VOICE_CONTEXT twice.
        String voiceContext = intent == null ? null : SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.EXT_VOICE_CONTEXT);
        if (TextUtils.isEmpty(voiceContext)) {
            KitLog.info(TAG, "[aec feature]EXT_VOICE_CONTEXT is empty");
            return false;
        }
        return ((Boolean) Optional.ofNullable(extractClientContextPayload((RecognizeContext) GsonUtils.toBean(voiceContext, RecognizeContext.class))).map(new joa()).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                JsonElement lambda$isNeedTurnOnAec$33;
                lambda$isNeedTurnOnAec$33 = AudioAcquisition.lambda$isNeedTurnOnAec$33((JsonObject) obj);
                return lambda$isNeedTurnOnAec$33;
            }
        }).map(new ad4()).orElse(Boolean.FALSE)).booleanValue();
    }

    private boolean isZeroData(byte[] data) {
        // True when every byte in the buffer is zero, i.e. the recorder
        // produced silence-only data. An empty buffer counts as zero data.
        int length = data.length;
        for (int idx = 0; idx < length; idx++) {
            if (data[idx] != 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Derives the audioState bits to request for this message:
     * 5 (1|4) when the header is in full-duplex mode, 9 (1|8) when
     * common-rejection mode is active for its interaction, otherwise 1
     * (plain recognize). A null message yields 1.
     */
    private int judgeRecordState(AssistantMessage<?> assistantMessage) {
        Optional map = Optional.ofNullable(assistantMessage).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                CommonHeader header;
                header = ((AssistantMessage) obj).getHeader();
                return header;
            }
        }).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                Boolean lambda$judgeRecordState$3;
                lambda$judgeRecordState$3 = AudioAcquisition.lambda$judgeRecordState$3((CommonHeader) obj);
                return lambda$judgeRecordState$3;
            }
        });
        Boolean bool = Boolean.FALSE;
        // Full-duplex flag from the header (false when message/header absent).
        boolean booleanValue = ((Boolean) map.orElse(bool)).booleanValue();
        // Common-rejection flag keyed by (sessionId, interactionId).
        boolean booleanValue2 = ((Boolean) Optional.ofNullable(assistantMessage).map(new dl()).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                InteractionIdInfo lambda$judgeRecordState$4;
                lambda$judgeRecordState$4 = AudioAcquisition.lambda$judgeRecordState$4((CommonHeader) obj);
                return lambda$judgeRecordState$4;
            }
        }).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                Boolean lambda$judgeRecordState$5;
                lambda$judgeRecordState$5 = AudioAcquisition.lambda$judgeRecordState$5((InteractionIdInfo) obj);
                return lambda$judgeRecordState$5;
            }
        }).orElse(bool)).booleanValue();
        if (booleanValue) {
            return 5;
        }
        return booleanValue2 ? 9 : 1;
    }

    public static HeaderPayload lambda$closeSvadMode$13(RecognizeContext context) {
        // Locate the ASR settings payload in the system namespace.
        return context.getContextsPayload(NAME_ASR_SETTINGS_PARAMETER, NAMESPACE_SYSTEM);
    }

    public static JsonObject lambda$closeSvadMode$14(HeaderPayload asrSettings) {
        // Unwrap the raw JSON body of the ASR settings payload.
        return asrSettings.getPayload().getJsonObject();
    }

    public static void lambda$closeSvadMode$16(final JsonObject jsonObject) {
        // Turn smart VAD off and, when "subVadEndTimems" is present, promote
        // it into "vadendtimems". uq1/bq presumably convert the JsonElement to
        // an Integer — opaque helpers, confirm against their sources.
        jsonObject.addProperty("svad", Boolean.FALSE);
        Optional.ofNullable(jsonObject.get("subVadEndTimems")).map(new uq1()).map(new bq()).ifPresent(new Consumer() {
            @Override
            public final void accept(Object obj) {
                jsonObject.addProperty("vadendtimems", (Integer) obj);
            }
        });
    }

    public static InteractionIdInfo lambda$dealCommonRejectionBusiness$23(Session activeSession) {
        // Common-rejection state is keyed by (sessionId, interactionId).
        return InteractionIdInfo.build(activeSession.getSessionId(), activeSession.getInteractionId());
    }

    public static Boolean lambda$extractCommonRejectionFields$17(String rejectionMode) {
        // Any mode other than NO_REJECTION means rejection handling is on.
        return Boolean.valueOf(!TextUtils.equals(rejectionMode, CommonRejection.Constants.NO_REJECTION));
    }

    public static JsonElement lambda$extractIsSupportRejection$21(JsonObject payload) {
        // Raw "isSupportRejection" element; null when the key is absent.
        return payload.get("isSupportRejection");
    }

    public static JsonElement lambda$extractRejectionMode$22(JsonObject payload) {
        // Raw "rejectionMode" element; null when the key is absent.
        return payload.get("rejectionMode");
    }

    public static HeaderPayload lambda$extractSvadValue$10(RecognizeContext context) {
        // Locate the ASR settings payload in the system namespace.
        return context.getContextsPayload(NAME_ASR_SETTINGS_PARAMETER, NAMESPACE_SYSTEM);
    }

    public static JsonObject lambda$extractSvadValue$11(HeaderPayload asrSettings) {
        // Unwrap the raw JSON body of the ASR settings payload.
        return asrSettings.getPayload().getJsonObject();
    }

    public static JsonElement lambda$extractSvadValue$12(JsonObject asrSettings) {
        // Raw smart-VAD flag; null when the key is absent.
        return asrSettings.get("svad");
    }

    public static HeaderPayload lambda$extractVadFontTime$18(RecognizeContext context) {
        // Locate the ASR settings payload in the system namespace.
        return context.getContextsPayload(NAME_ASR_SETTINGS_PARAMETER, NAMESPACE_SYSTEM);
    }

    public static JsonObject lambda$extractVadFontTime$19(HeaderPayload asrSettings) {
        // Unwrap the raw JSON body of the ASR settings payload.
        return asrSettings.getPayload().getJsonObject();
    }

    public static JsonElement lambda$extractVadFontTime$20(JsonObject asrSettings) {
        // "vadfonttimems" (sic) is the established wire key — do not "fix" it.
        return asrSettings.get("vadfonttimems");
    }

    public static boolean lambda$fillCaModelParams$28(Object candidate) {
        // Only String values can carry the CA model parameter JSON.
        return candidate instanceof String;
    }

    public static boolean lambda$fillCaModelParams$29(String json) {
        // Skip null/empty parameter strings.
        return !TextUtils.isEmpty(json);
    }

    public static JsonObject lambda$fillCaModelParams$30(String json) {
        // Parse the CA model parameter string into a JSON object.
        return (JsonObject) GsonUtils.toBean(json, JsonObject.class);
    }

    public static void lambda$fillCaModelParams$31(JsonObject target, Map.Entry entry) {
        // Copy one parsed CA-model entry into the payload; null entries and
        // null keys are silently skipped.
        if (entry != null && entry.getKey() != null) {
            target.add((String) entry.getKey(), (JsonElement) entry.getValue());
        }
    }

    public static void lambda$fillLoginStatus$27(AtomicReference loginStatus, CountDownLatch latch) {
        // Query the HMS login state, then release the waiting caller.
        boolean loggedIn = HmsProxyFactory.getHmsDelegateProxy().getLoginStatus(IAssistantConfig.getInstance().getAppContext());
        loginStatus.set(Boolean.valueOf(loggedIn));
        latch.countDown();
    }

    public static RecognizeContext lambda$fillRecognizeContextString$8(String json) {
        // Deserialize the caller-supplied voice context JSON.
        return (RecognizeContext) GsonUtils.toBean(json, RecognizeContext.class);
    }

    public static void lambda$fullDuplexSpeechCheckStart$34(Session session) {
        // First speech energy seen in full-duplex mode: notify the recognizer.
        KitLog.debug(TAG, "fullDuplexSpeechCheckStart: speech start", new Object[0]);
        ModuleInstanceFactory.Ability.recognize().onEnergyDetected(session);
    }

    public static void lambda$generatePayload$9(JsonObject payload, Object fullScene) {
        // Record the full-scene hint as a string property on the payload.
        payload.addProperty(RecognizerIntent.EXT_FULL_SCENE, fullScene.toString());
    }

    public static JsonElement lambda$isNeedTurnOnAec$33(JsonObject clientContext) {
        // Raw "isOpenAsrAec" flag; null when the key is absent.
        return clientContext.get("isOpenAsrAec");
    }

    public static Boolean lambda$judgeRecordState$3(CommonHeader header) {
        // Boxed full-duplex flag, for use in an Optional chain.
        return Boolean.valueOf(header.isFullDuplexMode());
    }

    public static InteractionIdInfo lambda$judgeRecordState$4(CommonHeader header) {
        // Build the (sessionId, interactionId) key for rejection lookup.
        return InteractionIdInfo.build(header.getSessionId(), header.getInteractionId());
    }

    public static Boolean lambda$judgeRecordState$5(InteractionIdInfo info) {
        // Whether common-rejection mode is active for this interaction.
        return Boolean.valueOf(CommonRejection.Helper.stateManager().isCommonRejectionMode(info));
    }

    public static RecognizeContext lambda$preprocessAsrContext$7(String json) {
        // Deserialize the EXT_VOICE_CONTEXT JSON into a RecognizeContext.
        return (RecognizeContext) GsonUtils.toBean(json, RecognizeContext.class);
    }

    public static void lambda$saveContextIfNeeded$1(CommonHeader header) {
        // Remember this header as the current full-duplex context.
        ModuleInstanceFactory.Ability.recognize().saveFullDuplexCurrentContext(header);
    }

    public static InteractionIdInfo lambda$setVadEndTimestampIfCommonRejection$32(Session session, Session unusedOptionalValue) {
        // Decompiled capture artifact: the Optional's value (second arg) is
        // ignored; the captured outer session is used for the key.
        return InteractionIdInfo.build(session.getSessionId(), session.getInteractionId());
    }

    public static void lambda$speechCheckStart$35(Session session) {
        // No speech energy detected within the timeout: notify the recognizer.
        KitLog.debug(TAG, "speechCheck: speech timeout", new Object[0]);
        ModuleInstanceFactory.Ability.recognize().onEnergyDetectTimeout(session);
    }

    public static void lambda$speechCheckStart$36(Session session) {
        // First speech energy detected: notify the recognizer.
        KitLog.debug(TAG, "speechCheck: speech start", new Object[0]);
        ModuleInstanceFactory.Ability.recognize().onEnergyDetected(session);
    }

    public void lambda$speechCheckStart$37(final Session session) {
        // Deferred body of speechCheckStart: arm the no-speech timeout, then
        // listen for the first speech energy.
        SpeechCheck checker = this.speechCheck;
        if (checker == null) {
            return;
        }
        KitLog.info(TAG, "speechCheck: start");
        checker.d(new CountDown.CountDownFinishListener() {
            @Override
            public final void onFinish() {
                AudioAcquisition.lambda$speechCheckStart$35(session);
            }
        }, NO_SPEECH_TIMEOUT);
        checker.c(new SpeechCheck.SpeechListener() {
            @Override
            public final void onSpeaking() {
                AudioAcquisition.lambda$speechCheckStart$36(session);
            }
        });
    }

    public Boolean lambda$startAudioIfNeeded$0(AssistantMessage message) {
        // Kick off recording for this message's header; boxed for Optional use.
        return Boolean.valueOf(startRecord(message.getHeader()));
    }

    public static HeaderPayload lambda$updateVadFrontTime$24(RecognizeContext context) {
        // Locate the ASR settings payload in the system namespace.
        return context.getContextsPayload(NAME_ASR_SETTINGS_PARAMETER, NAMESPACE_SYSTEM);
    }

    public static JsonObject lambda$updateVadFrontTime$25(HeaderPayload asrSettings) {
        // Unwrap the raw JSON body of the ASR settings payload.
        return asrSettings.getPayload().getJsonObject();
    }

    public static void lambda$updateVadFrontTime$26(long frontTimeMs, JsonObject asrSettings) {
        // "vadfonttimems" (sic) is the established wire key — keep as-is.
        asrSettings.addProperty("vadfonttimems", String.valueOf(frontTimeMs));
    }

    private void modifyRecognizeContext(RecognizeContext recognizeContext) {
        // Attach or refresh the VisibleInfo/UserWindow payload, but only when
        // the visible ability currently has data to report.
        if (ModuleInstanceFactory.Ability.visible().syncGetVisibleInfo() == null) {
            return;
        }
        HeaderPayload userWindow = BaseUtils.parseHeaderPayload(recognizeContext.getContexts(), "VisibleInfo", "UserWindow");
        if (userWindow != null) {
            // ynb.h()/ynb.d() presumably build the visible-info payload —
            // opaque helpers, confirm against their sources.
            userWindow.setPayload(ynb.h());
        } else {
            recognizeContext.getContexts().add(ynb.d());
        }
    }

    private String preprocessAsrContext(String extVoiceContext) {
        // Normalize the caller-supplied EXT_VOICE_CONTEXT: parse it (falling
        // back to an empty context when absent or unparsable), apply the SVAD
        // adjustments, and re-serialize.
        KitLog.debug(TAG, "preprocessAsrContext extVoiceContext is {}", extVoiceContext);
        RecognizeContext recognizeContext;
        if (extVoiceContext == null) {
            recognizeContext = new RecognizeContext();
        } else {
            RecognizeContext parsed = (RecognizeContext) GsonUtils.toBean(extVoiceContext, RecognizeContext.class);
            recognizeContext = parsed == null ? new RecognizeContext() : parsed;
        }
        dealSvad(recognizeContext);
        String json = GsonUtils.toJson(recognizeContext);
        KitLog.debug(TAG, "preprocessAsrContext recognizeContext is {}", json);
        return json;
    }

    private void processCachedAudioData() {
        // Drain the process cache into the normal record-data path.
        KitLog.info(TAG, "cacheAudioDataQueue size = " + this.processCacheAudioDataQueue.size());
        AudioDataMessage cached;
        while ((cached = this.processCacheAudioDataQueue.poll()) != null) {
            handleRecordData(cached);
        }
        KitLog.debug(TAG, "cacheAudioDataQueue read finish", new Object[0]);
    }

    private void processRecordBuffData() {
        // Forward frames buffered while the recognize engine was still
        // initializing, then drop them from the local cache.
        if (this.cacheAudioLists.isEmpty()) {
            return;
        }
        for (AudioDataMessage cached : this.cacheAudioLists) {
            this.acquisitionCallback.onResult(this.currentSession, cached);
        }
        this.cacheAudioLists.clear();
    }

    /**
     * Reads one buffer from the recorder and routes it by the audioState
     * bits: 1 -> recognize, 4/8 -> process cache, 2 -> wakeup.
     *
     * @return false when the read failed or recording timed out.
     */
    private boolean readRecordData() {
        handleVadCachedIfNeeded();
        handleProcessCachedIfNeeded();
        byte[] buffer = new byte[DATA_BUFFER_SIZE];
        int readCount = this.audioRecordImpl.a(buffer);
        AudioDataMessage message = new AudioDataMessage();
        message.setBuffers(buffer);
        message.setSize(readCount);
        if (readCount < 0) {
            KitLog.error(TAG, "onRecordError error : " + readCount);
            this.acquisitionCallback.onError(this.currentSession, DataAcquisitionInterface.ErrorType.ACQUISITION_ERROR, new ErrorInfo(3, "record read error"));
            return false;
        }
        if (readCount == 0) {
            KitLog.debug(TAG, "readRecordData read 0 count", new Object[0]);
            return true;
        }
        handleInstantCachedIfNeeded(message);
        int state = this.audioState;
        if ((state & 1) != 0) {
            return handleRecordData(message);
        }
        if ((state & 12) != 0) {
            // Bits 4 and 8 both route the frame into the process cache.
            return handleRecordDataToCache(message);
        }
        if ((state & 2) != 0) {
            return handleRecordDataToWakeup(message);
        }
        KitLog.debug(TAG, "ERROR", new Object[0]);
        return true;
    }

    private void removeVoiceKitSdkContext() {
        // Clear per-request recognize context entries so stale values do not
        // leak into the next interaction.
        VoiceKitSdkContext sdkContext = VoiceKitSdkContext.getInstance();
        sdkContext.removeRecognizeContext(RecognizerIntent.EXT_VOICE_CONTEXT);
        sdkContext.removeRecognizeContext(RecognizerIntent.EXT_APP_CONTEXT);
        sdkContext.removeRecognizeContext("dataType");
    }

    private void reportStartAudioRecord(String recordType, boolean commonRejectionStart) {
        // Record how this capture session started, for operation reporting.
        if (commonRejectionStart) {
            OperationReportUtils.getInstance().getRecordTypeRecord().setRecordType(RecordStartType.COMMON_REJECTION.getType());
        } else if (recordType != null) {
            OperationReportUtils.getInstance().getRecordTypeRecord().setRecordType(recordType);
            KitLog.debug(TAG, "current record type is {}", recordType);
        } else {
            OperationReportUtils.getInstance().getRecordTypeRecord().setRecordType(RecordStartType.OTHER.getType());
        }
        if (RecordStartType.INTERRUPT.getType().equals(recordType)) {
            // Interrupt restarts only stamp the time; the report is sent later.
            OperationReportUtils.getInstance().getDelayTimeRecord().setInterruptRecordStartTime(System.currentTimeMillis());
            return;
        }
        OperationReportUtils.getInstance().reportDelayTimeRecord();
        OperationReportUtils.getInstance().getDelayTimeRecord().resetCache();
        long recordStartTime = commonRejectionStart ? CommonRejection.Helper.calculator().getVadEndTimestamp() : System.currentTimeMillis();
        OperationReportUtils.getInstance().getDelayTimeRecord().setInteraction(String.valueOf((int) BusinessFlowId.getInstance().getInteractionId())).setRecordStartTime(recordStartTime);
    }

    private void resetAudioState() {
        // Drop buffered frames and re-arm volume tracking for a fresh start.
        this.cacheAudioLists.clear();
        cancelSpeechCheck();
        this.lastVolume = -1;
    }

    private void resetFullDuplexAudioState() {
        // Like resetAudioState but keeps the buffered frames: full-duplex
        // turns reuse the running audio stream.
        cancelSpeechCheck();
        this.lastVolume = -1;
    }

    private void saveContextIfNeeded(AssistantMessage<?> assistantMessage) {
        // Persist the header as the full-duplex context, but only when the
        // message is present and actually in full-duplex mode.
        if (assistantMessage == null) {
            return;
        }
        CommonHeader header = assistantMessage.getHeader();
        if (header != null && header.isFullDuplexMode()) {
            lambda$saveContextIfNeeded$1(header);
        }
    }

    private void setVadEndTimestampIfCommonRejection(Session session) {
        // Stamp the VAD end time only while common-rejection mode is active
        // for this interaction (a null session yields a null key, matching
        // the original Optional.orElse(null) behavior).
        InteractionIdInfo info = session == null
                ? null
                : InteractionIdInfo.build(session.getSessionId(), session.getInteractionId());
        if (CommonRejection.Helper.stateManager().isCommonRejectionMode(info)) {
            CommonRejection.Helper.calculator().setVadEndTimestamp(System.currentTimeMillis());
        }
    }

    private void speechCheckStart(final Session session) {
        // Defer the energy/speech check so it arms SPEECH_CHECK_DELAY ms
        // after recognition starts.
        CountDown timer = this.countDown;
        if (timer == null) {
            return;
        }
        timer.d(new CountDown.CountDownFinishListener() {
            @Override
            public final void onFinish() {
                AudioAcquisition.this.lambda$speechCheckStart$37(session);
            }
        }, SPEECH_CHECK_DELAY);
    }

    private boolean startAudioIfNeeded(int flag, AssistantMessage<?> assistantMessage) {
        // Merge the requested state bits and start the recorder when any bit
        // is set; a zero state (or start success) reports true.
        this.audioState |= flag;
        KitLog.debug(TAG, "startAudioIfNeeded state = {} ,flag = {}", Integer.valueOf(this.audioState), Integer.valueOf(flag));
        if (this.audioState == 0) {
            return true;
        }
        return assistantMessage != null && startRecord(assistantMessage.getHeader());
    }

    /**
     * Entry point for starting audio capture + recognition. Three paths:
     * 1) full-duplex with recording already running: only post a new recognize
     *    event on the existing audio stream; 2) half-duplex: start the
     *    recorder and a plain recognize; 3) full-duplex first start: start the
     *    recorder and post the audio/stream/recognize event sequence.
     */
    private void startAudioProcess(Intent intent, AssistantMessage<?> assistantMessage) {
        CommonHeader header = assistantMessage.getHeader();
        if (header.isFullDuplexMode() && this.isRecognizeRecording) {
            KitLog.debug(TAG, "startAudioProcess,post audioStreamId={}", assistantMessage.getHeader().getAudioStreamId());
            VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_VOICE_CONTEXT, fillRecognizeContextString(intent, assistantMessage.getHeader()));
            ModuleInstanceFactory.Ability.recognize().sendFullDuplexRecognizeEvent(assistantMessage.getHeader(), intent);
            return;
        }
        boolean startAudioRecord = startAudioRecord(intent, assistantMessage);
        if (startAudioRecord) {
            OperationReportUtils.getInstance().reportStartRecord();
            OperationReportUtils.getInstance().getIntentionExecuteRecord().setStartTime(String.valueOf(System.currentTimeMillis()));
        }
        if (!header.isFullDuplexMode()) {
            ModuleInstanceFactory.Ability.recognize().startRecognize(assistantMessage.getHeader(), intent);
            if (IAssistantConfig.getInstance().sdkConfig().isNeedVolumeCallback()) {
                speechCheckStart(assistantMessage.getHeader());
            }
            this.acquisitionCallback.onStart(AssistantMessage.builder(Boolean.TRUE, assistantMessage.getHeader()).build(), startAudioRecord);
            return;
        }
        FullDuplexUiStateUtil.setIsAudioRecording(startAudioRecord);
        KitLog.debug(TAG, "startAudioProcess,start audio and post audioStreamId={}", header.getAudioStreamId());
        this.currentSession = assistantMessage.getHeader();
        ModuleInstanceFactory.Ability.recognize().sendFullDuplexAudioEvent(header);
        if (IAssistantConfig.getInstance().sdkConfig().isNeedVolumeCallback() && VoiceKitSdkContext.getInstance().getRecognizeContext(RecognizerIntent.EXT_CLOUD_BUSINESS_RESP_TIMEOUT, Integer.class).isPresent()) {
            // Re-arm the energy/speech detector for the new duplex turn.
            resetFullDuplexAudioState();
            fullDuplexSpeechCheckStart(assistantMessage.getHeader());
        }
        ModuleInstanceFactory.Ability.recognize().sendFullDuplexStreamRequestBodyEvent(assistantMessage.getHeader(), intent);
        this.acquisitionCallback.onStart(AssistantMessage.builder(Boolean.TRUE, assistantMessage.getHeader()).build(), startAudioRecord);
        VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_VOICE_CONTEXT, fillRecognizeContextString(intent, assistantMessage.getHeader()));
        ModuleInstanceFactory.Ability.recognize().sendFullDuplexRecognizeEvent(assistantMessage.getHeader(), intent);
    }

    /**
     * Starts (or joins) the audio record for this recognize request.
     * Handles both app-side self recording (EXT_SELF_RECORDING) and SDK-side
     * recording, updates audioState with the state bits from
     * judgeRecordState, and stamps reporting/rejection timestamps.
     *
     * @return true when recording is (already) running, false when the SDK
     *         recorder failed to start.
     */
    private boolean startAudioRecord(Intent intent, AssistantMessage<?> assistantMessage) {
        this.isAppRecord = SecureIntentUtil.getSecureIntentBoolean(intent, RecognizerIntent.EXT_SELF_RECORDING, false);
        // V5 allows a longer default record window (30s vs 20s).
        this.recordTimeoutTime = SecureIntentUtil.getSecureIntentInt(intent, RecognizerIntent.RECORD_TIME, IAssistantConfig.getInstance().sdkConfig().isSupportV5() ? 30000 : 20000);
        boolean secureIntentBoolean = SecureIntentUtil.getSecureIntentBoolean(intent, RecognizerIntent.CONTINUE_FRONT_VAD, false);
        KitLog.info(TAG, "continueFrontVad is " + secureIntentBoolean);
        if (!secureIntentBoolean) {
            // Not continuing front VAD: discard any previously cached frames.
            this.processCacheAudioDataQueue.clear();
        }
        boolean z = true;
        reportStartAudioRecord(SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.RECORD_TYPE), secureIntentBoolean && CommonRejection.Helper.stateManager().isCommonRejectionMode(InteractionIdInfo.build(assistantMessage)));
        KitLog.info(TAG, "isAppRecord is " + this.isAppRecord);
        this.hasRecordData = false;
        this.hasReadInstantCached = false;
        int judgeRecordState = judgeRecordState(assistantMessage);
        boolean z2 = this.isRecognizeRecording;
        KitLog.info(TAG, "startAudioRecord recordState=" + judgeRecordState);
        if (this.isAppRecord) {
            // App supplies the audio itself; we only track state and timing.
            if (!z2) {
                this.acquisitionCallback.startRecord();
            }
            this.audioState = judgeRecordState | this.audioState;
            this.isRecognizeRecording = true;
            this.isRecording = true;
            this.startTime = SystemClock.elapsedRealtime();
        } else {
            boolean startAudioIfNeeded = startAudioIfNeeded(judgeRecordState, assistantMessage);
            if (startAudioIfNeeded) {
                this.isRecognizeRecording = true;
                if (z2) {
                    // Recorder already running: just restart the timeout clock.
                    this.startTime = SystemClock.elapsedRealtime();
                } else {
                    this.acquisitionCallback.onRecordStart();
                }
            }
            z = startAudioIfNeeded;
        }
        if (CommonRejection.Helper.stateManager().isCommonRejectionMode(InteractionIdInfo.build(assistantMessage))) {
            CommonRejection.Helper.calculator().setVadStartTimestamp(System.currentTimeMillis());
        }
        OperationReportUtils.getInstance().getOperationNluIntentionCollect().setSpeechStartTime(System.currentTimeMillis());
        return z;
    }

    private void startDmDataProcess(Intent intent, VoiceKitMessage voiceKitMessage) {
        // DM results skip audio capture: fabricate a dialog-result session,
        // report an immediate start/stop pair, then hand the DM payload on.
        Session dialogSession = CommonDataUtil.getSessionInstanceWithInitialData();
        dialogSession.setMessageName(MessageConstants.MessageName.MSG_NAME_DIALOG_RESULT);
        voiceKitMessage.setSession(dialogSession);
        this.acquisitionCallback.onStart(AssistantMessage.builder(Boolean.TRUE, voiceKitMessage.getSession()).build(), true);
        this.acquisitionCallback.onStop(voiceKitMessage.getSession(), true);
        this.acquisitionCallback.onDmResult(intent, voiceKitMessage);
    }

    private void startNluDataProcess(Intent intent, AssistantMessage<?> assistantMessage) {
        // NLU input skips audio capture: report an immediate start/stop pair,
        // then forward the NLU payload downstream.
        this.acquisitionCallback.onStart(AssistantMessage.builder(Boolean.TRUE, assistantMessage.getHeader()).build(), true);
        this.acquisitionCallback.onStop(assistantMessage.getHeader(), true);
        this.acquisitionCallback.onNluResult(assistantMessage.getHeader(), intent);
    }

    /**
     * Dispatches one recognition request according to the intent's data type:
     * command/scenario text, NLU payload, plain text, DM directive, or (default)
     * raw audio capture. Also seeds the SDK context with the voice/app context
     * strings and the optional speech accent before dispatching.
     *
     * @param assistantMessage request envelope; its header is the session
     * @param intent           request parameters and payload selectors
     */
    private void startRecognize(AssistantMessage<?> assistantMessage, Intent intent) {
        String voiceContext;
        boolean isAudioFullDuplexRecognize = isAudioFullDuplexRecognize(assistantMessage.getHeader(), intent);
        if (isAudioFullDuplexRecognize || IAssistantConfig.getInstance().sdkConfig().isSupportV5()) {
            String preprocessAsrContext = preprocessAsrContext(SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.EXT_VOICE_CONTEXT));
            VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_APP_CONTEXT, preprocessAsrContext);
            if (isAudioFullDuplexRecognize) {
                voiceContext = preprocessAsrContext;
            } else {
                // V5 but not full-duplex: the filled context overrides both params.
                voiceContext = fillRecognizeContextString(intent, assistantMessage.getHeader());
                VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_VOICE_CONTEXT, voiceContext);
                VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_APP_CONTEXT, voiceContext);
            }
        } else {
            voiceContext = fillRecognizeContextString(intent, assistantMessage.getHeader());
            VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_VOICE_CONTEXT, voiceContext);
        }
        if (intent.hasExtra(RecognizerIntent.EXT_SPEECH_ACCENT)) {
            VoiceKitSdkContext.getInstance().setRecognizeParam(RecognizerIntent.EXT_SPEECH_ACCENT, SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.EXT_SPEECH_ACCENT));
        }
        cacheDataType(SecureIntentUtil.getSecureIntentString(intent, "dataType"));
        if (isCmdOrScenarioType(intent)) {
            startTextProcess(intent, assistantMessage);
            return;
        }
        if (TextUtils.equals(SecureIntentUtil.getSecureIntentString(intent, "dataType"), RecognizerIntent.NLU_DATA_TYPE)) {
            intent.putExtra(RecognizerIntent.EXT_VOICE_CONTEXT, voiceContext);
            startNluDataProcess(intent, assistantMessage);
            return;
        }
        if (intent.hasExtra("text")) {
            // FIX: decompiled code referenced an undeclared register variable (r1)
            // here; capture the text once and check its UTF-8 byte length.
            String text = SecureIntentUtil.getSecureIntentString(intent, "text");
            if (text != null) {
                try {
                    if (text.getBytes("UTF-8").length > TEXT_RECOGNIZE_LEN_LIMIT) {
                        KitLog.warn(TAG, "text is too long");
                        this.acquisitionCallback.onError(this.currentSession, DataAcquisitionInterface.ErrorType.ACQUISITION_ILLEGAL_INPUT, new ErrorInfo(2, "text is too long"));
                        return;
                    }
                } catch (UnsupportedEncodingException unused) {
                    // UTF-8 is mandated by the JLS; treat as best-effort and fall through.
                    KitLog.error(TAG, "UnsupportedEncodingException");
                }
            }
            startTextProcess(intent, assistantMessage);
            return;
        }
        if (!intent.hasExtra(RecognizerIntent.DM_DATA_TYPE)) {
            // Default path: live audio recognition; optionally turn AEC on first.
            Session session = (Session) Optional.ofNullable(assistantMessage).map(AssistantMessage::getHeader).orElse(null);
            if (isNeedTurnOnAec(intent, session)) {
                AecStateUtil.tryUpdateAecState(true);
            }
            startAudioProcess(intent, assistantMessage);
            OperationReportUtils.getInstance().getOperationNluIntentionCollect().setReqType("voice");
            return;
        }
        VoiceKitMessage dmMessage = (VoiceKitMessage) GsonUtils.toBean(SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.DM_DATA_TYPE), VoiceKitMessage.class);
        if (dmMessage == null) {
            KitLog.warn(TAG, "invalid directiveStr");
        } else {
            startDmDataProcess(intent, dmMessage);
        }
    }

    /**
     * Starts the microphone capture for the given session.
     * No-op (returns true) when already recording; reports errors through the
     * acquisition callback when permission is missing or the recorder won't start.
     *
     * @return true when recording is (now) active, false on failure
     */
    private boolean startRecord(Session session) {
        KitLog.info(TAG, "start Recording begin");
        if (this.isRecording) {
            KitLog.error(TAG, "start already in recording");
            return this.isRecording;
        }
        if (!checkRecordAudioPermission()) {
            KitLog.error(TAG, "no record permission");
            this.acquisitionCallback.onError(this.currentSession, DataAcquisitionInterface.ErrorType.ACQUISITION_NO_PERMISSION, new ErrorInfo(4, "no record permission"));
            return false;
        }
        this.isRecording = tryToStartRecording();
        if (!this.isRecording) {
            KitLog.error(TAG, "start record failed");
            this.acquisitionCallback.onError(this.currentSession, DataAcquisitionInterface.ErrorType.ACQUISITION_ERROR, new ErrorInfo(18, "start record failed"));
            return false;
        }
        this.startTime = SystemClock.elapsedRealtime();
        // Kick the acquisition thread so it starts draining the recorder.
        Handler acquisitionHandler = this.audioAcquisitionHandler;
        if (acquisitionHandler != null) {
            acquisitionHandler.obtainMessage(1, session).sendToTarget();
        }
        KitLog.info(TAG, "startRecording end");
        return this.isRecording;
    }

    /**
     * Handles a text-type request: signals start (start flag is false on an
     * auto-mode retry), stop, persists context if needed, then forwards the result.
     */
    private void startTextProcess(Intent intent, AssistantMessage<?> assistantMessage) {
        boolean isAutoModeRetry = SecureIntentUtil.getSecureIntentBoolean(intent, RecognizerIntent.AUTO_MODE_RETRY, false);
        CommonHeader header = assistantMessage.getHeader();
        this.acquisitionCallback.onStart(AssistantMessage.builder(Boolean.valueOf(!isAutoModeRetry), header).build(), true);
        this.acquisitionCallback.onStop(header, true);
        saveContextIfNeeded(assistantMessage);
        this.acquisitionCallback.onResult(header, intent);
    }

    /**
     * Clears the given consumer flag from the audio-state bitmask and stops the
     * recorder once no consumer remains (state reaches 0).
     */
    private void stopAudioIfNeeded(int flag) {
        this.audioState &= ~flag;
        KitLog.debug(TAG, "stopAudioIfNeeded state = {} ,flag = {}", Integer.valueOf(this.audioState), Integer.valueOf(flag));
        if (this.audioState == 0) {
            stopRecord();
        }
    }

    /**
     * Ends the recognize-recording phase, but only when no recognize-related
     * audio-state bits (mask 13 = 1|4|8) are still set. App-owned recording is
     * stopped via the callback; internal recording disables AEC first.
     */
    private void stopRecognizeRecord() {
        if ((this.audioState & 13) != 0) {
            return;
        }
        this.isRecognizeRecording = false;
        if (this.isAppRecord) {
            this.acquisitionCallback.stopRecord();
        } else {
            AecStateUtil.tryUpdateAecState(false);
            this.acquisitionCallback.onRecordEnd();
        }
        FullDuplexUiStateUtil.setIsAudioRecording(false);
    }

    /**
     * Stops the recorder: stamps the record-end time into the matching delay
     * record (interrupt vs. normal start), flushes the operation reports, and
     * releases the underlying recorder via audioRecordImpl.f().
     */
    private void stopRecord() {
        OperationReportUtils reporter = OperationReportUtils.getInstance();
        long endTime = System.currentTimeMillis();
        boolean startedByInterrupt = RecordStartType.INTERRUPT.getType().equals(reporter.getRecordTypeRecord().getRecordType());
        if (startedByInterrupt) {
            reporter.getDelayTimeRecord().setInterruptRecordEndTime(endTime);
        } else {
            reporter.getDelayTimeRecord().setRecordEndTime(endTime);
        }
        reporter.reportRecordTypeRecord();
        reporter.reportEndRecord();
        this.isRecording = false;
        this.audioRecordImpl.f();
    }

    /**
     * Starts the underlying AudioRecord and polls (up to 10 retries, 20 ms apart)
     * until it reports RECORDSTATE_RECORDING.
     *
     * @return true once the recorder is actually recording; false on null
     *         recorder, timeout, or IllegalStateException from start
     */
    private boolean tryToStartRecording() {
        if (this.audioRecordImpl.b() == null) {
            this.audioRecordImpl.d();
        }
        try {
            AudioRecord recorder = this.audioRecordImpl.b();
            if (recorder == null) {
                KitLog.warn(TAG, "audioRecorder is null");
                return false;
            }
            KitLog.info(TAG, "tryToStartRecording start");
            this.audioRecordImpl.e();
            // Recorder start is asynchronous on some devices; retry with backoff.
            for (int attempt = 0; recorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING; attempt++) {
                if (attempt >= 10) {
                    KitLog.error(TAG, "tryToStartRecording: timeout");
                    return false;
                }
                SystemClock.sleep(20L);
                KitLog.debug(TAG, "tryToStartRecording: {}", Integer.valueOf(attempt + 1));
                this.audioRecordImpl.e();
            }
            KitLog.info(TAG, "tryToStartRecording end");
            return true;
        } catch (IllegalStateException unused) {
            KitLog.error(TAG, "startRecording IllegalStateException");
            return false;
        }
    }

    /**
     * Overwrites the VAD front (leading) time inside the recognize context with
     * {@code j} milliseconds.
     *
     * The three decompiled {@code lambda$updateVadFrontTime$24/25/26} helpers
     * (defined elsewhere in this file) navigate to the payload carrying the VAD
     * settings and write the new value into its JsonObject.
     * NOTE(review): the exact payload path is not visible in this chunk —
     * confirm against the lambda helper bodies.
     *
     * @param recognizeContext context to patch; a null context is a silent no-op
     * @param j                new VAD front time in milliseconds
     */
    private void updateVadFrontTime(RecognizeContext recognizeContext, final long j) {
        KitLog.info(TAG, "updateVadFrontTime, new vad front time = " + j);
        // Null-safe navigation: context -> header/payload -> JsonObject -> mutate.
        Optional.ofNullable(recognizeContext).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                HeaderPayload lambda$updateVadFrontTime$24;
                lambda$updateVadFrontTime$24 = AudioAcquisition.lambda$updateVadFrontTime$24((RecognizeContext) obj);
                return lambda$updateVadFrontTime$24;
            }
        }).map(new Function() {
            @Override
            public final Object apply(Object obj) {
                JsonObject lambda$updateVadFrontTime$25;
                lambda$updateVadFrontTime$25 = AudioAcquisition.lambda$updateVadFrontTime$25((HeaderPayload) obj);
                return lambda$updateVadFrontTime$25;
            }
        }).ifPresent(new Consumer() {
            @Override
            public final void accept(Object obj) {
                AudioAcquisition.lambda$updateVadFrontTime$26(j, (JsonObject) obj);
            }
        });
    }

    /**
     * Cancels the current acquisition: resets the audio-state bitmask, tears down
     * either the external (app-owned) or internal recording path, and notifies
     * the callback.
     */
    @Override
    public void cancel(Session session, boolean z) {
        resetAudioState();
        Runnable cancelAction = this.isAppRecord ? this::externalCancel : this::innerCancel;
        cancelAction.run();
        this.acquisitionCallback.onCancel(session, z);
    }

    /**
     * Stores the acquisition callback and, when the record-audio permission is
     * already granted, pre-initializes the recorder with the app context.
     * A null callback is rejected and logged.
     */
    @Override
    public void init(DataAcquisitionInterface.CallBack callBack) {
        if (callBack == null) {
            KitLog.error(TAG, "acquisitionCallback is null");
            return;
        }
        this.acquisitionCallback = callBack;
        if (!checkRecordAudioPermission()) {
            return;
        }
        this.audioRecordImpl.c(IAssistantConfig.getInstance().getAppContext());
    }

    /**
     * Decides whether the full-duplex interaction id must be refreshed: it is
     * kept only while a full-duplex recognize recording is already in progress;
     * otherwise the extra flags that a new id is needed. Result is delivered via
     * onJudgeUpdateIdResult.
     */
    @Override
    public void judgeUpdateInteractionId(AssistantMessage<?> assistantMessage) {
        KitLog.debug(TAG, "judgeUpdateInteractionId", new Object[0]);
        if (assistantMessage == null || assistantMessage.getHeader() == null) {
            return;
        }
        Intent intent = (Intent) assistantMessage.getBody(Intent.class).orElse(new Intent());
        boolean keepCurrentId = isAudioFullDuplexRecognize(assistantMessage.getHeader(), intent) && this.isRecognizeRecording;
        intent.putExtra(RecognizerIntent.EXT_FULLDUPLEX_NEED_UPDATE_ID, !keepCurrentId);
        this.acquisitionCallback.onJudgeUpdateIdResult(assistantMessage);
    }

    /**
     * Full teardown: releases the recorder, clears all state flags and cached
     * audio queues, and shuts down the acquisition handler/thread.
     */
    @Override
    public void release() {
        KitLog.debug(TAG, "release", new Object[0]);
        this.audioRecordImpl.g();
        this.isRecording = false;
        this.isRecognizeRecording = false;
        this.audioState = 0;
        FullDuplexUiStateUtil.setIsAudioRecording(false);
        this.processCacheAudioDataQueue.clear();
        this.instantCacheAudioDataQueue.clear();
        Handler pendingHandler = this.audioAcquisitionHandler;
        if (pendingHandler != null) {
            pendingHandler.removeCallbacksAndMessages(null);
            this.audioAcquisitionHandler = null;
        }
        HandlerThread pendingThread = this.audioAcquisitionThread;
        if (pendingThread != null) {
            pendingThread.quit();
            this.audioAcquisitionThread = null;
        }
        resetAudioState();
    }

    /**
     * Restarts the speech/volume detector, but only when the incoming message
     * belongs to the currently active session (same session id AND same
     * interaction id); anything else is ignored.
     */
    @Override
    public void restartVolumeDetection(AssistantMessage<?> assistantMessage) {
        KitLog.info(TAG, "restartVolumeDetection");
        if (assistantMessage == null || this.currentSession == null) {
            return;
        }
        // FIX: the original dereferenced getHeader() without a null check, unlike
        // sibling methods (e.g. judgeUpdateInteractionId) — guard against NPE.
        Session header = assistantMessage.getHeader();
        if (header == null) {
            return;
        }
        if (!TextUtils.equals(header.getSessionId(), this.currentSession.getSessionId())
                || header.getInteractionId() != this.currentSession.getInteractionId()) {
            return;
        }
        KitLog.info(TAG, "restartVolumeDetection speechCheck restart");
        this.speechCheck.a();
    }

    /**
     * Public entry point for one acquisition round. Resets the audio state for
     * non-full-duplex sessions, validates that the body is an Intent and that any
     * supplied EXT_VOICE_CONTEXT is valid JSON, then hands off to startRecognize.
     * Invalid context is reported via ACQUISITION_ILLEGAL_INPUT.
     */
    @Override
    public void start(AssistantMessage<?> assistantMessage) {
        KitLog.debug(TAG, "start acquisition begin", new Object[0]);
        if (assistantMessage == null) {
            return;
        }
        CommonHeader header = assistantMessage.getHeader();
        if (header != null && !header.isFullDuplexMode()) {
            resetAudioState();
        }
        this.currentSession = header;
        if (!(assistantMessage.getBody() instanceof Intent)) {
            KitLog.warn(TAG, "error msg body is not intent " + assistantMessage.getType() + " msgName->" + MessageSparse.getName(assistantMessage.getType()));
            return;
        }
        OperationReportUtils.getInstance().getOperationNluIntentionCollect().reset();
        Intent intent = (Intent) assistantMessage.getBody();
        boolean contextValid = !intent.hasExtra(RecognizerIntent.EXT_VOICE_CONTEXT)
                || GsonUtils.isJsonValid(SecureIntentUtil.getSecureIntentString(intent, RecognizerIntent.EXT_VOICE_CONTEXT));
        if (!contextValid) {
            this.acquisitionCallback.onError(this.currentSession, DataAcquisitionInterface.ErrorType.ACQUISITION_ILLEGAL_INPUT, new ErrorInfo(2, "invalid input parameter."));
            KitLog.error(TAG, "invalid input parameter: EXT_VOICE_CONTEXT");
            return;
        }
        removeVoiceKitSdkContext();
        OperationReportUtils.getInstance().getIntentionExecuteRecord().setSpeechAccent((String) VoiceKitSdkContext.getInstance().get(RecognizerIntent.EXT_SPEECH_ACCENT, String.class).orElse("mandarin"));
        startRecognize(assistantMessage, intent);
    }

    /**
     * Full-duplex entry point: sends the audio event for the new stream, marks the
     * instant cache unread, and (re)starts audio capture. For app-owned recording
     * only the state bits (1|4) are set; otherwise capture is started here.
     * Speech checking is restarted when volume callbacks and a cloud response
     * timeout are both configured.
     */
    @Override
    public void startByFullduplex(AssistantMessage<?> assistantMessage) {
        KitLog.debug(TAG, "final Asr start acquisition begin", new Object[0]);
        if (assistantMessage == null || assistantMessage.getHeader() == null) {
            return;
        }
        CommonHeader header = assistantMessage.getHeader();
        KitLog.debug(TAG, "sendFullDuplexAudioEvent, lastAsr send audio event audioStreamId = {}", header.getAudioStreamId());
        this.currentSession = header;
        ModuleInstanceFactory.Ability.recognize().sendFullDuplexAudioEvent(header);
        this.hasReadInstantCached = false;
        boolean started;
        if (this.isAppRecord) {
            this.audioState |= 5;
            started = true;
        } else {
            started = startAudioIfNeeded(5, assistantMessage);
        }
        if (IAssistantConfig.getInstance().sdkConfig().isNeedVolumeCallback()
                && VoiceKitSdkContext.getInstance().getRecognizeContext(RecognizerIntent.EXT_CLOUD_BUSINESS_RESP_TIMEOUT, Integer.class).isPresent()) {
            resetFullDuplexAudioState();
            fullDuplexSpeechCheckStart(header);
        }
        OperationReportUtils.getInstance().getIntentionExecuteRecord().setStartTime(String.valueOf(System.currentTimeMillis()));
        this.acquisitionCallback.onStart(AssistantMessage.builder(Boolean.TRUE, header).build(), started);
    }

    /**
     * Wake-up entry point. When the app records its own wake-up audio
     * (EXT_WAKEUP_SELF_RECORD), only the wake-up state bit (2) is set and the app
     * is asked to start free-wakeup recording; otherwise internal capture is
     * started for the wake-up consumer.
     */
    @Override
    public void startByWakeUp(AssistantMessage<?> assistantMessage) {
        KitLog.debug(TAG, "WakeUp start acquisition begin", new Object[0]);
        if (assistantMessage == null) {
            return;
        }
        this.currentSession = assistantMessage.getHeader();
        if (!(assistantMessage.getBody() instanceof Intent)) {
            return;
        }
        this.isAppSelfWakeupRecord = SecureIntentUtil.getSecureIntentBoolean((Intent) assistantMessage.getBody(), WakeupIntent.EXT_WAKEUP_SELF_RECORD, false);
        KitLog.debug(TAG, "isAppSelfWakeupRecord :{}", Boolean.valueOf(this.isAppSelfWakeupRecord));
        if (!this.isAppSelfWakeupRecord) {
            startAudioIfNeeded(2, assistantMessage);
            return;
        }
        this.audioState |= 2;
        KitLog.debug(TAG, "startByWakeUp state = {}", Integer.valueOf(this.audioState));
        this.isRecording = true;
        this.acquisitionCallback.startFreeWakeupRecord();
    }

    /**
     * Stops the current acquisition: resets state, records the VAD end timestamp
     * for common rejection, and delegates to the external or internal stop path.
     *
     * @return the result of the delegated stop call
     */
    @Override
    public boolean stop(Session session) {
        resetAudioState();
        setVadEndTimestampIfCommonRejection(session);
        if (this.isAppRecord) {
            return externalStop(session);
        }
        return innerStop(session);
    }

    /**
     * Full-duplex VAD-triggered stop: forwards to {@link #stop(Session)} with the
     * message's header; null messages are ignored.
     */
    @Override
    public void stopByFullduplex(AssistantMessage<?> assistantMessage) {
        KitLog.debug(TAG, "vad stop acquisition begin", new Object[0]);
        if (assistantMessage != null) {
            stop(assistantMessage.getHeader());
        }
    }

    /**
     * Wake-up stop. For app-self-recorded wake-up, only clears the wake-up bit
     * (2) and asks the app to stop free-wakeup recording once no consumers
     * remain; otherwise delegates to the shared stopAudioIfNeeded path.
     */
    @Override
    public void stopByWakeUp(AssistantMessage<?> assistantMessage) {
        KitLog.debug(TAG, "freeWakeUp stop acquisition begin", new Object[0]);
        if (!this.isAppSelfWakeupRecord) {
            stopAudioIfNeeded(2);
            return;
        }
        this.audioState &= ~2;
        KitLog.debug(TAG, "stopByWakeUp state = {}", Integer.valueOf(this.audioState));
        if (this.audioState != 0) {
            return;
        }
        this.isRecording = false;
        this.acquisitionCallback.stopFreeWakeupRecord();
    }

    /**
     * Stops speech/volume checking by delegating to {@code cancelSpeechCheck()}
     * (defined elsewhere in this file).
     */
    @Override
    public void stopSpeechCheck() {
        KitLog.debug(TAG, "stopSpeechCheck", new Object[0]);
        cancelSpeechCheck();
    }

    /**
     * Ingests one chunk of externally supplied audio. Flushes pending VAD/process
     * caches first, drops empty chunks, then routes the data by audio-state bits:
     * bit 1 -> live recognize path, bits 4 or 8 -> cache path.
     *
     * FIX: merged the duplicated bit-4/bit-8 branches (identical bodies) and
     * removed the dead {@code length < 0} check — an array length is never
     * negative, so that "onRecordError" branch was unreachable.
     */
    @Override
    public void writeAudio(AssistantMessage<?> assistantMessage) {
        if (assistantMessage == null) {
            return;
        }
        handleVadCachedIfNeeded();
        handleProcessCachedIfNeeded();
        byte[] buffer = (byte[]) assistantMessage.getBody(byte[].class).orElse(new byte[0]);
        int length = buffer.length;
        if (length == 0) {
            KitLog.debug(TAG, "readRecordData read 0 count", new Object[0]);
            return;
        }
        AudioDataMessage audioDataMessage = new AudioDataMessage();
        audioDataMessage.setBuffers(buffer);
        audioDataMessage.setSize(length);
        handleInstantCachedIfNeeded(audioDataMessage);
        int state = this.audioState;
        if ((state & 1) != 0) {
            handleRecordData(audioDataMessage);
        } else if ((state & 12) != 0) {
            // bits 4 and 8 share the same cache handling
            handleRecordDataToCache(audioDataMessage);
        } else {
            KitLog.debug(TAG, "ERROR", new Object[0]);
        }
    }
}