IBM Watson speech to text for Android: NoClassDefFoundError

427 views

Has anyone seen this error before, and do you know how to fix it?

I am using the IBM Watson speech-to-text library, and I am trying to make it record the voice while the button is pressed and stop recording when the button is released.

I have read the code sample given, but I get this error:

java.lang.NoClassDefFoundError: com.ibm.watson.developer_cloud.android.library.audio.opus.JNAOpus

Fragment where I record the voice:

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.ibm.watson.developer_cloud.android.library.audio.MicrophoneInputStream;
import com.ibm.watson.developer_cloud.android.library.audio.utils.ContentType;
import com.ibm.watson.developer_cloud.speech_to_text.v1.SpeechToText;
import com.ibm.watson.developer_cloud.speech_to_text.v1.model.RecognizeOptions;
import com.ibm.watson.developer_cloud.speech_to_text.v1.model.SpeechResults;
import com.ibm.watson.developer_cloud.speech_to_text.v1.websocket.BaseRecognizeCallback;
import com.zainalfahrudin.noteapp.R;

import java.io.IOException;
import java.io.InputStream;

/**
 * Fragment that records microphone audio while the record view is pressed
 * (ACTION_DOWN) and streams it to the IBM Watson Speech to Text service over
 * a WebSocket, showing interim transcriptions in {@code tvResult}.
 *
 * Recording stops when the touch is released (ACTION_UP), which closes the
 * microphone stream and ends the recognition session.
 */
public class AddNoteSpeechFragment extends Fragment {

    private ImageView ivRecord;
    private RelativeLayout viewBg;
    private Button button;
    private SpeechToText speechService;
    private InputStream myInputStream;
    private RecognizeOptions options;
    private TextView tvResult;
    private ProgressBar progress;
    // Guards against starting a second recognition session while one is active.
    private boolean isRecording;
    private String TAG = getClass().getSimpleName();

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.add_speech_layout, container, false);
        ivRecord = (ImageView) view.findViewById(R.id.ivRecord);
        viewBg = (RelativeLayout) view.findViewById(R.id.rvBgRecord);
        button = (Button) view.findViewById(R.id.btnSubmit);
        tvResult = (TextView) view.findViewById(R.id.tvVoiceResult);
        progress = (ProgressBar) view.findViewById(R.id.progressRecord);
        return view;
    }

    /** Standard factory: creates the fragment and attaches the given arguments. */
    public static AddNoteSpeechFragment newInstance(Bundle args) {
        AddNoteSpeechFragment fr = new AddNoteSpeechFragment();
        fr.setArguments(args);
        return fr;
    }


    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        speechService = new SpeechToText();

        // SECURITY: credentials are hardcoded in source. Move them to
        // BuildConfig / a resource that is excluded from version control,
        // and rotate these now-exposed values.
        speechService.setUsernameAndPassword("60b15d7e-84c3-4a35-b3c4-dc50ddf9d445", "4ugHjjouy5TH");
        // NOTE: do NOT call speechService.getModels() here — it performs a
        // blocking network request, and Android throws
        // NetworkOnMainThreadException for network I/O on the main thread.

        options = new RecognizeOptions.Builder()
                .continuous(true)
                .interimResults(true)
                // FIX: the model name ends in the letter 'l' (Model), not the
                // digit '1' — "en-US_BroadbandMode1" is rejected by the service.
                .model("en-US_BroadbandModel")
                .contentType(ContentType.OPUS.toString())
                .inactivityTimeout(2000)
                .build();

        viewBg.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        // Pressed: open the microphone and start streaming.
                        myInputStream = new MicrophoneInputStream();
                        ivRecord.setImageDrawable(getResources().getDrawable(R.drawable.ic_mic_black_48dp));
                        progress.setVisibility(View.VISIBLE);
                        // FIX: the original code constructed this Thread but
                        // never called start(), so translateVoice() never ran.
                        new Thread(new Runnable() {
                            @Override
                            public void run() {
                                if (!isRecording) {
                                    isRecording = true;
                                    translateVoice();
                                } else {
                                    Log.d(getTag(), "run: has recording ...");
                                }
                            }
                        }).start();
                        Log.d(TAG, "onTouch: isrecording = " + isRecording);
                        break;
                    case MotionEvent.ACTION_UP:
                        // Released: stop recording and close the mic stream,
                        // which ends the recognition session.
                        progress.setVisibility(View.INVISIBLE);
                        ivRecord.setImageDrawable(getResources().getDrawable(R.drawable.ic_mic_none_black_48dp));
                        isRecording = false;
                        Log.d(TAG, "onTouch: isrecording = " + isRecording);
                        try {
                            myInputStream.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        break;
                }

                v.onTouchEvent(event);
                return true;
            }
        });
    }

    /**
     * Streams {@code myInputStream} to Watson over a WebSocket using
     * {@code options}; interim/final transcriptions arrive on a background
     * thread via the callback. Must be called off the main thread.
     */
    private void translateVoice() {
        Log.d(TAG, "translateVoice: run!");
        try {
            speechService.recognizeUsingWebSocket(myInputStream,
                    options, new BaseRecognizeCallback() {
                        @Override
                        public void onTranscription(SpeechResults speechResults) {
                            final String text = speechResults.getResults().get(0).getAlternatives().get(0).getTranscript();
                            System.out.println(text);
                            // FIX: this callback runs on a background thread;
                            // views may only be touched from the UI thread.
                            if (getActivity() != null) {
                                getActivity().runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        tvResult.setText(text);
                                    }
                                });
                            }
                        }

                        @Override
                        public void onError(Exception e) {
                            showSnackBar(e.getMessage());
                        }

                        @Override
                        public void onDisconnected() {
                            // FIX: corrected user-visible typo "disconected".
                            showSnackBar("disconnected");
                        }

                    });
        } catch (Exception e) {
            e.printStackTrace();
            showSnackBar(e.getMessage());
        }
    }

    /** Shows a short snackbar anchored to the submit button. */
    private void showSnackBar(String message) {
        Snackbar.make(button, message, Snackbar.LENGTH_SHORT).show();
    }
}

gradle

dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    compile(name: 'core-release', ext: 'aar')
    //compile project(path: ':core')
    compile(name: 'cloudant', ext: 'aar')
    //compile project(path: ':cloudant')
    compile 'com.cloudant:cloudant-sync-datastore-android:0.15.5'
    compile 'com.android.support:multidex:1.0.1'
    //compile project(path: ':analytics')
    compile(name: 'analytics-release', ext: 'aar')
    compile(
            "com.android.support:support-v4:$supportLibraryVersion",
            "com.android.support:gridlayout-v7:$supportLibraryVersion",
            "com.android.support:appcompat-v7:$supportLibraryVersion",
            "com.android.support:cardview-v7:$supportLibraryVersion",
            "com.android.support:design:$supportLibraryVersion"
    )
    // transitive dependencies are not pulled from local .aar, so we must include all of them:
    compile 'com.squareup.okhttp:okhttp:2.5.0'
    compile 'com.squareup.retrofit:retrofit:1.9.0'
    compile 'com.squareup.picasso:picasso:2.5.2'
    compile 'com.doomonafireball.betterpickers:library:1.6.0'
    compile 'com.ibm.watson.developer_cloud:text-to-speech:3.5.1'
    compile 'com.ibm.watson.developer_cloud:speech-to-text:3.5.2'
    compile 'com.ibm.watson.developer_cloud:language-translator:3.5.2'

    // FIX for NoClassDefFoundError on ...audio.opus.JNAOpus: the SDK's Opus
    // encoder is backed by JNA, and the local .aar does not pull transitive
    // dependencies (see note above), so JNA's Android artifact must be added
    // explicitly. TODO(review): confirm the version matches what the Watson
    // Android SDK 0.2.1 was built against.
    compile 'net.java.dev.jna:jna:4.2.2@aar'

    compile project(':android-sdk-0.2.1-aar-with-dependencies')
}

logcat

09:34:56.876 8480-8714/zainalfahrudin.noteapp E/AndroidRuntime: FATAL EXCEPTION: Thread-780
                                                                          Process: zainalfahrudin.noteapp, PID: 8480
                                                                          java.lang.NoClassDefFoundError: com.ibm.watson.developer_cloud.android.library.audio.opus.JNAOpus
                                                                              at com.ibm.watson.developer_cloud.android.library.audio.opus.OggOpusEnc.initEncoder(OggOpusEnc.java:64)
                                                                              at com.ibm.watson.developer_cloud.android.library.audio.opus.OggOpusEnc.<init>(OggOpusEnc.java:43)
                                                                              at com.ibm.watson.developer_cloud.android.library.audio.MicrophoneCaptureThread.run(MicrophoneCaptureThread.java:66)
0

There are 0 answers