Error when calling a web service method that uses an external jar from Android



I am trying to call some Java methods from an Android device. I access my Java methods through an Axis2 web service.

Below is my complete Java class, in which I have written the two methods to be called from the Android device. Somehow it only calls the get_wav_byte() method successfully; for the other method it gives me an error like this:

at java.lang.Thread.run(Thread.java:662)
Caused by: java.lang.NoClassDefFoundError: edu/cmu/sphinx/util/props/ConfigurationManager

ConfigurationManager is a class from the external jar library that I use in the recognize_wave(String wavePath) method. I have also checked that edu/cmu/sphinx/util/props/ConfigurationManager is present in the jar file, yet it still gives me the error. My complete Java and Android code is given below.
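As a quick sanity check, it can help to confirm that the class is actually visible to the classloader the service runs under, not just present in the jar on disk. Here is a minimal diagnostic sketch (the method name checkSphinxOnClasspath is illustrative, not part of the original service; the class name is taken from the stack trace above) that could be exposed as an extra web service method:

public static String checkSphinxOnClasspath() {
    try {
        // Ask the runtime classloader for the class named in the NoClassDefFoundError.
        Class.forName("edu.cmu.sphinx.util.props.ConfigurationManager");
        return "ConfigurationManager is visible at runtime";
    } catch (ClassNotFoundException e) {
        // The jar is on the compile-time build path but missing from the
        // runtime classpath of the container the service is deployed in.
        return "Not found at runtime: " + e;
    }
}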

Java method:

package edu.cmu.sphinx.demo.transcriber;

import edu.cmu.sphinx.frontend.util.AudioFileDataSource;
import edu.cmu.sphinx.recognizer.Recognizer;
import edu.cmu.sphinx.result.Result;
import edu.cmu.sphinx.util.props.ConfigurationManager;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

/** A simple example that shows how to transcribe a continuous audio file that has multiple utterances in it. */
public class Transcriber {
    // private static final String PATH = "file:///D:\\Sound\\";

    @SuppressWarnings("null")
    public static String recognize_wave(String wavePath) throws MalformedURLException {
        String resultText = "";
        URL audioURL = new URL(wavePath);
        URL configURL = Transcriber.class.getResource("config.xml");
        ConfigurationManager cm = new ConfigurationManager(configURL);
        Recognizer recognizer = (Recognizer) cm.lookup("recognizer");

        // Allocate the resources necessary for the recognizer.
        recognizer.allocate();

        // Configure the audio input for the recognizer.
        AudioFileDataSource dataSource = (AudioFileDataSource) cm.lookup("audioFileDataSource");
        dataSource.setAudioFile(audioURL, null);

        // Loop until the last utterance in the audio file has been decoded,
        // in which case the recognizer will return null.
        Result result;
        while ((result = recognizer.recognize()) != null) {
            // Note: this keeps only the last utterance's text.
            resultText = result.getBestResultNoFiller();
            System.out.println(resultText);
        }
        return resultText;
    }

    public String get_wav_byte(byte[] wavbite, String path) throws IOException {
        String result1 = "null";
        final String PATH = "file:///D:\\Sound\\"; // unused; backslashes must be escaped in Java literals

        try {
            File dstFile = new File(path);
            FileOutputStream out = new FileOutputStream(dstFile);
            out.write(wavbite, 0, wavbite.length);
            out.close();
        } catch (IOException e) {
            System.out.println("IOException : " + e);
        }

        try {
            result1 = recognize_wave(path);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }

        return result1;
    }
}

My Android code that calls this method using kSOAP2 is as follows:

package com.varma.samples.audiorecorder;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.ksoap2.SoapEnvelope;
import org.ksoap2.SoapFault;
import org.ksoap2.serialization.MarshalBase64;
import org.ksoap2.serialization.SoapObject;
import org.ksoap2.serialization.SoapSerializationEnvelope;
import org.ksoap2.transport.HttpTransportSE;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Base64;
import android.util.Log;
import android.view.View;
import android.view.ViewDebug.FlagToString;
import android.widget.Button;
import android.widget.TextView;
public class RecorderActivity extends Activity {
    private static final int RECORDER_BPP = 16;
    private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
    private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
    private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
    private static String AUDIO_WAV_FILE = "";
    private static final int RECORDER_SAMPLERATE = 16000;
    // CHANNEL_CONFIGURATION_MONO is deprecated; CHANNEL_IN_MONO is the modern constant.
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;

    private AudioRecord recorder = null;
    private int bufferSize = 0;
    private Thread recordingThread = null;
    private boolean isRecording = false;

    @SuppressLint("NewApi")
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setButtonHandlers();
        enableButtons(false);
        bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
    }
    private void setButtonHandlers() {
        ((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
        ((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
    }

    private void enableButton(int id, boolean isEnable) {
        ((Button) findViewById(id)).setEnabled(isEnable);
    }

    private void enableButtons(boolean isRecording) {
        enableButton(R.id.btnStart, !isRecording);
        enableButton(R.id.btnStop, isRecording);
    }

    private String getFilename() {
        String filepath = Environment.getExternalStorageDirectory().getPath();
        File file = new File(filepath, AUDIO_RECORDER_FOLDER);
        if (!file.exists()) {
            file.mkdirs();
        }
        return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + AUDIO_RECORDER_FILE_EXT_WAV);
    }

    private String getTempFilename() {
        String filepath = Environment.getExternalStorageDirectory().getPath();
        File file = new File(filepath, AUDIO_RECORDER_FOLDER);
        if (!file.exists()) {
            file.mkdirs();
        }
        File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);
        if (tempFile.exists())
            tempFile.delete();
        return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
    }
@SuppressLint({ "NewApi", "NewApi" })
private void startRecording(){
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
}
@SuppressLint({ "NewApi", "NewApi", "NewApi" })
private void writeAudioDataToFile(){
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int read = 0;
if(null != os){
while(isRecording){
read = recorder.read(data, 0, bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
@SuppressLint({ "NewApi", "NewApi" })
private void stopRecording(){
if(null != recorder){
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(),getFilename());
deleteTempFile();
}
    private void deleteTempFile() {
        File file = new File(getTempFilename());
        file.delete();
    }

    @SuppressLint("NewApi")
    private void copyWaveFile(String inFilename, String outFilename) {
        FileInputStream in = null;
        FileOutputStream out = null;
        long totalAudioLen = 0;
        long totalDataLen = totalAudioLen + 36;
        long longSampleRate = 16000;
        int channels = 1;
        long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
        byte[] data = new byte[bufferSize];
        try {
            in = new FileInputStream(inFilename);
            out = new FileOutputStream(outFilename);
            totalAudioLen = in.getChannel().size();
            totalDataLen = totalAudioLen + 36;
            AppLog.logString("File size: " + totalDataLen);
            WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
                    longSampleRate, channels, byteRate);
            // Write only the bytes actually read, so the final partial
            // buffer is not padded with stale data from the previous read.
            int bytesRead;
            while ((bytesRead = in.read(data)) != -1) {
                out.write(data, 0, bytesRead);
            }
            in.close();
            out.close();
            AUDIO_WAV_FILE = outFilename;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Read a wav file and convert it to a byte array.
    public static byte[] getBytesFromFile(File file) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BufferedInputStream in = new BufferedInputStream(new FileInputStream(file));
        int read;
        byte[] buff = new byte[1024];
        while ((read = in.read(buff)) > 0) {
            out.write(buff, 0, read);
        }
        in.close();
        out.flush();
        return out.toByteArray();
    }
    private void WriteWaveFileHeader(
            FileOutputStream out, long totalAudioLen,
            long totalDataLen, long longSampleRate, int channels,
            long byteRate) throws IOException {
        byte[] header = new byte[44];
        header[0] = 'R';  // RIFF/WAVE header
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff);
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        header[12] = 'f';  // 'fmt ' chunk
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        header[16] = 16;  // 4 bytes: size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        header[20] = 1;  // format = 1 (PCM)
        header[21] = 0;
        header[22] = (byte) channels;
        header[23] = 0;
        header[24] = (byte) (longSampleRate & 0xff);
        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        header[32] = (byte) (channels * RECORDER_BPP / 8);  // block align = channels * bits-per-sample / 8
        header[33] = 0;
        header[34] = RECORDER_BPP;  // bits per sample
        header[35] = 0;
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff);
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
        out.write(header, 0, 44);
    }
    private View.OnClickListener btnClick = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            switch (v.getId()) {
                case R.id.btnStart: {
                    AppLog.logString("Start Recording");
                    enableButtons(true);
                    startRecording();
                    break;
                }
                case R.id.btnStop: {
                    AppLog.logString("Stop Recording");
                    enableButtons(false);
                    stopRecording();

                    File source_for_byte = new File(AUDIO_WAV_FILE);
                    byte[] temp = new byte[(int) source_for_byte.length()];
                    try {
                        temp = getBytesFromFile(source_for_byte);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }

                    // Web service call: set the method name before building
                    // the SOAP action, so the action is not just the namespace.
                    String NAMESPACE = "http://test.com";
                    String METHOD_NAME = "get_wav_byte";          // our web service method name
                    String SOAP_ACTION = NAMESPACE + METHOD_NAME; // namespace + method name
                    final String URL = "http://192.168.3.106:8080/axis2/services/VoiceService?wsdl";
                    try {
                        SoapObject request = new SoapObject(NAMESPACE, METHOD_NAME);
                        request.addProperty("wavbite", temp);
                        request.addProperty("path", "D:\\sound\\latest_recognizer.wav");
                        SoapSerializationEnvelope envelope = new SoapSerializationEnvelope(SoapEnvelope.VER11);
                        new MarshalBase64().register(envelope); // byte[] serialization
                        envelope.encodingStyle = SoapEnvelope.ENC;
                        envelope.dotNet = true;
                        envelope.setOutputSoapObject(request);
                        HttpTransportSE androidHttpTransport = new HttpTransportSE(URL);
                        androidHttpTransport.call(SOAP_ACTION, envelope);
                        Object result = envelope.getResponse();
                        ((TextView) findViewById(R.id.gettext1)).setText("NUMBER IS :->   "
                                + result.toString());
                    } catch (Exception E) {
                        E.printStackTrace();
                        ((TextView) findViewById(R.id.gettext1)).setText("ERROR:"
                                + E.getClass().getName() + ":" + E.getMessage());
                    }
                    break;
                }
            }
        }
    };
}

As far as I can tell, the reason it fails is that I use the external jar file inside the public static String recognize_wave(String wavePath) method.

I have searched a lot, but still have not found good guidance.

Hope you can help me…

Thanks in advance.

Check to make sure the external jar file is included in the build path as well as in the libs folder. If you don't have a libs folder, you can create one in the root directory of your project, then copy and paste your jar file into it. Then right-click the jar file in Eclipse, go to Build Path, and click Add To Build Path. If the jar file is in your libs folder you won't get any compiler errors, but that only covers compile time: if the jar is not on the runtime classpath as well, it will fail at runtime.
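Note that in this question the NoClassDefFoundError is raised on the server, inside the Axis2 service, so the same compile-time/runtime distinction applies there: the Sphinx jar must be on the servlet container's runtime classpath, not only on the Eclipse build path of the service project. Assuming a standard Axis2 WAR deployment under Tomcat (the layout and jar name below are illustrative), that usually means something like:

tomcat/webapps/axis2/WEB-INF/lib/sphinx4.jar        <- server-side runtime classpath
tomcat/webapps/axis2/WEB-INF/services/VoiceService.aar

After copying the jar, restart the container so Axis2 reloads the service with the updated classpath.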
