I'm trying to build a face-detection app using the Face API from the Azure tutorial. When I try to run the app, I get this error:
java.lang.NoClassDefFoundError: Failed resolution of: Lorg/apache/http/impl/client/DefaultHttpClient;
    at com.microsoft.projectoxford.face.rest.WebServiceRequest.<init>(WebServiceRequest.java:67)
    at com.microsoft.projectoxford.face.FaceServiceRestClient.<init>(FaceServiceRestClient.java:99)
    at com.contoso.facetutorial.MainActivity.<init>(MainActivity.java:28)
Here is my code:
MainActivity.java
package com.contoso.facetutorial;

// <snippet_imports>
import java.io.*;
import java.lang.Object.*;
import android.app.*;
import android.content.*;
import android.net.*;
import android.os.*;
import android.view.*;
import android.graphics.*;
import android.widget.*;
import android.provider.*;
// </snippet_imports>

// <snippet_face_imports>
import com.microsoft.projectoxford.face.*;
import com.microsoft.projectoxford.face.contract.*;
// </snippet_face_imports>

public class MainActivity extends Activity {
    // <snippet_mainactivity_fields>
    // Add your Face endpoint to your environment variables.
    private final String apiEndpoint = "https://ceranfaceapi.cognitiveservices.azure.com/";
    // Add your Face subscription key to your environment variables.
    private final String subscriptionKey = "xxxx";

    private final FaceServiceClient faceServiceClient =
            new FaceServiceRestClient(apiEndpoint, subscriptionKey);

    private final int PICK_IMAGE = 1;
    private ProgressDialog detectionProgressDialog;
    // </snippet_mainactivity_fields>

    // <snippet_mainactivity_methods>
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        Button button1 = findViewById(R.id.button1);
        button1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
                intent.setType("image/*");
                startActivityForResult(Intent.createChooser(
                        intent, "Select Picture"), PICK_IMAGE);
            }
        });

        detectionProgressDialog = new ProgressDialog(this);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == PICK_IMAGE && resultCode == RESULT_OK &&
                data != null && data.getData() != null) {
            Uri uri = data.getData();
            try {
                Bitmap bitmap = MediaStore.Images.Media.getBitmap(
                        getContentResolver(), uri);
                ImageView imageView = findViewById(R.id.imageView1);
                imageView.setImageBitmap(bitmap);

                // Comment out for tutorial
                detectAndFrame(bitmap);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        // </snippet_mainactivity_methods>
    }

    // <snippet_detection_methods>
    // Detect faces by uploading a face image.
    // Frame faces after detection.
    private void detectAndFrame(final Bitmap imageBitmap) {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        imageBitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
        ByteArrayInputStream inputStream =
                new ByteArrayInputStream(outputStream.toByteArray());

        AsyncTask<InputStream, String, Face[]> detectTask =
                new AsyncTask<InputStream, String, Face[]>() {
                    String exceptionMessage = "";

                    @Override
                    protected Face[] doInBackground(InputStream... params) {
                        try {
                            publishProgress("Detecting...");
                            Face[] result = faceServiceClient.detect(
                                    params[0],
                                    true,  // returnFaceId
                                    false, // returnFaceLandmarks
                                    null   // returnFaceAttributes:
                                    /* new FaceServiceClient.FaceAttributeType[] {
                                        FaceServiceClient.FaceAttributeType.Age,
                                        FaceServiceClient.FaceAttributeType.Gender }
                                    */
                            );
                            if (result == null){
                                publishProgress(
                                        "Detection Finished. Nothing detected");
                                return null;
                            }
                            publishProgress(String.format(
                                    "Detection Finished. %d face(s) detected",
                                    result.length));
                            return result;
                        } catch (Exception e) {
                            exceptionMessage = String.format(
                                    "Detection failed: %s", e.getMessage());
                            return null;
                        }
                    }

                    @Override
                    protected void onPreExecute() {
                        //TODO: show progress dialog
                        detectionProgressDialog.show();
                    }

                    @Override
                    protected void onProgressUpdate(String... progress) {
                        //TODO: update progress
                        detectionProgressDialog.setMessage(progress[0]);
                    }

                    @Override
                    protected void onPostExecute(Face[] result) {
                        //TODO: update face frames
                        detectionProgressDialog.dismiss();

                        if(!exceptionMessage.equals("")){
                            showError(exceptionMessage);
                        }
                        if (result == null) return;

                        ImageView imageView = findViewById(R.id.imageView1);
                        imageView.setImageBitmap(
                                drawFaceRectanglesOnBitmap(imageBitmap, result));
                        imageBitmap.recycle();
                    }
                };

        detectTask.execute(inputStream);
    }

    private void showError(String message) {
        new AlertDialog.Builder(this)
                .setTitle("Error")
                .setMessage(message)
                .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                    }})
                .create().show();
    }
    // </snippet_detection_methods>

    // <snippet_drawrectangles>
    private static Bitmap drawFaceRectanglesOnBitmap(
            Bitmap originalBitmap, Face[] faces) {
        Bitmap bitmap = originalBitmap.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
        paint.setColor(Color.RED);
        paint.setStrokeWidth(10);
        if (faces != null) {
            for (Face face : faces) {
                FaceRectangle faceRectangle = face.faceRectangle;
                canvas.drawRect(
                        faceRectangle.left,
                        faceRectangle.top,
                        faceRectangle.left + faceRectangle.width,
                        faceRectangle.top + faceRectangle.height,
                        paint);
            }
        }
        return bitmap;
    }
    // </snippet_drawrectangles>
}
To continue using the Apache HTTP client, apps that target Android 9 and above can add the following to their AndroidManifest.xml:
<uses-library android:name="org.apache.http.legacy" android:required="false"/>
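For reference, here is a minimal sketch of where that declaration is supposed to sit: the <uses-library> element has to be a direct child of <application>, not of the root <manifest> element. Only the package name and MainActivity below come from the code above; the INTERNET permission, label, and theme are placeholders for whatever the project already declares.

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.contoso.facetutorial">

    <!-- Placeholder: the Face API calls need network access. -->
    <uses-permission android:name="android.permission.INTERNET" />

    <application
        android:label="FaceTutorial"
        android:theme="@style/AppTheme">

        <!-- Restores the removed Apache HTTP classes (e.g. DefaultHttpClient) on API 28+. -->
        <uses-library
            android:name="org.apache.http.legacy"
            android:required="false" />

        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>

After editing the manifest, the app has to be rebuilt and reinstalled for the legacy library to be loaded at runtime.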