我正在使用 libstreaming 流媒体库开发一个 Android 应用程序。该应用程序把视频流上行推送到 Wowza 服务器(移动端推流到 Wowza)。我创建了一个带相机预览的 SurfaceView,目前运行良好,但我想再添加三个相机功能(双指缩放、触摸自动对焦和闪光灯)。
我不知道Libstreaming是否可行。
我使用的 SurfaceView 属于 libstreaming 库自带的封装类(net.majorkernelpanic.streaming.gl.SurfaceView)。
下面是我的活动代码:
public class LiveStreamingActivity extends Activity implements RtspClient.Callback, Session.Callback, SurfaceHolder.Callback {
private static SurfaceView mSurfaceView;
private SurfaceHolder mHolder;
private Session mSession;// Rtsp session
private static RtspClient mClient;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_main);
if (!LibsChecker.checkVitamioLibs(this))
return;
mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@SuppressWarnings("deprecation")
private void initRtspClient() {
// Configures the SessionBuilder
mSession = SessionBuilder
.getInstance()
.setContext(getApplicationContext())
.setAudioEncoder(SessionBuilder.AUDIO_AAC)
.setAudioQuality(new AudioQuality(8000, 16000))
.setVideoEncoder(SessionBuilder.VIDEO_H264)
//.setVideoQuality(new VideoQuality(352, 288, 30, 300000))
.setCamera(CameraInfo.CAMERA_FACING_BACK)
.setSurfaceView(mSurfaceView).setPreviewOrientation(0)
.setCallback(this).build();
mClient = new RtspClient();
mClient.setSession(mSession);
mClient.setCallback(this);
mClient.setTransportMode(RtspClient.TRANSPORT_TCP);
mSurfaceView.setAspectRatioMode(SurfaceView.ASPECT_RATIO_PREVIEW);
String ip, port, path;
Pattern uri = Pattern.compile("rtsp://(.+):(\d+)/(.+)");
Matcher m = uri.matcher("rtsp://219.65.90.226:1935/app2/myStream");
m.find();
ip = m.group(1);
port = m.group(2);
path = m.group(3);
mClient.setCredentials(AppConfig.PUBLISHER_USERNAME,
AppConfig.PUBLISHER_PASSWORD);
mClient.setServerAddress(ip, Integer.parseInt(port));
mClient.setStreamPath("/" + path);
}
@Override
protected void onResume() {System.out.println("on Resume activity 2");
super.onResume();
try{
if(null != mSurfaceView){
/* Broadcastreceiver: check network connectivity */
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction("android.net.conn.CONNECTIVITY_CHANGE");
registerReceiver(receiver, intentFilter);
/* Start audio streaming background thread: AsyncTask */
vmPlayer = null;
vmPlayer = new MediaPlayer(this);
audioStream= new AudioStreamTask(this);
audioStream.execute("push","push","push");
}
}catch(Exception ex){
ex.printStackTrace();
}
}
@Override
protected void onPause() {
super.onPause();
try{
/* release the surface view */
if(null != mSurfaceView){
mClient.release();
mSession.release();
mSurfaceView.getHolder().removeCallback(this);
}
}catch(Exception ex){
ex.printStackTrace();
}
}
@Override
public void onDestroy() {
try {
super.onDestroy();
if (mClient != null) {
mClient.release();
}
if (mSession != null) {
mSession.release();
}
mSurfaceView.getHolder().removeCallback(this);
} catch (Exception e) {
System.out.println("Error while destroying activity " + e);
}
}
private void toggleStreaming() {
if (!mClient.isStreaming()) {
// Start camera preview
mSession.startPreview();
// mFrontSession.startPreview();
// Start video stream
mClient.startStream();
//startRtmpStream();
} else {
// already streaming, stop streaming
// stop camera preview
mSession.stopPreview();
// mFrontSession.stopPreview();
// stop streaming
mClient.stopStream();
}
}}
activity_main.xml
<!-- Fullscreen black container hosting the libstreaming camera preview surface. -->
<!-- FIX: fill_parent has been deprecated since API 8 — use match_parent. -->
<!-- FIX: android:orientation is a LinearLayout attribute; it has no effect on FrameLayout. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/surface_layout"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:background="@android:color/black" >

    <LinearLayout
        android:id="@+id/surface_view_layout"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_weight="1"
        android:orientation="vertical" >

        <!-- libstreaming's own SurfaceView subclass (handles GL preview + aspect ratio) -->
        <net.majorkernelpanic.streaming.gl.SurfaceView
            android:id="@+id/surface_view"
            android:layout_width="match_parent"
            android:layout_height="match_parent"
            android:layout_gravity="center" />
    </LinearLayout>
</FrameLayout>
我需要完整的描述来添加所有这三个相机功能。
我做到了!:)
打开 VideoStream.java,把字段声明 protected Camera mCamera 改为 public static Camera mCamera,这样 Activity 里就能直接访问相机对象。
转到您的MainActivity,在您的情况下为LiveStreamingActivity并粘贴:
// Distance between the first two fingers at the previous touch event;
// used to decide pinch direction in handleZoom().
private float mDist;

/**
 * Routes touch input: two-finger pinch drives zoom, a single tap triggers autofocus.
 *
 * @param event the touch event delivered to the Activity
 * @return always {@code true} — the event is fully consumed here
 */
@Override
public boolean onTouchEvent(MotionEvent event) {
    // Guard: the camera may not be opened yet (before preview starts).
    if (VideoStream.mCamera == null) {
        return true;
    }
    Camera.Parameters params = VideoStream.mCamera.getParameters();
    // FIX: getAction() encodes the pointer index in its upper bits for multi-touch
    // events, so "action == ACTION_POINTER_DOWN" never matches for a secondary
    // pointer. getActionMasked() strips the index bits.
    int action = event.getActionMasked();
    if (event.getPointerCount() > 1) {
        // handle multi-touch (pinch) events
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
            // second finger landed: record the baseline finger spacing
            mDist = getFingerSpacing(event);
        } else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
            // autofocus would fight with zoom changes — cancel it first
            VideoStream.mCamera.cancelAutoFocus();
            handleZoom(event, params);
        }
    } else {
        // handle single touch events: focus on finger lift
        if (action == MotionEvent.ACTION_UP) {
            handleFocus(event, params);
        }
    }
    return true;
}
/**
 * Steps the camera zoom by one level per MOVE event, in the pinch direction.
 * Fingers spreading apart zoom in; fingers closing zoom out.
 */
private void handleZoom(MotionEvent event, Camera.Parameters params) {
    final float spacing = getFingerSpacing(event);
    int level = params.getZoom();
    if (spacing > mDist && level < params.getMaxZoom()) {
        level++;    // pinch outward -> one step in
    } else if (spacing < mDist && level > 0) {
        level--;    // pinch inward -> one step out
    }
    mDist = spacing;                      // new baseline for the next MOVE
    params.setZoom(level);
    VideoStream.mCamera.setParameters(params);
}
/**
 * Triggers a single-shot autofocus on tap, if the camera supports AUTO focus mode.
 *
 * <p>FIX: the original queried FOCUS_MODE_AUTO support but never actually set the
 * focus mode or applied the parameters, so on cameras whose default mode is e.g.
 * continuous-video the autoFocus() call had no defined effect. It also computed the
 * tap coordinates without using them (no focus area was ever set) — that dead code
 * is removed. autoFocus() can throw RuntimeException if the camera is mid-release,
 * so the call is guarded.
 *
 * @param event  the single-touch UP event (coordinates currently unused; a focus
 *               area could be derived from them as a future improvement)
 * @param params current camera parameters, queried by the caller
 */
public void handleFocus(MotionEvent event, Camera.Parameters params) {
    List<String> supportedFocusModes = params.getSupportedFocusModes();
    if (supportedFocusModes != null
            && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
        try {
            // Cancel any in-flight focus run, switch to AUTO, then focus once.
            VideoStream.mCamera.cancelAutoFocus();
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            VideoStream.mCamera.setParameters(params);
            VideoStream.mCamera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    // currently set to auto-focus on single touch; nothing to do here
                }
            });
        } catch (RuntimeException e) {
            // Camera was released or is in a bad state — ignore this tap.
            e.printStackTrace();
        }
    }
}
/**
 * Determine the space (Euclidean distance) between the first two fingers.
 * FIX: FloatMath.sqrt was deprecated in API 22 and removed in API 23 —
 * use Math.sqrt with a float cast instead.
 */
private float getFingerSpacing(MotionEvent event) {
    float x = event.getX(0) - event.getX(1);
    float y = event.getY(0) - event.getY(1);
    return (float) Math.sqrt(x * x + y * y);
}
这个方案是参考这里的资料实现的。
如果有帮助,请告诉我!
感谢@JoséCunha Fonte您的代码很棒!
对我来说(与棉花糖SDK一起使用)return FloatMath.sqrt(x * x + y * y);
已经过时了,所以我只改为return (float)Math.sqrt(x * x + y * y);
希望它能帮助到某人:)