/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import org.webrtc.videoengine.ViERenderer;
import org.webrtc.videoengine.VideoCaptureAndroid;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera;
import android.hardware.SensorManager;
import android.media.AudioManager;
import android.os.Environment;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.SurfaceView;
import java.io.File;
public class MediaEngine implements VideoDecodeEncodeObserver {
// TODO(henrike): Most of these should be moved to xml (since static).
private static final int VCM_VP8_PAYLOAD_TYPE = 100;
private static final int SEND_CODEC_FPS = 30;
// TODO(henrike): increase INIT_BITRATE_KBPS to 2000 and ensure that
// 720p30fps can be achieved (on hardware that can handle it). Note that
// setting 2000 currently leads to failure, so that has to be resolved first.
private static final int INIT_BITRATE_KBPS = 500;
private static final int MAX_BITRATE_KBPS = 3000;
private static final String LOG_DIR = "webrtc";
private static final int WIDTH_IDX = 0;
private static final int HEIGHT_IDX = 1;
private static final int[][] RESOLUTIONS = {
{176,144}, {320,240}, {352,288}, {640,480}, {1280,720}
};
// Arbitrary choice of 4/5 volume (204/256).
private static final int volumeLevel = 204;
public static int numberOfResolutions() { return RESOLUTIONS.length; }
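// Returns the entries of RESOLUTIONS formatted as "<width>x<height>"
// strings, e.g. "176x144" and "1280x720".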
public static String[] resolutionsAsString() {
String[] retVal = new String[numberOfResolutions()];
for (int i = 0; i < numberOfResolutions(); ++i) {
retVal[i] = RESOLUTIONS[i][0] + "x" + RESOLUTIONS[i][1];
}
return retVal;
}
// Checks for and communicates failures to the user (via logcat and a popup).
private void check(boolean value, String message) {
if (value) {
return;
}
Log.e("WEBRTC-CHECK", message);
AlertDialog alertDialog = new AlertDialog.Builder(context).create();
alertDialog.setTitle("WebRTC Error");
alertDialog.setMessage(message);
alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
"OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
}
);
alertDialog.show();
}
// Converts device rotation to camera rotation. The rotation depends on
// whether the camera is back facing and rotates with the device, or front
// facing and rotates in the opposite direction of the device.
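// For example, a front camera with sensor orientation 270 and a device
// rotation of 90 degrees yields ((360 - 90) + 270) % 360 = 180.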
private static int rotationFromRealWorldUp(CameraInfo info,
int deviceRotation) {
int coarseDeviceOrientation =
(int)(Math.round((double)deviceRotation / 90) * 90) % 360;
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
// The front camera rotates in the opposite direction of the
// device.
int inverseDeviceOrientation = 360 - coarseDeviceOrientation;
return (inverseDeviceOrientation + info.orientation) % 360;
}
return (coarseDeviceOrientation + info.orientation) % 360;
}
// Shared Audio/Video members.
private final Context context;
private String remoteIp;
private boolean enableTrace;
// Audio
private VoiceEngine voe;
private int audioChannel;
private boolean audioEnabled;
private boolean voeRunning;
private int audioCodecIndex;
private int audioTxPort;
private int audioRxPort;
private boolean speakerEnabled;
private boolean headsetPluggedIn;
private boolean enableAgc;
private boolean enableNs;
private boolean enableAecm;
private BroadcastReceiver headsetListener;
private boolean audioRtpDump;
private boolean apmRecord;
// Video
private VideoEngine vie;
private int videoChannel;
private boolean receiveVideo;
private boolean sendVideo;
private boolean vieRunning;
private int videoCodecIndex;
private int resolutionIndex;
private int videoTxPort;
private int videoRxPort;
// Indexed by CameraInfo.CAMERA_FACING_{BACK,FRONT}.
private CameraInfo[] cameras;
private boolean useFrontCamera;
private int currentCameraHandle;
private boolean enableNack;
// openGl, surfaceView or mediaCodec (integers.xml)
private int viewSelection;
private boolean videoRtpDump;
private SurfaceView svLocal;
private SurfaceView svRemote;
MediaCodecVideoDecoder externalCodec;
private int inFps;
private int inKbps;
private int outFps;
private int outKbps;
private int inWidth;
private int inHeight;
private OrientationEventListener orientationListener;
private int deviceOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
public MediaEngine(Context context) {
this.context = context;
voe = new VoiceEngine();
check(voe.init() == 0, "Failed voe Init");
audioChannel = voe.createChannel();
check(audioChannel >= 0, "Failed voe CreateChannel");
vie = new VideoEngine();
check(vie.init() == 0, "Failed vie Init");
check(vie.setVoiceEngine(voe) == 0, "Failed setVoiceEngine");
videoChannel = vie.createChannel();
check(videoChannel >= 0, "Failed vie CreateChannel");
check(vie.connectAudioChannel(videoChannel, audioChannel) == 0,
"Failed ConnectAudioChannel");
cameras = new CameraInfo[2];
CameraInfo info = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
Camera.getCameraInfo(i, info);
cameras[info.facing] = info;
}
setDefaultCamera();
check(voe.setSpeakerVolume(volumeLevel) == 0,
"Failed setSpeakerVolume");
check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
"VoE set Aecm speakerphone mode failed");
check(vie.setKeyFrameRequestMethod(videoChannel,
VideoEngine.VieKeyFrameRequestMethod.
KEY_FRAME_REQUEST_PLI_RTCP) == 0,
"Failed setKeyFrameRequestMethod");
check(vie.registerObserver(videoChannel, this) == 0,
"Failed registerObserver");
// TODO(hellner): SENSOR_DELAY_NORMAL?
// Listen to changes in device orientation.
orientationListener =
new OrientationEventListener(context, SensorManager.SENSOR_DELAY_UI) {
public void onOrientationChanged (int orientation) {
deviceOrientation = orientation;
compensateRotation();
}
};
orientationListener.enable();
// Set audio mode to communication
AudioManager audioManager =
(AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
// Listen to headset being plugged in/out.
IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
headsetListener = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(Intent.ACTION_HEADSET_PLUG)) {
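// For ACTION_HEADSET_PLUG the "state" extra is 0 when the headset is
// unplugged and 1 when it is plugged in.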
headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
updateAudioOutput();
}
}
};
context.registerReceiver(headsetListener, receiverFilter);
}
public void dispose() {
check(!voeRunning && !vieRunning, "Engines must be stopped before dispose");
context.unregisterReceiver(headsetListener);
orientationListener.disable();
check(vie.deregisterObserver(videoCh