Commit df8596c5 authored by Jiachen,Chen's avatar Jiachen,Chen

Modified from MCOP SDK. Added functions for TechToProtect:

1. Move audio recorder and player to the application.
2. Patch through SDK to allow application to access audio/video.
3. Buffer/replicate/record data in the application.

Address 3 issues:
1. Messages sometimes need to be repeated (solution: the receiver can replay them).
2. Chatty messages, cannot focus (solution: pause the audio and play later).
3. Difficult to synchronize (solution: async-sending).
parent 82600e0a
Pipeline #240 failed with stages
......@@ -623,8 +623,8 @@ public class NgnApplication extends Application{
// FIXME: one-way audio on Galaxy SII and Nexus
//return isSLEs2Supported() && Arrays.asList(sSLEs2FriendlyBuildModels).contains(sBuildModel); // AcceptOnlyIn(Array)
//return isSLEs2Supported() && !Arrays.asList(sSLEs2UnFriendlyBuildModels).contains(sBuildModel); // AcceptAllExceptIn(Array)
//return false;
return true;
return false;
// return true;
//return isHovis(); //false;
}
......
......@@ -6,7 +6,7 @@ android {
defaultConfig {
applicationId "org.mcopenplatform.muoapi"
minSdkVersion 17
minSdkVersion 23
targetSdkVersion 28
versionCode 1
versionName "0.1"
......@@ -17,18 +17,18 @@ android {
}
buildTypes {
release {
buildConfigField "boolean", "LOG_SHOW", "false"
buildConfigField "boolean", "LOG_SHOW", "false"
minifyEnabled false
minifyEnabled true
minifyEnabled true
shrinkResources true
zipAlignEnabled true
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
flavorDimensions "default"
}
debug {
buildConfigField "boolean", "LOG_SHOW", "true"
buildConfigField "boolean", "LOG_SHOW", "true"
minifyEnabled false
minifyEnabled false
minifyEnabled false
shrinkResources false
zipAlignEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
......@@ -70,7 +70,6 @@ android {
}
dependencies {
implementation project(':android-ngn-stack')
api fileTree(include: ['*.jar'], dir: 'libs')
......@@ -82,6 +81,8 @@ dependencies {
api 'com.android.support:support-v4:' + rootProject.supportLibVersion
api 'com.android.support:recyclerview-v7:' + rootProject.supportLibVersion
// Required -- JUnit 4 framework
implementation 'com.android.support:support-v4:28.0.0'
implementation 'android.arch.lifecycle:extensions:1.1.1'
testImplementation 'junit:junit:4.12'
// Optional -- Mockito framework
testImplementation 'org.mockito:mockito-core:2.7.22'
......
This source diff could not be displayed because it is too large. You can view the blob instead.
package patch;
import java.util.ArrayList;
/**
 * A single buffered audio clip, stored as a list of fixed-duration PCM frames.
 * A clip is either received from another speaker or recorded locally ("mine").
 * Thread-safe: every access to the frame list and playback cursor synchronizes
 * on the list itself.
 */
public class Audio {
    // Duration represented by each buffered frame, in milliseconds.
    private static final long MS_PER_DATA = 20;
    // Display name of the speaker (resolved from the SIP id when known).
    private final String sender;
    // True if this clip was recorded from the local microphone.
    private final boolean mine;
    // PCM frames in arrival order; guarded by synchronizing on "data".
    private final ArrayList<byte[]> data = new ArrayList<>();
    // Index of the next frame returned by getNext(); guarded by "data".
    private int curr = 0;
    /**
     * @param sender SIP id (or display name) of the speaker; mapped to a
     *               friendly name via FragmentPttViewModel.SIDNames when known
     * @param mine   true if the clip originates from the local microphone
     */
    Audio(String sender, boolean mine) {
        String display = FragmentPttViewModel.SIDNames.get(sender);
        this.sender = (display != null) ? display : sender;
        this.mine = mine;
    }
    /** Display name of the clip's speaker. */
    String getSender() {
        return sender;
    }
    /** True if this clip was recorded locally. */
    boolean isMine() {
        return mine;
    }
    /** Appends one PCM frame to the clip. */
    void addData(byte[] buf) {
        synchronized (data) {
            data.add(buf);
        }
    }
    /** Number of buffered frames. */
    long getLength() {
        synchronized (data) {
            return data.size();
        }
    }
    /** Total clip duration in milliseconds. */
    long getLengthMS() {
        synchronized (data) {
            return data.size() * MS_PER_DATA;
        }
    }
    /** Current playback position in milliseconds. */
    long getCurrMS() {
        synchronized (data) {
            return curr * MS_PER_DATA;
        }
    }
    /**
     * Moves the playback cursor to the frame containing {@code pos} ms,
     * clamped to the clip's bounds. Fixes a latent bug: seeking an empty clip
     * used to leave the cursor at -1, making the next getNext() throw.
     */
    void seek(long pos) {
        synchronized (data) {
            long dataIdx = pos / MS_PER_DATA;
            if (dataIdx >= data.size()) dataIdx = data.size() - 1;
            if (dataIdx < 0) dataIdx = 0; // empty clip or negative position
            curr = (int) dataIdx;
        }
    }
    /** Random-access read of one frame. */
    byte[] get(int pos) {
        synchronized (data) {
            return data.get(pos);
        }
    }
    /**
     * Returns the next frame and advances the cursor, or null when the clip is
     * exhausted (the cursor is clamped at the end so getCurrMS() stays sane).
     */
    byte[] getNext() {
        synchronized (data) {
            if (curr >= data.size()) {
                curr = data.size();
                return null;
            }
            return data.get(curr++);
        }
    }
    /** Rewinds the playback cursor to the start of the clip. */
    void resetCurr() {
        synchronized (data) {
            curr = 0;
        }
    }
}
package patch;
import android.content.Context;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import org.doubango.ngn.media.NgnProxyAudioProducer;
import org.mcopenplatform.muoapi.mcopsdk.MainActivity;
import java.util.ArrayList;
import java.util.List;
/**
 * Buffers push-to-talk audio in the application layer: records incoming floor
 * audio into {@link Audio} clips, queues them for (re)play, captures the local
 * microphone, and streams queued outgoing clips to the SDK once the floor is
 * granted. Two long-lived threads (playback and record/send) run for the
 * lifetime of this object.
 */
public class AudioManager {
    // 16 kHz mono 16-bit PCM playback / capture.
    private AudioTrack mAudioTrack;
    private AudioRecord mAudioRecord;
    // Playback is suspended while paused (unless the user explicitly clicked a clip).
    private volatile boolean paused = false;
    // NOTE(review): nothing in this class ever sets "recording", so the playback
    // thread's early-out on it appears dead — confirm whether an external caller
    // was meant to toggle it while the mic is open.
    private volatile boolean recording = false;
    // True while the call is in CONNECTED state.
    private volatile boolean connected = false;
    // Set whenever UI-visible state changes; consumed via clearDirty().
    private volatile boolean dirty;
    // Every clip captured this session (incoming and outgoing), in order.
    private final List<Audio> allAudios = new ArrayList<>();
    // Incoming clips waiting to be played; guarded by synchronizing on the list.
    private final ArrayList<Audio> toPlays = new ArrayList<>();
    // Outgoing clips waiting for the floor; guarded by this manager's monitor.
    private final ArrayList<Audio> toSends = new ArrayList<>();
    // Someone else holds the floor: clip currently being received.
    private volatile Audio incomingAudio;
    // Clip currently being captured from the local microphone.
    private volatile Audio recordingAudio;
    // Clip currently being pushed out to the SDK.
    private volatile Audio sendingAudio;
    // Next frame index of sendingAudio to ship.
    private int sendingAudioFrame = 0;
    // Clip currently audible through the speaker (for UI highlighting).
    private volatile Audio playingAudio;
    // Clip the user explicitly tapped; played even while paused.
    private volatile Audio userClick = null;
    private MainActivity activity;
    private Handler mainHandler = new Handler(Looper.getMainLooper());
    AudioManager() {
        // TODO(review): a 100-byte AudioTrack buffer is almost certainly below
        // AudioTrack.getMinBufferSize(16000, MONO, PCM_16BIT) — confirm this is
        // intentional and not the cause of choppy playback.
        mAudioTrack = new AudioTrack(android.media.AudioManager.STREAM_VOICE_CALL, 16000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, 100, AudioTrack.MODE_STREAM);
        mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 16000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, 2560);
        new Thread(playAudioThread).start();
        new Thread(recordAudioThread).start();
    }
    /**
     * Binds the manager to the hosting activity, which is used for floor
     * requests and for broadcasting outgoing frames.
     * @throws RuntimeException if the context is not a MainActivity
     */
    void setContext(Context context) {
        if (!(context instanceof MainActivity)) {
            throw new RuntimeException();
        }
        activity = (MainActivity) context;
    }
    /** Updates the call-connected flag (set from CALL_STATE_CHANGED broadcasts). */
    void setConnected(boolean connected) {
        this.connected = connected;
    }
    /** Pauses or resumes queue playback (explicit user clicks still play). */
    void setPause(boolean paused) {
        this.paused = paused;
    }
    /** Returns whether state changed since the last call, and resets the flag. */
    boolean clearDirty() {
        boolean ret = dirty;
        dirty = false;
        return ret;
    }
    /** All clips captured this session, oldest first. */
    List<Audio> getAudios() {
        return allAudios;
    }
    /** True if the clip is still being received or is queued for playback. */
    boolean isPending(Audio audio) {
        return audio == incomingAudio || toPlays.contains(audio);
    }
    /** Starts buffering a new incoming clip from the given speaker. */
    synchronized void startRecordIncoming(String sender) {
        if (incomingAudio != null) return;
        incomingAudio = new Audio(sender, false);
        allAudios.add(incomingAudio);
    }
    /** Finishes the current incoming clip and queues it for playback. */
    synchronized void stopRecordIncoming() {
        if (incomingAudio == null) return;
        synchronized (toPlays) {
            toPlays.add(incomingAudio);
            incomingAudio = null;
        }
    }
    /** Appends one received PCM frame to the current incoming clip. */
    synchronized void writeIncomingData(byte[] data) {
        if (incomingAudio == null) return;
        incomingAudio.addData(data);
        dirty = true;
    }
    /** Drops all buffered clips and pending state. */
    synchronized void clear() {
        allAudios.clear();
        toPlays.clear();
        incomingAudio = null;
        userClick = null;
        dirty = true;
    }
    // Records which clip is audible; marks dirty only on an actual change.
    private void setPlayingAudio(Audio audio) {
        if (playingAudio != audio) {
            playingAudio = audio;
            Log.d("PATCH", "audio=" + (audio == null ? "null" : audio.getSender()));
            dirty = true;
        }
    }
    /** Requests playback of a specific clip, even while paused. */
    void setClickAudio(Audio audio) {
        userClick = audio;
    }
    // Playback loop: user-clicked clip first, then the pending-play queue, then
    // live monitoring of the incoming clip. Sleeps 20 ms when idle.
    private Runnable playAudioThread = new Runnable() {
        @Override
        public void run() {
            mAudioTrack.play();
            // android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
            while (true) {
                // recording, do not play at all
                if (recording) {
                    setPlayingAudio(null);
                    try {
                        Thread.sleep(20);
                    } catch (InterruptedException e) {
                    }
                    continue;
                }
                Audio audio = null;
                boolean isUserClick = false;
                boolean isCurrent = false;
                // user clicked, play no matter if it is in pause state
                if (userClick != null) {
                    audio = userClick;
                    isUserClick = true;
                } else {
                    // no user click, pause
                    if (paused) {
                        setPlayingAudio(null);
                        try {
                            Thread.sleep(20);
                        } catch (InterruptedException e) {
                        }
                        continue;
                    }
                    // get pending plays
                    synchronized (toPlays) {
                        if (!toPlays.isEmpty()) {
                            audio = toPlays.get(0);
                        }
                    }
                    // no pending play, check if there is an audio going on
                    if (audio == null) {
                        isCurrent = true;
                        audio = incomingAudio;
                    }
                }
                setPlayingAudio(audio);
                // nothing to play
                if (audio == null) {
                    try {
                        Thread.sleep(20);
                    } catch (InterruptedException e) {
                    }
                    continue;
                }
                byte[] buf;
                buf = audio.getNext();
                if (buf != null) {
                    mAudioTrack.write(buf, 0, buf.length);
                    dirty = true;
                } else {
                    // Clip exhausted: rewind it; a queued clip is also dequeued.
                    if (isUserClick) {
                        audio.resetCurr();
                        dirty = true;
                        userClick = null;
                    } else if (!isCurrent) {
                        synchronized (toPlays) {
                            toPlays.remove(0);
                        }
                        audio.resetCurr();
                        dirty = true;
                    }
                }
            }
        }
    };
    /**
     * Starts capturing the local microphone into a new outgoing clip and, when
     * the channel is idle, schedules a floor request shortly afterwards.
     */
    synchronized void startRecordOutgoing() {
        if (recordingAudio != null) return;
        recordingAudio = new Audio("Myself", true);
        allAudios.add(recordingAudio);
        toSends.add(recordingAudio);
        Log.d("PATCH", "startRecordOutgoing, toSends.length:" + toSends.size());
        if (toSends.size() == 1) {
            if (incomingAudio == null) {
                // it is idle
                mainHandler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        checkOutgoing();
                    }
                }, 100);
            }
        }
        dirty = true;
    }
    /** Requests the floor if connected and there are clips waiting to be sent. */
    synchronized void checkOutgoing() {
        Log.d("PATCH", "checkOutgoing, connected:" + connected + ", toSendsEmpty:" + toSends.isEmpty());
        if (connected && !toSends.isEmpty()) {
            // request floor
            activity.requestFloor(true);
        }
    }
    /** Floor granted: start streaming the head of the send queue (or release it). */
    void floorGranted() {
        synchronized (this) {
            if (toSends.isEmpty()) {
                // release floor
                activity.requestFloor(false);
            } else {
                sendingAudioFrame = 0;
                sendingAudio = toSends.get(0);
            }
        }
    }
    /** Stops capturing the local microphone into the current outgoing clip. */
    synchronized void stopRecordOutgoing() {
        if (recordingAudio == null) return;
        // Method is already synchronized on this manager; the former nested
        // synchronized (this) block was redundant and has been removed.
        recordingAudio = null;
        dirty = true;
    }
    // Capture/send loop: reads 20 ms mic frames into recordingAudio and, while
    // the floor is held, ships sendingAudio's frames to the SDK one per read.
    private Runnable recordAudioThread = new Runnable() {
        @Override
        public void run() {
            mAudioRecord.startRecording();
            while (true) {
                byte[] buf = new byte[640]; // 20 ms of 16 kHz 16-bit mono PCM
                int nRead = mAudioRecord.read(buf, 0, buf.length);
                // Log.d("PATCH", "nRead=" + nRead);
                if (nRead > 0) {
                    if (recordingAudio != null) {
                        if (nRead != buf.length) {
                            // Short read: shrink the frame so no stale bytes are buffered.
                            byte[] tmp = new byte[nRead];
                            System.arraycopy(buf, 0, tmp, 0, nRead);
                            buf = tmp;
                        }
                        recordingAudio.addData(buf);
                        dirty = true;
                    }
                }
                if (sendingAudio != null) {
                    if (sendingAudioFrame == sendingAudio.getLength()) {
                        // after last frame
                        // FIX: lock the enclosing AudioManager, not this anonymous
                        // Runnable — "synchronized (this)" here locked the Runnable
                        // and provided no mutual exclusion with the synchronized
                        // methods that also mutate toSends/sendingAudio.
                        synchronized (AudioManager.this) {
                            toSends.remove(0);
                            sendingAudio = null;
                            activity.requestFloor(false);
                            dirty = true;
                        }
                    } else {
                        // Ship the next frame to the SDK via broadcast.
                        activity.sendBroadcast(new Intent(NgnProxyAudioProducer.ACTION_ADD_SEND).putExtra(NgnProxyAudioProducer.EXTRA_DATA, sendingAudio.get(sendingAudioFrame++)));
                    }
                }
            }
        }
    };
    /** Clip currently audible through the speaker, or null. */
    Audio getPlayingAudio() {
        return playingAudio;
    }
    /** True if the clip is still queued for transmission. */
    public boolean isToSend(Audio audio) {
        return toSends.contains(audio);
    }
}
This diff is collapsed.
package patch;
import android.arch.lifecycle.MutableLiveData;
import android.arch.lifecycle.ViewModel;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.util.Log;
import org.doubango.ngn.media.NgnProxyAudioConsumer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * ViewModel backing the PTT fragment: exposes call/pause/record state as
 * LiveData, owns the {@link AudioManager}, and translates SDK broadcasts
 * (audio data, call state, floor state) into AudioManager calls.
 */
public class FragmentPttViewModel extends ViewModel {
    // key: destination display name, value: true if group call, false if private
    static final HashMap<String, Boolean> callDestinations;
    // display name -> SIP id
    static final HashMap<String, String> nameSIDs;
    // SIP id -> display name (reverse mapping of nameSIDs)
    static final HashMap<String, String> SIDNames;
    static {
        nameSIDs = new HashMap<>();
        nameSIDs.put("Demo Group", "sip:jiachen_group@organization.org");
        nameSIDs.put("Demo A", "sip:mcptt_id_jiachen_A@organization.org");
        nameSIDs.put("Demo B", "sip:mcptt_id_jiachen_B@organization.org");
        nameSIDs.put("Demo C", "sip:mcptt_id_jiachen_C@organization.org");
        nameSIDs.put("Demo D", "sip:mcptt_id_jiachen_D@organization.org");
        SIDNames = new HashMap<>();
        for (Map.Entry<String, String> nameSID : nameSIDs.entrySet()) {
            SIDNames.put(nameSID.getValue(), nameSID.getKey());
        }
        callDestinations = new HashMap<>();
        callDestinations.put("Demo Group", true);
        callDestinations.put("Demo A", false);
        callDestinations.put("Demo B", false);
        callDestinations.put("Demo C", false);
        callDestinations.put("Demo D", false);
    }
    MutableLiveData<Boolean> isCalling = new MutableLiveData<>();
    MutableLiveData<Boolean> isPaused = new MutableLiveData<>();
    MutableLiveData<Boolean> isRecording = new MutableLiveData<>();
    MutableLiveData<String> currentDestination = new MutableLiveData<>();
    MutableLiveData<String> token = new MutableLiveData<>();
    AudioManager audioManager = new AudioManager();
    // Receiver registered in setContext(); kept so it can be unregistered to
    // avoid leaking the hosting context and receiving duplicate broadcasts.
    private BroadcastReceiver receiver;
    private Context receiverContext;
    public FragmentPttViewModel() {
        isCalling.setValue(false);
        isPaused.setValue(false);
        isRecording.setValue(false);
    }
    /** Flips the paused state and propagates it to the AudioManager. */
    void togglePause() {
        Boolean paused = isPaused.getValue();
        assert paused != null;
        audioManager.setPause(!paused);
        isPaused.postValue(!paused);
    }
    /** See {@link AudioManager#clearDirty()}. */
    boolean clearDirty() {
        return audioManager.clearDirty();
    }
    /** All buffered clips, oldest first. */
    List<Audio> getAudios() {
        return audioManager.getAudios();
    }
    /** Clip currently playing, or null. */
    Audio getPlayingAudio() {
        return audioManager.getPlayingAudio();
    }
    /** Plays the tapped clip even while paused. */
    void setClickAudio(Audio audio) {
        audioManager.setClickAudio(audio);
    }
    /** Drops all buffered clips. */
    void clearAudios() {
        audioManager.clear();
    }
    /** True if the clip is still incoming or queued for playback. */
    boolean isPending(Audio audio) {
        return audioManager.isPending(audio);
    }
    /** True if the clip is still queued for transmission. */
    boolean isToSend(Audio audio) {
        return audioManager.isToSend(audio);
    }
    /**
     * Binds to the hosting context and registers for SDK broadcasts.
     * FIX: the receiver used to be anonymous and never unregistered, leaking
     * the context and duplicating deliveries if this method ran twice; it is
     * now tracked and released on re-registration and in onCleared().
     */
    synchronized void setContext(Context context) {
        audioManager.setContext(context);
        unregisterReceiverIfAny();
        IntentFilter filter = new IntentFilter();
        filter.addAction(NgnProxyAudioConsumer.ACTION_DATA_RECEIVED);
        filter.addAction("CALL_STATE_CHANGED");
        filter.addAction("FLOOR_STATE_CHANGED");
        receiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                if (intent.getAction() == null) return;
                switch (intent.getAction()) {
                    case NgnProxyAudioConsumer.ACTION_DATA_RECEIVED: {
                        // Raw PCM frame from the SDK: append to the incoming clip.
                        byte[] buf = intent.getByteArrayExtra(NgnProxyAudioConsumer.EXTRA_DATA);
                        audioManager.writeIncomingData(buf);
                        break;
                    }
                    case "CALL_STATE_CHANGED": {
                        String state = intent.getStringExtra("STATE");
                        Log.d("PATCH", "state=" + state);
                        if (state == null) break;
                        switch (state) {
                            case "CONNECTED":
                                // Update the destination name, then fall through
                                // to the shared "call is up" handling below.
                                // Log.d("PATCH", "currDest=" + intent.getStringExtra("ID"));
                                currentDestination.postValue(intent.getStringExtra("ID"));
                            case "INPROGRESS":
                                isCalling.postValue(true);
                                audioManager.setConnected("CONNECTED".equals(state));
                                break;
                            default:
                                // Any other state (terminated etc.): tear down.
                                audioManager.setConnected(false);
                                isCalling.postValue(false);
                                break;
                        }
                        audioManager.stopRecordIncoming();
                        break;
                    }
                    case "FLOOR_STATE_CHANGED": {
                        String state = intent.getStringExtra("STATE");
                        if (state == null) break;
                        switch (state) {
                            case "TAKEN":
                                // Another speaker holds the floor: buffer them.
                                audioManager.startRecordIncoming(intent.getStringExtra("ID"));
                                break;
                            case "IDLE":
                                audioManager.stopRecordIncoming();
                                audioManager.checkOutgoing();
                                break;
                            case "GRANTED":
                                audioManager.floorGranted();
                                break;
                        }
                    }
                }
            }
        };
        context.registerReceiver(receiver, filter);
        receiverContext = context;
    }
    // Unregisters the broadcast receiver, if one is currently registered.
    private synchronized void unregisterReceiverIfAny() {
        if (receiver != null && receiverContext != null) {
            try {
                receiverContext.unregisterReceiver(receiver);
            } catch (IllegalArgumentException ignored) {
                // Receiver was already unregistered elsewhere; nothing to do.
            }
        }
        receiver = null;
        receiverContext = null;
    }
    /** Releases the receiver when the ViewModel is destroyed. */
    @Override
    protected void onCleared() {
        unregisterReceiverIfAny();
        super.onCleared();
    }
    /** Starts or stops recording the local microphone and updates the LiveData. */
    void setRecording(boolean recording) {
        if (recording) {
            audioManager.startRecordOutgoing();
        } else {
            audioManager.stopRecordOutgoing();
        }
        isRecording.postValue(recording);
    }
}
package patch;
import android.content.Context;
import android.support.v7.widget.AppCompatButton;
import android.util.AttributeSet;
import android.view.MotionEvent;
/**
 * Push-to-talk style button that reports raw press/release transitions to an
 * optional {@link OnPressListener} while still delegating normal touch
 * handling to the superclass.
 */
public class PntButton extends AppCompatButton {
    /** Callback fired with {@code true} on finger-down and {@code false} on finger-up. */
    public interface OnPressListener {
        void onPress(PntButton btn, boolean pressed);
    }
    private OnPressListener onPressListener;
    public PntButton(Context context) {
        super(context);
    }
    public PntButton(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    public PntButton(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    /** Registers the press/release callback (may be null to clear it). */
    public void setOnPressListener(OnPressListener listener) {
        onPressListener = listener;
    }
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        final int action = event.getAction();
        if (onPressListener != null) {
            if (action == MotionEvent.ACTION_DOWN) {
                onPressListener.onPress(this, true);
            } else if (action == MotionEvent.ACTION_UP) {
                onPressListener.onPress(this, false);
            }
        }
        // Default button behavior (click handling, pressed state) still applies.
        return super.onTouchEvent(event);
    }
    @Override
    public boolean performClick() {
        // Explicit pass-through override; behavior is unchanged.
        return super.performClick();
    }
}
package patch;
import android.content.Context;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
/**
 * LinearLayoutManager variant that tolerates the well-known
 * IndexOutOfBoundsException RecyclerView can throw during layout, logging it
 * instead of crashing the app.
 */
public class WrapLinearLayoutManager extends LinearLayoutManager {
    private static final String TAG = "WrapLinearLayoutManager";
    public WrapLinearLayoutManager(Context context) {
        super(context);
    }
    @Override
    public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) {
        try {
            super.onLayoutChildren(recycler, state);
        } catch (IndexOutOfBoundsException e) {
            // Swallow the framework layout glitch; a later layout pass recovers.
            Log.d(TAG, "meet a IOOBE in RecyclerView");
        }
    }
}
<!-- 24dp vector drawable tinted green (#008000). -->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
        android:width="24dp"
        android:height="24dp"
        android:tint="#008000"
        android:viewportWidth="24.0"
        android:viewportHeight="24.0">
    <path
        android:fillColor="#FF000000"
        android:pathData="M6.62,10.79c1.44,2.83 3.76,5.14 6.59,6.59l2.2,-2.2c0.27,-0.27 0.67,-0.36 1.02,-0.24 1.12,0.37 2.33,0.57 3.57,0.57 0.55,0 1,0.45 1,1V20c0,0.55 -0.45,1 -1,1 -9.39,0 -17,-7.61 -17,-17 0,-0.55 0.45,-1 1,-1h3.5c0.55,0 1,0.45 1,1 0,1.25 0.2,2.45 0.57,3.57 0.11,0.35 0.03,0.74 -0.25,1.02l-2.2,2.2z" />
</vector>
<!-- 24dp vector drawable tinted red (#C00000). -->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
        android:width="24dp"
        android:height="24dp"
        android:tint="#C00000"
        android:viewportWidth="24.0"
        android:viewportHeight="24.0">
    <path
        android:fillColor="#FF000000"
        android:pathData="M12,9c-1.6,0 -3.15,0.25 -4.6,0.72v3.1c0,0.39 -0.23,0.74 -0.56,0.9 -0.98,0.49 -1.87,1.12 -2.66,1.85 -0.18,0.18 -0.43,0.28 -0.7,0.28 -0.28,0 -0.53,-0.11 -0.71,-0.29L0.29,13.08c-0.18,-0.17 -0.29,-0.42 -0.29,-0.7 0,-0.28 0.11,-0.53 0.29,-0.71C3.34,8.78 7.46,7 12,7s8.66,1.78 11.71,4.67c0.18,0.18 0.29,0.43 0.29,0.71 0,0.28 -0.11,0.53 -0.29,0.71l-2.48,2.48c-0.18,0.18 -0.43,0.29 -0.71,0.29 -0.27,0 -0.52,-0.11 -0.7,-0.28 -0.79,-0.74 -1.69,-1.36 -2.67,-1.85 -0.33,-0.16 -0.56,-0.5 -0.56,-0.9v-3.1C15.15,9.25 13.6,9 12,9z" />
</vector>
<!-- 24dp vector drawable tinted red (#C00000); an 'X' (close/cancel) glyph. -->
<vector android:height="24dp" android:tint="#C00000"
    android:viewportHeight="24.0" android:viewportWidth="24.0"
    android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
    <path android:fillColor="#FF000000" android:pathData="M19,6.41L17.59,5 12,10.59 6.41,5 5,6.41 10.59,12 5,17.59 6.41,19 12,13.41 17.59,19 19,17.59 13.41,12z"/>
</vector>
<vector android:height="24dp" android:tint="#C00000"
android:viewportHeight="24.0" android:viewportWidth="24.0"
android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#FF000000" android:pathData="M6,19h4L10,5L6,5v14zM14,5v14h4L18,5h-4z"/>