Merge pull request #3298 from nextcloud/feature/88/detectTalkingAndSendDataChannelMessage

detect when user is talking and send data channel message
Marcel Hibbe 2023-09-06 13:29:12 +02:00 committed by GitHub
commit bc7939ffd8
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 111 additions and 11 deletions

@@ -39,7 +39,10 @@ import android.graphics.Color
import android.graphics.Typeface
import android.graphics.drawable.Icon
import android.media.AudioAttributes
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaPlayer
import android.media.MediaRecorder
import android.net.Uri
import android.os.Build
import android.os.Bundle
@@ -181,6 +184,7 @@ import java.util.Objects
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import javax.inject.Inject
import kotlin.math.abs
import kotlin.math.roundToInt
@AutoInjector(NextcloudTalkApplication::class)
@@ -363,6 +367,15 @@ class CallActivity : CallBaseActivity() {
private var reactionAnimator: ReactionAnimator? = null
private var othersInCall = false
private lateinit var micInputAudioRecorder: AudioRecord
private var micInputAudioRecordThread: Thread? = null
private var isMicInputAudioThreadRunning: Boolean = false
private val bufferSize = AudioRecord.getMinBufferSize(
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT
)
@SuppressLint("ClickableViewAccessibility")
override fun onCreate(savedInstanceState: Bundle?) {
Log.d(TAG, "onCreate")
@@ -523,6 +536,19 @@ class CallActivity : CallBaseActivity() {
override fun onStop() {
super.onStop()
active = false
if (isMicInputAudioThreadRunning) {
stopMicInputDetection()
}
}
private fun stopMicInputDetection() {
if (micInputAudioRecordThread != null) {
micInputAudioRecorder.stop()
micInputAudioRecorder.release()
isMicInputAudioThreadRunning = false
micInputAudioRecordThread = null
}
}
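// Not part of this commit: an illustrative variation of the shutdown above.
// stopMicInputDetection() releases the recorder on the caller's thread while the
// reader thread may still be inside read(); a sketch that signals the loop and
// joins the thread before releasing (uses the same CallActivity fields introduced
// in this change) could look like this:
private fun stopMicInputDetectionJoiningThread() {
    if (micInputAudioRecordThread == null) return
    isMicInputAudioThreadRunning = false
    micInputAudioRecordThread?.join() // wait for the read loop to finish its current pass
    micInputAudioRecordThread = null
    micInputAudioRecorder.stop()
    micInputAudioRecorder.release()
}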
private fun enableBluetoothManager() {
@@ -999,6 +1025,8 @@ class CallActivity : CallBaseActivity() {
}
private fun microphoneInitialization() {
startMicInputDetection()
// create an AudioSource instance
audioSource = peerConnectionFactory!!.createAudioSource(audioConstraints)
localAudioTrack = peerConnectionFactory!!.createAudioTrack("NCa0", audioSource)
@@ -1006,6 +1034,62 @@ class CallActivity : CallBaseActivity() {
localStream!!.addTrack(localAudioTrack)
}
@SuppressLint("MissingPermission")
private fun startMicInputDetection() {
if (permissionUtil!!.isMicrophonePermissionGranted() && micInputAudioRecordThread == null) {
var isSpeakingLongTerm = false
micInputAudioRecorder = AudioRecord(
MediaRecorder.AudioSource.MIC,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize
)
isMicInputAudioThreadRunning = true
micInputAudioRecorder.startRecording()
micInputAudioRecordThread = Thread(
Runnable {
while (isMicInputAudioThreadRunning) {
val byteArr = ByteArray(bufferSize / 2)
micInputAudioRecorder.read(byteArr, 0, byteArr.size)
val isCurrentlySpeaking = abs(byteArr[0].toDouble()) > MICROPHONE_VALUE_THRESHOLD
if (isCurrentlySpeaking && !isSpeakingLongTerm) {
isSpeakingLongTerm = true
sendIsSpeakingMessage(true)
} else if (!isCurrentlySpeaking && isSpeakingLongTerm) {
isSpeakingLongTerm = false
sendIsSpeakingMessage(false)
}
Thread.sleep(MICROPHONE_VALUE_SLEEP)
}
}
)
micInputAudioRecordThread!!.start()
}
}
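// Not part of this commit: the loop above gauges loudness from abs(byteArr[0]),
// i.e. a single byte of the 16-bit PCM stream. Purely as an illustration, a more
// common estimate reads whole 16-bit samples and compares their RMS against a
// threshold; the helper below is a self-contained sketch of that idea (the
// 1000.0 threshold is an arbitrary example value, not taken from this change).
private fun isBufferLoud(buffer: ShortArray, samplesRead: Int, threshold: Double = 1000.0): Boolean {
    if (samplesRead <= 0) return false
    var sumOfSquares = 0.0
    for (i in 0 until samplesRead) {
        val sample = buffer[i].toDouble()
        sumOfSquares += sample * sample
    }
    // root mean square over the samples that were actually read
    return kotlin.math.sqrt(sumOfSquares / samplesRead) > threshold
}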
@Suppress("Detekt.NestedBlockDepth")
private fun sendIsSpeakingMessage(isSpeaking: Boolean) {
val isSpeakingMessage: String =
if (isSpeaking) SIGNALING_MESSAGE_SPEAKING_STARTED else SIGNALING_MESSAGE_SPEAKING_STOPPED
if (isConnectionEstablished && othersInCall) {
if (!hasMCU) {
for (peerConnectionWrapper in peerConnectionWrapperList) {
peerConnectionWrapper.sendChannelData(DataChannelMessage(isSpeakingMessage))
}
} else {
for (peerConnectionWrapper in peerConnectionWrapperList) {
if (peerConnectionWrapper.sessionId == webSocketClient!!.sessionId) {
peerConnectionWrapper.sendChannelData(DataChannelMessage(isSpeakingMessage))
break
}
}
}
}
}
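// Not part of this commit: how the remote side reacts to these data channel
// messages is outside this diff. As a hypothetical illustration only, a receiver
// could map the two type strings used above to a per-participant speaking flag:
private fun handleSpeakingMessageType(type: String?, onSpeakingChanged: (Boolean) -> Unit) {
    when (type) {
        SIGNALING_MESSAGE_SPEAKING_STARTED -> onSpeakingChanged(true)
        SIGNALING_MESSAGE_SPEAKING_STOPPED -> onSpeakingChanged(false)
        // other message types ("audioOn", "videoOff", ...) are handled elsewhere
    }
}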
private fun createCameraCapturer(enumerator: CameraEnumerator?): VideoCapturer? {
val deviceNames = enumerator!!.deviceNames
@@ -1151,10 +1235,10 @@ class CallActivity : CallBaseActivity() {
private fun toggleMedia(enable: Boolean, video: Boolean) {
var message: String
if (video) {
message = "videoOff"
message = SIGNALING_MESSAGE_VIDEO_OFF
if (enable) {
binding!!.cameraButton.alpha = OPACITY_ENABLED
message = "videoOn"
message = SIGNALING_MESSAGE_VIDEO_ON
startVideoCapture()
} else {
binding!!.cameraButton.alpha = OPACITY_DISABLED
@@ -1175,9 +1259,9 @@ class CallActivity : CallBaseActivity() {
binding!!.selfVideoRenderer.visibility = View.INVISIBLE
}
} else {
message = "audioOff"
message = SIGNALING_MESSAGE_AUDIO_OFF
if (enable) {
message = "audioOn"
message = SIGNALING_MESSAGE_AUDIO_ON
binding!!.microphoneButton.alpha = OPACITY_ENABLED
} else {
binding!!.microphoneButton.alpha = OPACITY_DISABLED
@@ -2967,7 +3051,7 @@ class CallActivity : CallBaseActivity() {
val newX = event.rawX - binding!!.selfVideoViewWrapper.width / 2f
binding!!.selfVideoViewWrapper.y = newY
binding!!.selfVideoViewWrapper.x = newX
} else if (event.actionMasked == MotionEvent.ACTION_UP && duration < 100) {
} else if (event.actionMasked == MotionEvent.ACTION_UP && duration < SWITCH_CAMERA_THRESHOLD_DURATION) {
switchCamera()
}
return true
@@ -3000,5 +3084,18 @@ class CallActivity : CallBaseActivity() {
const val CALL_TIME_ONE_HOUR = 3600
const val CALL_DURATION_EMPTY = "--:--"
const val API_RETRIES: Long = 3
const val SWITCH_CAMERA_THRESHOLD_DURATION = 100
private const val SAMPLE_RATE = 8000
private const val MICROPHONE_VALUE_THRESHOLD = 20
private const val MICROPHONE_VALUE_SLEEP: Long = 1000
private const val SIGNALING_MESSAGE_SPEAKING_STARTED = "speaking"
private const val SIGNALING_MESSAGE_SPEAKING_STOPPED = "stoppedSpeaking"
private const val SIGNALING_MESSAGE_VIDEO_ON = "videoOn"
private const val SIGNALING_MESSAGE_VIDEO_OFF = "videoOff"
private const val SIGNALING_MESSAGE_AUDIO_ON = "audioOn"
private const val SIGNALING_MESSAGE_AUDIO_OFF = "audioOff"
}
}

@@ -93,7 +93,7 @@ abstract class SharedItemsViewHolder(
item.id,
item.mimeType,
true,
FileViewerUtils.ProgressUi(progressBar, null, image),
FileViewerUtils.ProgressUi(progressBar, null, image)
)
}

@@ -358,9 +358,11 @@ class FileViewerUtils(private val context: Context, private val user: User) {
if (progressUi.previewImage.isShown && openWhenDownloaded) {
openFileByMimetype(fileName, mimetype)
} else {
Log.d(TAG, "file " + fileName +
" was downloaded but it's not opened because view is not shown on screen or " +
"openWhenDownloaded is false"
Log.d(
TAG,
"file " + fileName +
" was downloaded but it's not opened because view is not shown on screen or " +
"openWhenDownloaded is false"
)
}
progressUi.messageText?.text = fileName

@@ -33,7 +33,8 @@
android:layout_marginStart="@dimen/standard_margin"
android:layout_marginTop="@dimen/standard_eighth_margin"
android:layout_marginEnd="@dimen/standard_margin"
android:layout_marginBottom="@dimen/standard_eighth_margin">
android:layout_marginBottom="@dimen/standard_eighth_margin"
tools:ignore="UselessParent">
<ImageView
android:id="@+id/expandCollapseIcon"

@@ -1,2 +1,2 @@
DO NOT TOUCH; GENERATED BY DRONE
<span class="mdl-layout-title">Lint Report: 94 warnings</span>
<span class="mdl-layout-title">Lint Report: 93 warnings</span>