This commit is contained in:
isp
2024-09-10 19:25:20 -07:00
parent f159349cae
commit c04123ea84
24 changed files with 20588 additions and 63 deletions

View File

@@ -47,8 +47,12 @@ val Any.TAG: String
class MainActivity : ComponentActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
installSplashScreen()
super.onCreate(savedInstanceState)
setTheme(android.R.style.Theme_DeviceDefault)
setContent {
@@ -111,6 +115,7 @@ fun WearApp() {
SwipeDismissableNavHost(navController = navController, startDestination = "speaker") {
composable("speaker") {
StartRecordingScreen(
context = context,
appState = mainState.appState,
isPermissionDenied = mainState.isPermissionDenied,
onMicClicked = {

View File

@@ -2,6 +2,7 @@ package com.birdsounds.identify.presentation
import android.Manifest
import android.app.Activity
import android.content.Context
import android.content.pm.PackageManager
import android.util.Log
import androidx.annotation.RequiresPermission
@@ -9,13 +10,24 @@ import androidx.compose.foundation.MutatorMutex
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.setValue
import androidx.compose.ui.platform.LocalContext
import androidx.core.content.ContextCompat
import com.google.android.gms.tasks.Tasks
import com.google.android.gms.wearable.ChannelClient
import com.google.android.gms.wearable.ChannelClient.ChannelCallback
import com.google.android.gms.wearable.Wearable
import kotlinx.coroutines.Dispatchers.IO
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.launch
import kotlinx.coroutines.tasks.await
import kotlinx.coroutines.withContext
import java.time.Duration
import java.time.LocalDateTime
import java.util.concurrent.ExecutionException
class MainState(private val activity: Activity, private val requestPermission: () -> Unit) {
private val playbackStateMutatorMutex = MutatorMutex()
@@ -34,7 +46,9 @@ class MainState(private val activity: Activity, private val requestPermission: (
(ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) -> {
Log.e(TAG, "Permissions granted, continuing to record");
appState = AppState.Recording
record(soundRecorder = soundRecorder, setProgress = { progress -> recordingProgress = progress })
record(activity = activity, soundRecorder = soundRecorder, setProgress = { progress -> recordingProgress = progress })
}
else -> {
@@ -55,7 +69,8 @@ class MainState(private val activity: Activity, private val requestPermission: (
playbackStateMutatorMutex.mutate {
if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
appState = AppState.Recording
record(soundRecorder = soundRecorder, setProgress = { progress ->
record(activity=activity, soundRecorder = soundRecorder, setProgress = { progress ->
recordingProgress = progress
})
appState = AppState.Ready
@@ -73,22 +88,40 @@ sealed class AppState {
}
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
private suspend fun record(soundRecorder: SoundRecorder,
private suspend fun record(activity: (Activity),soundRecorder: SoundRecorder,
setProgress: (progress: Float) -> Unit,
maxRecordingDuration: Duration = Duration.ofSeconds(10),
numberTicks: Int = 10) {
val messagePath = "/message_path"
coroutineScope { // Kick off a parallel job to
Log.e(TAG, "Mock recording"); // val recordingJob = launch { soundRecorder.record() }
val ByteFlow: Flow<String> = flow{
for (i in 1..3) {
var string_send = LocalDateTime.now().toString()
emit(string_send);
delay(250);
Log.e(TAG, "Emitting " + string_send)
}
}
Log.e(TAG, "Start recording"); //
val recordingJob = launch { soundRecorder.record() }
// val recordingJob = launch { soundRecorder.record() }
// SoundRecorder.record();
// val ByteFlow: Flow<String> = flow {
// while (true) {
// var string_send = LocalDateTime.now().toString()
// emit(string_send);
// delay(250);
// Log.e(TAG, "Emitting " + string_send)
// }
// }
//
// val c_channel = channelClient.openChannel("Main","Audio");
// val channel = c_channel.await();
// val OutputStream = channelClient.getOutputStream(channel).await();
// OutputStream.write(4);
// ByteFlow.collect {
//
//
//
// Log.e(TAG, "Received " + it)};
//
// val delayPerTickMs = maxRecordingDuration.toMillis() / numberTicks
@@ -105,3 +138,9 @@ private suspend fun record(soundRecorder: SoundRecorder,
}
}
// Callback registered with the Wearable ChannelClient; currently only logs
// channel-open events (file-wide convention uses Log.e for trace output).
object channelCallback : ChannelClient.ChannelCallback() {
    override fun onChannelOpened(channel: ChannelClient.Channel) {
        super.onChannelOpened(channel)
        Log.e(TAG, "Opened channel")
    }
}

View File

@@ -0,0 +1,58 @@
package com.birdsounds.identify.presentation
import android.content.Context
import android.util.Log
import com.google.android.gms.tasks.Tasks
import com.google.android.gms.wearable.Wearable
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import java.util.concurrent.ExecutionException
/**
 * Singleton helper that delivers a message to every currently connected
 * wearable node via the Play Services [Wearable] MessageClient.
 *
 * Sends are fire-and-forget: [sendMessage] returns immediately and the
 * blocking Play Services calls run on a dedicated IO-dispatcher scope.
 */
object MessageSender {
    const val tag = "MessageSender"

    // Long-lived scope on Dispatchers.IO so blocking Tasks.await calls
    // never run on the caller's (possibly main) thread.
    private val job = Job()
    private val coroutineScope = CoroutineScope(Dispatchers.IO + job)

    /**
     * Queues [message] for delivery on [path] to all connected nodes.
     *
     * @param path    message path the receiving node listens on
     * @param message raw payload bytes
     * @param context Android context used to obtain the Wearable clients
     */
    fun sendMessage(path: String, message: ByteArray, context: Context) {
        coroutineScope.launch {
            sendMessageInBackground(path, message, context)
        }
    }

    /**
     * Resolves the connected nodes and sends [message] to each one,
     * blocking the current (background) thread on each Play Services task.
     * Failures are logged per node; one node failing does not stop the rest.
     */
    private fun sendMessageInBackground(path: String, message: ByteArray, context: Context) {
        // First get all the nodes, i.e. connected wearable devices.
        val nodeListTask = Wearable.getNodeClient(context).connectedNodes
        try {
            // Block on the task and get the result synchronously (safe:
            // we are on a background dispatcher).
            val nodes = Tasks.await(nodeListTask)
            if (nodes.isEmpty()) {
                Log.i(tag, "No Node found to send message")
            }
            // Now send the message to each device.
            for (node in nodes) {
                val sendMessageTask = Wearable.getMessageClient(context)
                    .sendMessage(node.id, path, message)
                try {
                    // Block until this node's send completes before moving on.
                    Tasks.await(sendMessageTask)
                    Log.v(tag, "SendThread: message send to " + node.displayName)
                } catch (exception: ExecutionException) {
                    Log.e(tag, "Task failed: $exception")
                } catch (exception: InterruptedException) {
                    // Restore the interrupt flag so the coroutine machinery
                    // can still observe the interruption.
                    Thread.currentThread().interrupt()
                    Log.e(tag, "Interrupt occurred: $exception")
                }
            }
        } catch (exception: ExecutionException) {
            Log.e(tag, "Task failed: $exception")
        } catch (exception: InterruptedException) {
            // Restore the interrupt flag before returning.
            Thread.currentThread().interrupt()
            Log.e(tag, "Interrupt occurred: $exception")
        }
    }
}

View File

@@ -3,27 +3,24 @@ package com.birdsounds.identify.presentation
import android.Manifest
import android.content.Context
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaRecorder
import android.util.Log
import androidx.annotation.RequiresPermission
import com.google.android.gms.wearable.Wearable
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import java.io.File
import kotlinx.coroutines.suspendCancellableCoroutine
import java.time.LocalDateTime
/**
* A helper class to provide methods to record audio input from the MIC to the internal storage.
*/
@Suppress("DEPRECATION")
class SoundRecorder(
context: Context,
context_in: Context,
outputFileName: String
) {
private val audioFile = File(context.filesDir, outputFileName)
private var state = State.IDLE
private var context = context_in
private enum class State {
IDLE, RECORDING
@@ -31,39 +28,49 @@ class SoundRecorder(
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
suspend fun record() {
suspendCancellableCoroutine<Unit> { cont ->
@Suppress("DEPRECATION")
val audioSource = MediaRecorder.AudioSource.DEFAULT
val sampleRateInHz = 48000
val channelConfig = AudioFormat.CHANNEL_IN_MONO
val audioFormat = AudioFormat.ENCODING_PCM_8BIT
val bufferSizeInBytes =
sampleRateInHz * 1 * 1; // 3 second sample, 2 bytes for each sample
val audio_bytes_array = ByteArray(bufferSizeInBytes)
val audioRecord = AudioRecord(
/* audioSource = */ audioSource,
/* sampleRateInHz = */ sampleRateInHz,
/* channelConfig = */ channelConfig,
/* audioFormat = */ audioFormat,
/* bufferSizeInBytes = */ bufferSizeInBytes
)
val thread = Thread {
while (true) {
val out = audioRecord.read(
/* audioData = */ audio_bytes_array,
/* offsetInBytes = */ 0,
/* sizeInBytes = */ bufferSizeInBytes,
/* readMode = */ AudioRecord.READ_BLOCKING
)
// val audio_u_byte = audio_bytes_array.toUByteArray();
// Log.w(TAG, audio_bytes_array.size.toString());
val str_beg = audio_bytes_array[0].toString()
val str_end = audio_bytes_array[bufferSizeInBytes-1].toString()
Log.w(TAG, str_beg + ", " + str_end);
// MessageSender.sendMessage("/audio",audio_bytes_array, context)
val mediaRecorder = MediaRecorder().apply {
setAudioSource(MediaRecorder.AudioSource.MIC)
setOutputFormat(MediaRecorder.OutputFormat.OGG)
setAudioEncoder(MediaRecorder.AudioEncoder.OPUS)
setOutputFile(audioFile.path)
setOnInfoListener { mr, what, extra ->
println("info: $mr $what $extra")
}
setOnErrorListener { mr, what, extra ->
println("error: $mr $what $extra")
}
}
};
thread.start();
cont.invokeOnCancellation {
mediaRecorder.stop()
state = State.IDLE
}
mediaRecorder.prepare()
mediaRecorder.start()
Log.e("com.birdsounds.identify","Hey I'm recording")
state = State.RECORDING
}
}

View File

@@ -1,6 +1,7 @@
package com.birdsounds.identify.presentation
import android.content.Context
import androidx.compose.runtime.Composable
import androidx.compose.ui.tooling.preview.PreviewParameter
import androidx.compose.ui.tooling.preview.datasource.CollectionPreviewParameterProvider
@@ -10,6 +11,7 @@ import com.google.android.horologist.compose.layout.ScreenScaffold
@Composable
fun StartRecordingScreen(
context: Context,
appState: AppState,
isPermissionDenied: Boolean,
onMicClicked: () -> Unit
@@ -53,16 +55,3 @@ private class PlaybackStatePreviewProvider : CollectionPreviewParameterProvider<
AppState.Ready
)
)
// Compose tooling preview: renders StartRecordingScreen across Wear device
// shapes and font scales, once per AppState supplied by
// PlaybackStatePreviewProvider. Not part of the runtime app.
// NOTE(review): this preview does not pass the `context` parameter that the
// updated StartRecordingScreen signature appears to require — these lines
// look like the pre-change (deleted) side of the diff; confirm against HEAD.
@WearPreviewDevices
@WearPreviewFontScales
@Composable
fun SpeakerScreenPreview(
    @PreviewParameter(PlaybackStatePreviewProvider::class) appState: AppState
) {
    StartRecordingScreen(
        appState = appState,
        isPermissionDenied = true,  // preview the permission-denied UI state
        onMicClicked = {}           // no-op; previews are non-interactive
    )
}