2025-03-07 21:00:42 -05:00
parent bb17c0651e
commit 65380da39a
32 changed files with 7070 additions and 397 deletions

View File

@@ -39,6 +39,11 @@ android {
}
dependencies {
api(fileTree("libs") {
include("*.jar")
})
api(files("libs/opus.aar"))
implementation("androidx.lifecycle:lifecycle-viewmodel-compose:2.8.4")
implementation("androidx.compose.ui:ui-tooling:1.3.1")
implementation("androidx.navigation:navigation-compose:2.8.0-rc01")
@@ -47,7 +52,7 @@ dependencies {
implementation("com.google.android.horologist:horologist-audio:0.6.18")
implementation("com.google.android.horologist:horologist-compose-tools:0.6.18")
implementation("com.google.android.horologist:horologist-compose-tools:0.6.18")
implementation("com.google.android.horologist:horologist-compose-layout:0.6.18")
implementation("com.google.android.horologist:horolo+++_gist-compose-layout:0.6.18")
implementation("androidx.compose.material:material-icons-core:1.6.8")
implementation("androidx.compose.material:material-icons-extended:1.6.8")
implementation("com.google.android.horologist:horologist-compose-material:0.6.8")

BIN
wear/libs/opus.aar Normal file

Binary file not shown.

View File

@@ -1,28 +0,0 @@
package com.birdsounds.identify.presentation
import com.google.android.gms.wearable.MessageEvent
import com.google.android.gms.wearable.WearableListenerService
import java.nio.ByteBuffer
class MessageListenerService : WearableListenerService() {
private val tag = "MessageListenerService"
override fun onMessageReceived(p0: MessageEvent) {
super.onMessageReceived(p0)
val t_scored = ByteBuffer.wrap(p0.data).getLong()
var byte_strings: ByteArray = p0.data.copyOfRange(8, p0.data.size)
var score_species_string = byte_strings.decodeToString()
var list_strings: List<String> = score_species_string.split(';')
list_strings.map({
var split_str = it.split(',')
if (split_str.size == 2) {
var out = AScore(split_str[0], split_str[1].toFloat(), t_scored)
if (out.score > 0.05) {
SpeciesList.add_observation(out)
}
}
})
MessageSender.messageLog.add(t_scored)
}
}
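
The deleted listener above also documents the phone-to-watch score message layout: an 8-byte big-endian epoch-millis timestamp followed by a UTF-8 string of species,score pairs separated by ';'. A minimal sketch of both sides of that framing, assuming only the Kotlin stdlib and java.nio; ScoredSpecies is a hypothetical stand-in for the app's AScore type:

import java.nio.ByteBuffer

// Hypothetical stand-in for the app's AScore(name, score, tScored).
data class ScoredSpecies(val name: String, val score: Float, val tScored: Long)

// Pack a timestamp plus "species,score;species,score" into one message payload.
fun encodeScores(tScored: Long, scores: List<Pair<String, Float>>): ByteArray {
    val body = scores.joinToString(";") { (name, score) -> "$name,$score" }
    return ByteBuffer.allocate(Long.SIZE_BYTES).putLong(tScored).array() + body.encodeToByteArray()
}

// Mirror of the parsing the deleted listener performed.
fun decodeScores(data: ByteArray): List<ScoredSpecies> {
    val tScored = ByteBuffer.wrap(data).getLong()
    val body = data.copyOfRange(Long.SIZE_BYTES, data.size).decodeToString()
    return body.split(';')
        .map { it.split(',') }
        .filter { it.size == 2 }
        .mapNotNull { (name, score) -> score.toFloatOrNull()?.let { ScoredSpecies(name, it, tScored) } }
}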

View File

@@ -0,0 +1,140 @@
import android.content.ContentValues.TAG
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaCodecList
import android.media.MediaFormat
import android.util.Log
/**
 * Logs every MediaCodec encoder available on this device together with the
 * MIME types it supports. Handy for checking whether an Opus encoder exists
 * before committing to MIMETYPE_AUDIO_OPUS below.
 */
fun listMediaCodecEncoders() {
val codecList = MediaCodecList(MediaCodecList.ALL_CODECS) // List all codecs
val codecs = codecList.codecInfos
println("Available MediaCodec Encoders:")
for (codec in codecs) {
if (codec.isEncoder) {
Log.e(TAG, "Encoder: ${codec.name}")
// List supported types for this encoder
val supportedTypes = codec.supportedTypes
Log.w(TAG, " Supported Types:")
for (type in supportedTypes) {
Log.w(TAG, " $type")
}
}
}
}
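/**
 * Encodes raw 16-bit PCM audio (48 kHz, mono) using the platform MediaCodec API.
 * Despite the name, the current implementation targets MIMETYPE_AUDIO_OPUS, not AAC.
 * @param pcmData The raw PCM audio data to encode
 * @return ByteArray containing the encoded Opus audio data
 */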
fun encodePcmToAac(pcmData: ByteArray): ByteArray {
// Create a format for the encoder
val sampleRate = 48000
val channelCount = 1
// listMediaCodecEncoders();
val format = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_OPUS, sampleRate, channelCount)
format.setInteger(MediaFormat.KEY_BIT_RATE, 64000) // 64 kbps
// format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
// format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, pcmData.size)
// Create and configure the encoder
val codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_OPUS)
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
codec.start()
// Collect the encoded output as it becomes available
val encodedBytes = mutableListOf<Byte>()
val bufferInfo = MediaCodec.BufferInfo()
var allInputSubmitted = false
var inputOffset = 0
var presentationTimeUs = 0L
val frameSize = 1024 * channelCount * 2 // 1024 16-bit samples per channel; kept for reference, not used below
try {
while (!allInputSubmitted || (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
// Submit input data to encoder
if (!allInputSubmitted) {
val inputBufferId = codec.dequeueInputBuffer(10000)
if (inputBufferId >= 0) {
val inputBuffer = codec.getInputBuffer(inputBufferId)
inputBuffer?.clear()
// Calculate how many bytes to read
val bytesToRead = if (inputOffset < pcmData.size) {
minOf(inputBuffer!!.capacity(), pcmData.size - inputOffset)
} else {
0
}
if (bytesToRead > 0) {
// Copy data from byte array to input buffer
inputBuffer!!.put(pcmData, inputOffset, bytesToRead)
inputOffset += bytesToRead
// Calculate presentation time in microseconds
// (samples / sample rate) * 1_000_000
val samples = bytesToRead / (2 * channelCount) // 16-bit samples
presentationTimeUs += samples * 1_000_000L / sampleRate
codec.queueInputBuffer(
inputBufferId,
0,
bytesToRead,
presentationTimeUs,
0
)
} else {
// End of input data
codec.queueInputBuffer(
inputBufferId,
0,
0,
presentationTimeUs,
MediaCodec.BUFFER_FLAG_END_OF_STREAM
)
allInputSubmitted = true
}
}
}
// Get encoded data from encoder
val outputBufferId = codec.dequeueOutputBuffer(bufferInfo, 10000)
when {
outputBufferId >= 0 -> {
val outputBuffer = codec.getOutputBuffer(outputBufferId)
if (outputBuffer != null && bufferInfo.size > 0) {
// Copy encoded data to our result buffer
val encodedChunk = ByteArray(bufferInfo.size)
outputBuffer.position(bufferInfo.offset)
outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
outputBuffer.get(encodedChunk)
encodedBytes.addAll(encodedChunk.toList())
}
codec.releaseOutputBuffer(outputBufferId, false)
if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break
}
}
outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
// You might want to store the format for muxing
val newFormat = codec.outputFormat
// Log.d("MediaCodec", "Output format changed: $newFormat")
}
}
}
} finally {
codec.stop()
codec.release()
}
return encodedBytes.toByteArray()
}
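
A hedged usage sketch for encodePcmToAac above: pass it one ~3 s chunk of 48 kHz mono 16-bit PCM (the format SoundRecorder captures later in this commit) and log the size reduction. logOpusCompression is a hypothetical helper, not part of this commit:

import android.util.Log

// Illustrative helper: 3 s of 48 kHz mono 16-bit PCM is 48_000 * 2 * 3 bytes in.
fun logOpusCompression(pcm: ByteArray) {
    require(pcm.size % 2 == 0) { "Expected 16-bit PCM" }
    val encoded = encodePcmToAac(pcm) // despite the name, this emits Opus (see above)
    val ratio = 100.0 * encoded.size / pcm.size
    Log.i("OpusEncode", "PCM ${pcm.size} B -> Opus ${encoded.size} B (${"%.1f".format(ratio)}%)")
}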

View File

@@ -0,0 +1,27 @@
package com.birdsounds.identify.presentation
import com.google.android.gms.wearable.MessageEvent
import com.google.android.gms.wearable.WearableListenerService
class MessageListenerService : WearableListenerService() {
private val tag = "MessageListenerService"
override fun onMessageReceived(p0: MessageEvent) {
super.onMessageReceived(p0)
// val t_scored = ByteBuffer.wrap(p0.data).getLong()
// var byte_strings: ByteArray = p0.data.copyOfRange(8, p0.data.size)
// var score_species_string = byte_strings.decodeToString()
// var list_strings: List<String> = score_species_string.split(';')
// list_strings.map({
// var split_str = it.split(',')
// if (split_str.size == 2) {
// var out = AScore(split_str[0], split_str[1].toFloat(), t_scored)
// if (out.score > 0.05) {
// SpeciesList.add_observation(out)
// }
// }
// })
// MessageSender.messageLog.add(t_scored)
}
}

View File

@@ -10,24 +10,32 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import java.io.ByteArrayOutputStream
import java.util.concurrent.ExecutionException
import java.util.zip.GZIPOutputStream
object MessageSender {
const val tag = "MessageSender"
private val job = Job()
private val coroutineScope = CoroutineScope(Dispatchers.IO + job)
var messageLog = ConcurrentMutableSet<Long>()
fun sendMessage(path: String, message: ByteArray, context: Context) {
coroutineScope.launch {
sendMessageInBackground(path, message, context)
}
}
fun compressByteArray(input: ByteArray): ByteArray {
val outputStream = ByteArrayOutputStream()
GZIPOutputStream(outputStream).use { gzip ->
gzip.write(input) // Compress the byte array
}
return outputStream.toByteArray() // Return the compressed data
}
private fun sendMessageInBackground(path: String, message: ByteArray, context: Context) {
//first get all the nodes, ie connected wearable devices.
val nodeListTask = Wearable.getNodeClient(context).connectedNodes
@@ -38,6 +46,19 @@ object MessageSender {
if(nodes.isEmpty()) {
Log.i(tag,"No Node found to send message")
}
// var compressed_message = audio_encoder.encodePCMToAAC(message)
// Log.w(tag, "Uncompressed message size "+message.size.toString())
// Log.w(tag, "Compressed message size "+compressed_message.size.toString())
//Now send the message to each device.
for (node in nodes) {
val sendMessageTask = Wearable.getMessageClient(context)
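
compressByteArray above gzips a payload before it goes over the Wearable MessageClient; a receiver would need the inverse. A minimal counterpart using only java.util.zip, assuming the phone receives the exact bytes compressByteArray produced (decompressByteArray itself is not part of this commit):

import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.util.zip.GZIPInputStream

// Inverse of MessageSender.compressByteArray: gunzip a received payload.
fun decompressByteArray(input: ByteArray): ByteArray {
    val outputStream = ByteArrayOutputStream()
    GZIPInputStream(ByteArrayInputStream(input)).use { gzip ->
        gzip.copyTo(outputStream) // stream the decompressed bytes out
    }
    return outputStream.toByteArray()
}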

View File

@@ -1,5 +1,6 @@
package com.birdsounds.identify.presentation
import com.theeasiestway.opus.Constants
import com.theeasiestway.opus.Opus
import android.Manifest
import android.content.Context
@@ -8,6 +9,7 @@ import android.media.AudioRecord
import android.media.MediaRecorder
import android.util.Log
import androidx.annotation.RequiresPermission
import encodePcmToAac
import kotlinx.coroutines.suspendCancellableCoroutine
import java.nio.ByteBuffer
import java.time.Instant
@@ -21,6 +23,7 @@ class SoundRecorder(
outputFileName: String
) {
private val codec = Opus();
private var state = State.IDLE
private var context = context_in
@@ -63,41 +66,32 @@ class SoundRecorder(
val thread = Thread {
// var sent_first: Boolean = false
var ignore_warmup: Boolean = true
var num_chunked_since_last_send = 0
var last_tstamp: Long = Instant.now().toEpochMilli()
var do_send_message: Boolean = false
while (true) {
var last_tstamp: Long = Instant.now().toEpochMilli();
while (true) /**/{
if (Thread.interrupted()) {
// check for the interrupted flag, reset it, and throw exception
Log.w(TAG, "Finished thread")
break
}
chunk_index = chunk_index.mod(num_chunks)
if (chunk_index == 0) {
codec.encoderInit(48000, 1, Constants.Application.audio());
}
val out = audioRecord.read(
/* audioData = */ chunked_audio_bytes[chunk_index],
/* offsetInBytes = */ 0,
/* sizeInBytes = */ chunk_size,
/* readMode = */ AudioRecord.READ_BLOCKING
)
num_chunked_since_last_send += 1
do_send_message = false
if (num_chunked_since_last_send >= num_chunks) {
do_send_message = true
Log.w("MSG","sending message because full 3s have passed")
} else if ((last_tstamp in MessageSender.messageLog) && (num_chunked_since_last_send>4)) {
do_send_message = true
Log.w("MSG","Send message because the phone has finished")
} else if ((ignore_warmup) && (num_chunked_since_last_send > 2)) {
do_send_message = true
Log.w("MSG","Sent message because ignoring warmup")
}
chunk_index += 1
if ((do_send_message)) {
var tstamp: Long = Instant.now().toEpochMilli()
val tstamp_buffer = ByteBuffer.allocate(Long.SIZE_BYTES)
val tstamp_bytes = tstamp_buffer.putLong(tstamp).array()
@@ -111,9 +105,13 @@ class SoundRecorder(
byte_send += chunked_audio_bytes[c_index]
}
// do_send_message = false;
num_chunked_since_last_send = 0
// num_chunked_since_last_send = 0
// ignore_warmup = false;
MessageSender.messageLog.clear()
MessageSender.sendMessage("/audio", tstamp_bytes + byte_send, context)
val compressed = encodePcmToAac(byte_send)
Log.i(TAG,"Size pre-compression "+byte_send.size.toString())
Log.i(TAG,"Size post-compression "+compressed.size.toString())
MessageSender.sendMessage("/audio", compressed, context)
last_tstamp = tstamp
}
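
Before this change the watch sent tstamp_bytes + byte_send, i.e. an 8-byte epoch-millis header followed by raw PCM; the new path sends only the Opus-encoded bytes. A hedged sketch of splitting the old frame, with a hypothetical function name, included to make the dropped framing explicit:

import java.nio.ByteBuffer

// Hypothetical receiver-side split of the pre-change "/audio" payload:
// the first Long.SIZE_BYTES hold the capture timestamp, the rest is audio.
fun splitAudioMessage(data: ByteArray): Pair<Long, ByteArray> {
    require(data.size >= Long.SIZE_BYTES) { "Payload too short for a timestamp header" }
    val timestampMillis = ByteBuffer.wrap(data).getLong()
    val audio = data.copyOfRange(Long.SIZE_BYTES, data.size)
    return timestampMillis to audio
}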

View File

@@ -61,8 +61,6 @@ object SpeciesList {
} else {
internal_list.add(species_in)
}
internal_list = internal_list.sortedBy { it.age() }.toMutableList()