This commit is contained in:
isp
2024-09-10 19:25:20 -07:00
parent f159349cae
commit c04123ea84
24 changed files with 20588 additions and 63 deletions

View File

@@ -4,10 +4,10 @@
<selectionStates> <selectionStates>
<SelectionState runConfigName="wear"> <SelectionState runConfigName="wear">
<option name="selectionMode" value="DROPDOWN" /> <option name="selectionMode" value="DROPDOWN" />
<DropdownSelection timestamp="2024-09-04T15:12:51.439753961Z"> <DropdownSelection timestamp="2024-09-10T00:57:13.348042Z">
<Target type="DEFAULT_BOOT"> <Target type="DEFAULT_BOOT">
<handle> <handle>
<DeviceId pluginId="LocalEmulator" identifier="path=/home/isp/.config/.android/avd/Wear_OS_Large_Round_API_34.avd" /> <DeviceId pluginId="LocalEmulator" identifier="path=/Users/isp/.android/avd/Wear_OS_Large_Round_API_34.avd" />
</handle> </handle>
</Target> </Target>
</DropdownSelection> </DropdownSelection>
@@ -15,6 +15,14 @@
</SelectionState> </SelectionState>
<SelectionState runConfigName="mobile"> <SelectionState runConfigName="mobile">
<option name="selectionMode" value="DROPDOWN" /> <option name="selectionMode" value="DROPDOWN" />
<DropdownSelection timestamp="2024-09-10T01:00:38.270417Z">
<Target type="DEFAULT_BOOT">
<handle>
<DeviceId pluginId="LocalEmulator" identifier="path=/Users/isp/.android/avd/Wear_OS_Large_Round_API_34.avd" />
</handle>
</Target>
</DropdownSelection>
<DialogSelection />
</SelectionState> </SelectionState>
</selectionStates> </selectionStates>
</component> </component>

View File

@@ -37,6 +37,10 @@
<option name="composableFile" value="true" /> <option name="composableFile" value="true" />
<option name="previewFile" value="true" /> <option name="previewFile" value="true" />
</inspection_tool> </inspection_tool>
<inspection_tool class="PreviewDeviceShouldUseNewSpec" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewFontScaleMustBeGreaterThanZero" enabled="true" level="ERROR" enabled_by_default="true"> <inspection_tool class="PreviewFontScaleMustBeGreaterThanZero" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" /> <option name="composableFile" value="true" />
<option name="previewFile" value="true" /> <option name="previewFile" value="true" />

.idea/misc.xml generated
View File

@@ -1,3 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" /> <component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jbr-21" project-jdk-type="JavaSDK"> <component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jbr-21" project-jdk-type="JavaSDK">

View File

@@ -20,6 +20,8 @@ composeNavigation = "1.4.0-rc01"
media3Common = "1.4.0" media3Common = "1.4.0"
composeMaterial3 = "1.0.0-alpha23" composeMaterial3 = "1.0.0-alpha23"
workRuntimeKtx = "2.9.1" workRuntimeKtx = "2.9.1"
lifecycleRuntimeKtx = "2.6.1"
litert = "1.0.1"
[libraries] [libraries]
androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" } androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" }
@@ -47,6 +49,9 @@ androidx-compose-navigation = { group = "androidx.wear.compose", name = "compose
androidx-media3-common = { group = "androidx.media3", name = "media3-common", version.ref = "media3Common" } androidx-media3-common = { group = "androidx.media3", name = "media3-common", version.ref = "media3Common" }
androidx-compose-material3 = { group = "androidx.wear.compose", name = "compose-material3", version.ref = "composeMaterial3" } androidx-compose-material3 = { group = "androidx.wear.compose", name = "compose-material3", version.ref = "composeMaterial3" }
androidx-work-runtime-ktx = { group = "androidx.work", name = "work-runtime-ktx", version.ref = "workRuntimeKtx" } androidx-work-runtime-ktx = { group = "androidx.work", name = "work-runtime-ktx", version.ref = "workRuntimeKtx" }
androidx-lifecycle-runtime-ktx = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version.ref = "lifecycleRuntimeKtx" }
androidx-material3 = { group = "androidx.compose.material3", name = "material3" }
litert = { group = "com.google.ai.edge.litert", name = "litert", version.ref = "litert" }
[plugins] [plugins]
android-application = { id = "com.android.application", version.ref = "agp" } android-application = { id = "com.android.application", version.ref = "agp" }

View File

@@ -1,6 +1,7 @@
plugins { plugins {
alias(libs.plugins.android.application) alias(libs.plugins.android.application)
alias(libs.plugins.kotlin.android) alias(libs.plugins.kotlin.android)
alias(libs.plugins.kotlin.compose)
} }
android { android {
@@ -33,6 +34,9 @@ android {
kotlinOptions { kotlinOptions {
jvmTarget = "1.8" jvmTarget = "1.8"
} }
buildFeatures {
compose = true
}
} }
dependencies { dependencies {
@@ -42,8 +46,22 @@ dependencies {
implementation(libs.material) implementation(libs.material)
implementation(libs.androidx.activity) implementation(libs.androidx.activity)
implementation(libs.androidx.constraintlayout) implementation(libs.androidx.constraintlayout)
implementation(libs.androidx.work.runtime.ktx)
implementation(libs.androidx.lifecycle.runtime.ktx)
implementation(libs.androidx.activity.compose)
implementation(platform(libs.androidx.compose.bom))
implementation(libs.androidx.ui)
implementation("uk.me.berndporr:iirj:1.7")
implementation(libs.androidx.ui.graphics)
implementation(libs.androidx.ui.tooling.preview)
implementation(libs.androidx.material3)
implementation(libs.litert)
testImplementation(libs.junit) testImplementation(libs.junit)
androidTestImplementation(libs.androidx.junit) androidTestImplementation(libs.androidx.junit)
androidTestImplementation(libs.androidx.espresso.core) androidTestImplementation(libs.androidx.espresso.core)
androidTestImplementation(platform(libs.androidx.compose.bom))
androidTestImplementation(libs.androidx.ui.test.junit4)
debugImplementation(libs.androidx.ui.tooling)
debugImplementation(libs.androidx.ui.test.manifest)
wearApp(project(":wear")) wearApp(project(":wear"))
} }

View File

@@ -1,6 +1,10 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android" <manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"> xmlns:tools="http://schemas.android.com/tools" >
<uses-permission android:name="android.permission.INTERNET"></uses-permission>
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION"></uses-permission>
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION"></uses-permission>
<application <application
android:allowBackup="true" android:allowBackup="true"
@@ -11,16 +15,31 @@
android:roundIcon="@mipmap/ic_launcher_round" android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/Theme.Identify" android:theme="@style/Theme.Identify"
tools:targetApi="31"> tools:targetApi="31" >
<activity <activity
android:name=".MainActivity" android:name=".MainActivity"
android:exported="true"> android:exported="true" >
<intent-filter> <intent-filter>
<action android:name="android.intent.action.MAIN" /> <action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" /> <category android:name="android.intent.category.LAUNCHER" />
</intent-filter> </intent-filter>
</activity> </activity>
<service
android:name=".MessageListenerService"
android:enabled="true"
android:exported="true" >
<intent-filter>
<action android:name="com.google.android.gms.wearable.MESSAGE_RECEIVED" />
<data
android:host="*"
android:pathPrefix="/audio"
android:scheme="wear" />
</intent-filter>
</service>
</application> </application>
</manifest> </manifest>

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,189 @@
package com.birdsounds.identify;
import android.app.Activity;
import android.content.Context;
import android.util.Log;
import android.widget.Toast;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@SuppressWarnings("ResultOfMethodCallIgnored")
public class Downloader {
static final String modelFILE = "model.tflite";
static final String metaModelFILE = "metaModel.tflite";
static final String modelURL = "https://raw.githubusercontent.com/woheller69/whoBIRD-TFlite/master/BirdNET_GLOBAL_6K_V2.4_Model_FP16.tflite";
static final String model32URL = "https://raw.githubusercontent.com/woheller69/whoBIRD-TFlite/master/BirdNET_GLOBAL_6K_V2.4_Model_FP32.tflite";
static final String metaModelURL = "https://raw.githubusercontent.com/woheller69/whoBIRD-TFlite/master/BirdNET_GLOBAL_6K_V2.4_MData_Model_FP16.tflite";
static final String modelMD5 = "b1c981fe261910b473b9b7eec9ebcd4e";
static final String model32MD5 = "6c7c42106e56550fc8563adb31bc120e";
static final String metaModelMD5 = "f1a078ae0f244a1ff5a8f1ccb645c805";
public static boolean checkModels(final Activity activity) {
File modelFile = new File(activity.getDir("filesdir", Context.MODE_PRIVATE) + "/" + modelFILE);
File metaModelFile = new File(activity.getDir("filesdir", Context.MODE_PRIVATE) + "/" + metaModelFILE);
String calcModelMD5 = "";
String calcMetaModelMD5 = "";
if (modelFile.exists()) {
try {
byte[] data = Files.readAllBytes(Paths.get(modelFile.getPath()));
byte[] hash = MessageDigest.getInstance("MD5").digest(data);
calcModelMD5 = new BigInteger(1, hash).toString(16);
} catch (IOException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
if (metaModelFile.exists()) {
try {
byte[] data = Files.readAllBytes(Paths.get(metaModelFile.getPath()));
byte[] hash = MessageDigest.getInstance("MD5").digest(data);
calcMetaModelMD5 = new BigInteger(1, hash).toString(16);
} catch (IOException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
if (modelFile.exists() && !(calcModelMD5.equals(modelMD5) || calcModelMD5.equals(model32MD5))) modelFile.delete();
if (metaModelFile.exists() && !calcMetaModelMD5.equals(metaModelMD5)) metaModelFile.delete();
return (calcModelMD5.equals(modelMD5) || calcModelMD5.equals(model32MD5)) && calcMetaModelMD5.equals(metaModelMD5);
}
public static void downloadModels(final Activity activity) {
File modelFile = new File(activity.getDir("filesdir", Context.MODE_PRIVATE) + "/" + modelFILE);
Log.d("Heyy","Model file checking");
if (!modelFile.exists()) {
Log.d("whoBIRD", "model file does not exist");
Thread thread = new Thread(() -> {
try {
// FP16 model by default; use model32URL here to fetch the FP32 variant instead.
URL url = new URL(modelURL);
Log.d("whoBIRD", "Download model");
URLConnection ucon = url.openConnection();
ucon.setReadTimeout(5000);
ucon.setConnectTimeout(10000);
InputStream is = ucon.getInputStream();
BufferedInputStream inStream = new BufferedInputStream(is, 1024 * 5);
modelFile.createNewFile();
FileOutputStream outStream = new FileOutputStream(modelFile);
byte[] buff = new byte[5 * 1024];
int len;
while ((len = inStream.read(buff)) != -1) {
outStream.write(buff, 0, len);
}
outStream.flush();
outStream.close();
inStream.close();
String calcModelMD5 = "";
if (modelFile.exists()) {
byte[] data = Files.readAllBytes(Paths.get(modelFile.getPath()));
byte[] hash = MessageDigest.getInstance("MD5").digest(data);
calcModelMD5 = new BigInteger(1, hash).toString(16);
} else {
throw new IOException(); //throw exception if there is no modelFile at this point
}
if (!(calcModelMD5.equals(modelMD5) || calcModelMD5.equals(model32MD5))) {
modelFile.delete();
activity.runOnUiThread(() -> {
Toast.makeText(activity, activity.getResources().getString(R.string.error_download), Toast.LENGTH_SHORT).show();
});
}
} catch (NoSuchAlgorithmException | IOException i) {
activity.runOnUiThread(() -> Toast.makeText(activity, activity.getResources().getString(R.string.error_download), Toast.LENGTH_SHORT).show());
modelFile.delete();
Log.w("whoBIRD", activity.getResources().getString(R.string.error_download), i);
}
});
thread.start();
} else {
Log.d("whoBIRD", "model exists");
}
File metaModelFile = new File(activity.getDir("filesdir", Context.MODE_PRIVATE) + "/" + metaModelFILE);
if (!metaModelFile.exists()) {
Log.d("whoBIRD", "meta model file does not exist");
Thread thread = new Thread(() -> {
try {
URL url = new URL(metaModelURL);
Log.d("whoBIRD", "Download meta model");
URLConnection ucon = url.openConnection();
ucon.setReadTimeout(5000);
ucon.setConnectTimeout(10000);
InputStream is = ucon.getInputStream();
BufferedInputStream inStream = new BufferedInputStream(is, 1024 * 5);
metaModelFile.createNewFile();
FileOutputStream outStream = new FileOutputStream(metaModelFile);
byte[] buff = new byte[5 * 1024];
int len;
while ((len = inStream.read(buff)) != -1) {
outStream.write(buff, 0, len);
}
outStream.flush();
outStream.close();
inStream.close();
String calcMetaModelMD5 = "";
if (metaModelFile.exists()) {
byte[] data = Files.readAllBytes(Paths.get(metaModelFile.getPath()));
byte[] hash = MessageDigest.getInstance("MD5").digest(data);
calcMetaModelMD5 = new BigInteger(1, hash).toString(16);
} else {
throw new IOException(); //throw exception if there is no modelFile at this point
}
if (!calcMetaModelMD5.equals(metaModelMD5)){
metaModelFile.delete();
activity.runOnUiThread(() -> {
Toast.makeText(activity, activity.getResources().getString(R.string.error_download), Toast.LENGTH_SHORT).show();
});
}
} catch (NoSuchAlgorithmException | IOException i) {
activity.runOnUiThread(() -> Toast.makeText(activity, activity.getResources().getString(R.string.error_download), Toast.LENGTH_SHORT).show());
metaModelFile.delete();
Log.w("whoBIRD", activity.getResources().getString(R.string.error_download), i);
}
});
thread.start();
} else {
Log.d("whoBIRD", "meta file exists");
}
}
}
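checkModels and downloadModels repeat the same download-then-verify sequence for the model and the meta model. Below is a compact Kotlin sketch of a shared helper that both paths could delegate to (hypothetical; not part of this commit). One caveat carried over from the code above: BigInteger.toString(16) drops leading zeros, so a digest whose first byte is zero would never match; the sketch pads to 32 characters to avoid that.

import java.io.File
import java.math.BigInteger
import java.net.URL
import java.security.MessageDigest

// Download `url` into `target`, then verify the file's MD5 against the accepted digests.
// Returns true on success; deletes the partial or corrupt file and returns false otherwise.
fun downloadAndVerify(url: String, target: File, acceptedMd5: Set<String>): Boolean = try {
    URL(url).openConnection().apply {
        connectTimeout = 10_000
        readTimeout = 5_000
    }.getInputStream().use { input ->
        target.outputStream().use { output -> input.copyTo(output) }
    }
    val digest = MessageDigest.getInstance("MD5").digest(target.readBytes())
    // Pad so digests with leading zero bytes still compare correctly.
    val md5 = BigInteger(1, digest).toString(16).padStart(32, '0')
    (md5 in acceptedMd5).also { ok -> if (!ok) target.delete() }
} catch (e: Exception) {
    target.delete()
    false
}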

View File

@@ -0,0 +1,59 @@
package com.birdsounds.identify;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.widget.Toast;
import androidx.core.app.ActivityCompat;
public class Location {
private static LocationListener locationListenerGPS;
static void stopLocation(Context context){
LocationManager locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
if (locationListenerGPS != null) locationManager.removeUpdates(locationListenerGPS);
locationListenerGPS = null;
}
static void requestLocation(Context context, SoundClassifier soundClassifier) {
if (ActivityCompat.checkSelfPermission(context, Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED && checkLocationProvider(context)) {
LocationManager locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
if (locationListenerGPS == null) locationListenerGPS = new LocationListener() {
@Override
public void onLocationChanged(android.location.Location location) {
soundClassifier.runMetaInterpreter(location);
}
@Deprecated
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
}
@Override
public void onProviderEnabled(String provider) {
}
@Override
public void onProviderDisabled(String provider) {
}
};
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 60000, 0, locationListenerGPS);
}
}
public static boolean checkLocationProvider(Context context) {
LocationManager locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
if (!locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)){
Toast.makeText(context, "Error no GPS", Toast.LENGTH_SHORT).show();
return false;
} else {
return true;
}
}
}

View File

@@ -1,20 +1,57 @@
package com.birdsounds.identify package com.birdsounds.identify
import android.content.pm.PackageManager
import android.os.Bundle import android.os.Bundle
import android.Manifest
import android.util.Log
import androidx.activity.enableEdgeToEdge import androidx.activity.enableEdgeToEdge
import androidx.appcompat.app.AppCompatActivity import androidx.appcompat.app.AppCompatActivity
import androidx.core.content.ContextCompat
import androidx.core.view.ViewCompat import androidx.core.view.ViewCompat
import androidx.core.view.WindowInsetsCompat import androidx.core.view.WindowInsetsCompat
import com.google.android.gms.wearable.ChannelClient
import com.google.android.gms.wearable.Wearable
class MainActivity : AppCompatActivity() { class MainActivity : AppCompatActivity() {
private lateinit var soundClassifier: SoundClassifier
override fun onCreate(savedInstanceState: Bundle?) { override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState) super.onCreate(savedInstanceState)
enableEdgeToEdge() enableEdgeToEdge()
setContentView(R.layout.activity_main) setContentView(R.layout.activity_main)
Wearable.getChannelClient(this.applicationContext)
.registerChannelCallback(object : ChannelClient.ChannelCallback() {
override fun onChannelOpened(channel: ChannelClient.Channel) {
super.onChannelOpened(channel)
Log.d("HEY", "onChannelOpened")
}
}
)
Downloader.downloadModels(this) // asynchronous: on a first run the classifier below may not find the model yet
requestPermissions()
soundClassifier = SoundClassifier(this, SoundClassifier.Options())
Location.requestLocation(this, soundClassifier)
ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets -> ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets ->
val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars()) val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars())
v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom) v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom)
insets insets
} }
} }
companion object {
const val REQUEST_PERMISSIONS = 1337
}
private fun requestPermissions() {
val perms = mutableListOf<String>()
if (ContextCompat.checkSelfPermission(
this,
Manifest.permission.ACCESS_COARSE_LOCATION
) != PackageManager.PERMISSION_GRANTED
) {
perms.add(Manifest.permission.ACCESS_COARSE_LOCATION)
perms.add(Manifest.permission.ACCESS_FINE_LOCATION)
}
if (perms.isNotEmpty()) requestPermissions(perms.toTypedArray(), REQUEST_PERMISSIONS)
}
} }
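The activity requests the location permissions but never observes the result, so a first-run grant only takes effect on the next launch. A minimal sketch of the matching callback (hypothetical; not in this commit) that re-issues the location request once a permission is granted:

override fun onRequestPermissionsResult(
    requestCode: Int,
    permissions: Array<out String>,
    grantResults: IntArray
) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults)
    if (requestCode == REQUEST_PERMISSIONS &&
        grantResults.any { it == PackageManager.PERMISSION_GRANTED }
    ) {
        // Location is now available: start feeding fixes to the meta model.
        Location.requestLocation(this, soundClassifier)
    }
}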

View File

@@ -0,0 +1,8 @@
package com.birdsounds.identify
object MessageConstants {
const val intentName = "WearableMessageDisplay"
const val message = "Message"
const val path = "audio"
}

View File

@@ -0,0 +1,20 @@
package com.birdsounds.identify
import android.content.Intent
import android.util.Log
import androidx.localbroadcastmanager.content.LocalBroadcastManager
import com.google.android.gms.wearable.MessageEvent
import com.google.android.gms.wearable.WearableListenerService
class MessageListenerService : WearableListenerService() {
private val tag = "MessageListenerService"
override fun onMessageReceived(p0: MessageEvent) {
super.onMessageReceived(p0)
// Log.i(tag ,p0.data.toString(Charsets.UTF_8))
// Log.i(tag, p0.data.toString(Charsets.US_ASCII))
// broadcastMessage(p0)
}
}
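The commented-out broadcastMessage call, together with the LocalBroadcastManager import, suggests the received payload is meant to be relayed to the UI. A sketch of what that helper might look like using the MessageConstants keys (hypothetical; the helper's body is not part of this commit):

private fun broadcastMessage(event: MessageEvent) {
    // Forward the wearable payload to any in-process receiver
    // registered for MessageConstants.intentName.
    val intent = Intent(MessageConstants.intentName)
        .putExtra(MessageConstants.message, event.data)
    LocalBroadcastManager.getInstance(this).sendBroadcast(intent)
}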

View File

@@ -0,0 +1,388 @@
package com.birdsounds.identify
import android.content.Context
import android.location.Location
import android.os.SystemClock
import android.preference.PreferenceManager
import android.util.Log
import org.tensorflow.lite.Interpreter
import java.io.BufferedReader
import java.io.File
import java.io.IOException
import java.io.InputStreamReader
import java.nio.ByteBuffer
import java.nio.FloatBuffer
import java.nio.channels.FileChannel
import java.nio.file.StandardOpenOption
import java.time.LocalDate
import java.util.Locale
import java.util.TimerTask
import java.util.Timer
import kotlin.concurrent.scheduleAtFixedRate
import kotlin.math.ceil
import kotlin.math.cos
import uk.me.berndporr.iirj.Butterworth
import kotlin.math.round
import kotlin.math.sin
class SoundClassifier(
context: Context,
private val options: Options = Options()
) {
internal val mContext: Context = context.applicationContext
class Options(
/** Path of the converted model label file, relative to the assets/ directory. */
val labelsBase: String = "labels",
/** Path of the asset list file, relative to the assets/ directory. */
val assetFile: String = "assets.txt",
/** Path of the converted .tflite file, relative to the assets/ directory. */
val modelPath: String = "model.tflite",
/** Path of the meta model .tflite file, relative to the assets/ directory. */
val metaModelPath: String = "metaModel.tflite",
/** The required audio sample rate in Hz. */
val sampleRate: Int = 48000,
/** Number of warm-up inference runs performed at startup. */
val warmupRuns: Int = 3,
/** Probability above which a class in the meta model is treated as active (i.e., detected). (default 0.01) */
var metaProbabilityThreshold1: Float = 0.01f, //min must be > 0
var metaProbabilityThreshold2: Float = 0.008f, //min must be > 0
var metaProbabilityThreshold3: Float = 0.001f, //min must be > 0
/** Probability value above which a class is shown as image. (default 0.5) */
var displayImageThreshold: Float = 0.65f, //min must be > 0
)
/** Names of the model's output classes. */
lateinit var labelList: List<String>
/** Entries read from the asset list file. */
lateinit var assetList: List<String>
/** How many milliseconds between consecutive model inference calls. */
private var inferenceInterval = 800L
/** The TFLite interpreter instance. */
private lateinit var interpreter: Interpreter
private lateinit var meta_interpreter: Interpreter
/** Audio length (in # of PCM samples) required by the TFLite model. */
private var modelInputLength = 0
/** input Length of the meta model */
private var metaModelInputLength = 0
/** Number of output classes of the TFLite model. */
private var modelNumClasses = 0
private var metaModelNumClasses = 0
/** Used to hold the real-time probabilities predicted by the model for the output classes. */
private lateinit var predictionProbs: FloatArray
private lateinit var metaPredictionProbs: FloatArray
/** Latest prediction latency in milliseconds. */
private var latestPredictionLatencyMs = 0f
private var recognitionTask: TimerTask? = null
/** Buffer that holds audio PCM sample that are fed to the TFLite model for inference. */
private lateinit var inputBuffer: FloatBuffer
private lateinit var metaInputBuffer: FloatBuffer
init {
loadLabels(context)
loadAssetList(context)
setupInterpreter(context)
setupMetaInterpreter(context)
warmUpModel()
}
/** Retrieve asset list from the file named by Options.assetFile */
private fun loadAssetList(context: Context) {
try {
val reader =
BufferedReader(InputStreamReader(context.assets.open(options.assetFile)))
val wordList = mutableListOf<String>()
reader.useLines { lines ->
lines.forEach {
wordList.add(it.trim())
}
}
assetList = wordList
} catch (e: IOException) {
Log.e(TAG, "Failed to read labels ${options.assetFile}: ${e.message}")
}
}
/** Retrieve labels from "labels.txt" file */
private fun loadLabels(context: Context) {
val localeList = context.resources.configuration.locales
val language = localeList.get(0).language
var filename = options.labelsBase + "_${language}.txt"
//Check if file exists
val assetManager = context.assets
try {
val mapList = assetManager.list("")?.toMutableList()
if (mapList != null) {
if (!mapList.contains(filename)) {
filename = options.labelsBase + "_en.txt"
}
}
} catch (ex: IOException) {
ex.printStackTrace()
filename = options.labelsBase + "_en.txt"
}
Log.i(TAG, "Using label file: $filename")
try {
val reader =
BufferedReader(InputStreamReader(context.assets.open(filename)))
val wordList = mutableListOf<String>()
reader.useLines { lines ->
lines.forEach {
wordList.add(it)
}
}
labelList = wordList.map { it.toTitleCase() }
Log.i(TAG, "Label list entries: ${labelList.size}")
} catch (e: IOException) {
Log.e(TAG, "Failed to read labels ${filename}: ${e.message}")
}
}
private fun setupInterpreter(context: Context) {
try {
val modelFilePath = context.getDir("filesdir", Context.MODE_PRIVATE).absolutePath + "/"+ options.modelPath
Log.i(TAG, "Trying to create TFLite buffer from $modelFilePath")
val modelFile = File(modelFilePath)
val tfliteBuffer: ByteBuffer = FileChannel.open(modelFile.toPath(), StandardOpenOption.READ).use { channel ->
channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size())
}
Log.i(TAG, "Done creating TFLite buffer from $modelFilePath")
interpreter = Interpreter(tfliteBuffer, Interpreter.Options())
} catch (e: IOException) {
Log.e(TAG, "Failed to load TFLite model - ${e.message}")
return
}
// Inspect input and output specs.
val inputShape = interpreter.getInputTensor(0).shape()
Log.i(TAG, "TFLite model input shape: ${inputShape.contentToString()}")
modelInputLength = inputShape[1]
val outputShape = interpreter.getOutputTensor(0).shape()
Log.i(TAG, "TFLite output shape: ${outputShape.contentToString()}")
modelNumClasses = outputShape[1]
if (modelNumClasses != labelList.size) {
Log.e(
TAG,
"Mismatch between metadata number of classes (${labelList.size})" +
" and model output length ($modelNumClasses)"
)
}
// Fill the array with NaNs initially.
predictionProbs = FloatArray(modelNumClasses) { Float.NaN }
inputBuffer = FloatBuffer.allocate(modelInputLength)
}
private fun setupMetaInterpreter(context: Context) {
try {
val metaModelFilePath = context.getDir("filesdir", Context.MODE_PRIVATE).absolutePath + "/"+ options.metaModelPath
Log.i(TAG, "Trying to create TFLite buffer from $metaModelFilePath")
val metaModelFile = File(metaModelFilePath)
val tfliteBuffer: ByteBuffer = FileChannel.open(metaModelFile.toPath(), StandardOpenOption.READ).use { channel ->
channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size())
}
Log.i(TAG, "Done creating TFLite buffer from $metaModelFilePath")
meta_interpreter = Interpreter(tfliteBuffer, Interpreter.Options())
} catch (e: IOException) {
Log.e(TAG, "Failed to load TFLite model - ${e.message}")
return
}
// Inspect input and output specs.
val metaInputShape = meta_interpreter.getInputTensor(0).shape()
Log.i(TAG, "TFLite meta model input shape: ${metaInputShape.contentToString()}")
metaModelInputLength = metaInputShape[1]
val metaOutputShape = meta_interpreter.getOutputTensor(0).shape()
Log.i(TAG, "TFLite meta model output shape: ${metaOutputShape.contentToString()}")
metaModelNumClasses = metaOutputShape[1]
if (metaModelNumClasses != labelList.size) {
Log.e(
TAG,
"Mismatch between metadata number of classes (${labelList.size})" +
" and meta model output length ($metaModelNumClasses)"
)
}
// Fill the array with 1 initially.
metaPredictionProbs = FloatArray(metaModelNumClasses) { 1f }
metaInputBuffer = FloatBuffer.allocate(metaModelInputLength)
}
fun runMetaInterpreter(location: Location) {
val dayOfYear = LocalDate.now().dayOfYear
val week = ceil(dayOfYear * 48.0 / 366.0) // model year has 48 weeks
lat = location.latitude.toFloat()
lon = location.longitude.toFloat()
val weekMeta = cos(Math.toRadians(week * 7.5)) + 1.0 // 48 weeks × 7.5° = 360°: one full cosine cycle over the year, shifted into [0, 2]
metaInputBuffer.put(0, lat)
metaInputBuffer.put(1, lon)
metaInputBuffer.put(2, weekMeta.toFloat())
metaInputBuffer.rewind() // Reset position to beginning of buffer
val metaOutputBuffer = FloatBuffer.allocate(metaModelNumClasses)
metaOutputBuffer.rewind()
meta_interpreter.run(metaInputBuffer, metaOutputBuffer)
metaOutputBuffer.rewind()
metaOutputBuffer.get(metaPredictionProbs) // Copy data to metaPredictionProbs.
for (i in metaPredictionProbs.indices) {
metaPredictionProbs[i] =
if (metaPredictionProbs[i] >= options.metaProbabilityThreshold1) {
1f
} else if (metaPredictionProbs[i] >= options.metaProbabilityThreshold2) {
0.8f
} else if (metaPredictionProbs[i] >= options.metaProbabilityThreshold3) {
0.5f
} else {
0f
}
}
}
private fun warmUpModel() {
generateDummyAudioInput(inputBuffer)
for (n in 0 until options.warmupRuns) {
// Create input and output buffers.
val outputBuffer = FloatBuffer.allocate(modelNumClasses)
inputBuffer.rewind()
outputBuffer.rewind()
interpreter.run(inputBuffer, outputBuffer)
}
}
private fun generateDummyAudioInput(inputBuffer: FloatBuffer) {
val twoPiTimesFreq = 2 * Math.PI.toFloat() * 1000f
for (i in 0 until modelInputLength) {
val x = i.toFloat() / (modelInputLength - 1)
inputBuffer.put(i, sin(twoPiTimesFreq * x.toDouble()).toFloat())
}
}
private fun String.toTitleCase() =
splitToSequence("_")
.map { word -> word.replaceFirstChar { ch -> if (ch.isLowerCase()) ch.titlecase(Locale.ROOT) else ch.toString() } }
.joinToString("_")
.trim()
companion object {
private const val TAG = "SoundClassifier"
var lat: Float = 0.0f
var lon: Float = 0.0f
/** Number of nanoseconds in a millisecond */
private const val NANOS_IN_MILLIS = 1_000_000.toDouble()
}
private fun startRecognition() {
if (modelInputLength <= 0 || modelNumClasses <= 0) {
Log.e(TAG, "Switches: Cannot start recognition because model is unavailable.")
return
}
val sharedPref = PreferenceManager.getDefaultSharedPreferences(mContext)
val highPass = sharedPref.getInt("high_pass",0)
val butterworth = Butterworth()
butterworth.highPass(6, 48000.0, highPass.toDouble())
val circularBuffer = ShortArray(modelInputLength)
var j = 0 // Index of the next write position in the circular buffer
Log.w(TAG, "recognitionPeriod:"+inferenceInterval)
recognitionTask = Timer().scheduleAtFixedRate(inferenceInterval, inferenceInterval) task@{
val outputBuffer = FloatBuffer.allocate(modelNumClasses)
val recordingBuffer = ShortArray(modelInputLength)
// Load new audio samples
// val sampleCounts = loadAudio(recordingBuffer)
val sampleCounts = 0 // TODO: audio capture not wired up yet, so inference below never runs
if (sampleCounts == 0) {
return@task
}
// Copy new data into the circular buffer
for (i in 0 until sampleCounts) {
circularBuffer[j] = recordingBuffer[i]
j = (j + 1) % circularBuffer.size
}
// Feed data to the input buffer.
var samplesAreAllZero = true
for (i in 0 until modelInputLength) {
val s = circularBuffer[(i + j) % modelInputLength]
if (samplesAreAllZero && s.toInt() != 0) {
samplesAreAllZero = false
}
if (highPass==0) inputBuffer.put(i, s.toFloat())
else inputBuffer.put(i, butterworth.filter(s.toDouble()).toFloat())
}
if (samplesAreAllZero) {
Log.w(TAG, mContext.resources.getString(R.string.samples_zero))
return@task
}
val t0 = SystemClock.elapsedRealtimeNanos()
inputBuffer.rewind()
outputBuffer.rewind()
interpreter.run(inputBuffer, outputBuffer)
outputBuffer.rewind()
outputBuffer.get(predictionProbs) // Copy data to predictionProbs.
val probList = mutableListOf<Float>()
// if (mBinding.checkIgnoreMeta.isChecked){
// for (value in predictionProbs) {
// probList.add(1 / (1 + kotlin.math.exp(-value))) //apply sigmoid
// }
// } else {
for (i in predictionProbs.indices) {
probList.add( metaPredictionProbs[i] / (1+kotlin.math.exp(-predictionProbs[i])) ) //apply sigmoid
}
// }
// if (mBinding.progressHorizontal.isIndeterminate){ //if start/stop button set to "running"
// probList.withIndex().also {
// val max = it.maxByOrNull { entry -> entry.value }
// updateTextView(max, mBinding.text1)
// updateImage(max)
//after finding the maximum probability and its corresponding label (max), we filter out that entry from the list of entries before finding the second highest probability (secondMax)
// val secondMax = it.filterNot { entry -> entry == max }.maxByOrNull { entry -> entry.value }
// updateTextView(secondMax,mBinding.text2)
// }
// }
latestPredictionLatencyMs =
((SystemClock.elapsedRealtimeNanos() - t0) / NANOS_IN_MILLIS).toFloat()
}
}
}
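As committed, startRecognition() is inert: sampleCounts is pinned to 0, so every timer tick returns before inference. The commented-out loadAudio(recordingBuffer) call marks the missing piece; one plausible shape for it, assuming an already-started android.media.AudioRecord (both the method and the audioRecord field are assumptions, not code from this commit):

// Drain available 16-bit PCM samples into `buffer`; return the number
// of samples read, or 0 when nothing was available or an error occurred.
private fun loadAudio(buffer: ShortArray): Int {
    val read = audioRecord.read(buffer, 0, buffer.size, AudioRecord.READ_NONBLOCKING)
    return if (read > 0) read else 0
}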

View File

@@ -0,0 +1,11 @@
package com.birdsounds.identify.ui.theme
import androidx.compose.ui.graphics.Color
val Purple80 = Color(0xFFD0BCFF)
val PurpleGrey80 = Color(0xFFCCC2DC)
val Pink80 = Color(0xFFEFB8C8)
val Purple40 = Color(0xFF6650a4)
val PurpleGrey40 = Color(0xFF625b71)
val Pink40 = Color(0xFF7D5260)

View File

@@ -0,0 +1,57 @@
package com.birdsounds.identify.ui.theme
import android.app.Activity
import android.os.Build
import androidx.compose.foundation.isSystemInDarkTheme
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.darkColorScheme
import androidx.compose.material3.dynamicDarkColorScheme
import androidx.compose.material3.dynamicLightColorScheme
import androidx.compose.material3.lightColorScheme
import androidx.compose.runtime.Composable
import androidx.compose.ui.platform.LocalContext
private val DarkColorScheme = darkColorScheme(
primary = Purple80,
secondary = PurpleGrey80,
tertiary = Pink80
)
private val LightColorScheme = lightColorScheme(
primary = Purple40,
secondary = PurpleGrey40,
tertiary = Pink40
/* Other default colors to override
background = Color(0xFFFFFBFE),
surface = Color(0xFFFFFBFE),
onPrimary = Color.White,
onSecondary = Color.White,
onTertiary = Color.White,
onBackground = Color(0xFF1C1B1F),
onSurface = Color(0xFF1C1B1F),
*/
)
@Composable
fun IdentifyTheme(
darkTheme: Boolean = isSystemInDarkTheme(),
// Dynamic color is available on Android 12+
dynamicColor: Boolean = true,
content: @Composable () -> Unit
) {
val colorScheme = when {
dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S -> {
val context = LocalContext.current
if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context)
}
darkTheme -> DarkColorScheme
else -> LightColorScheme
}
MaterialTheme(
colorScheme = colorScheme,
typography = Typography,
content = content
)
}

View File

@@ -0,0 +1,34 @@
package com.birdsounds.identify.ui.theme
import androidx.compose.material3.Typography
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.sp
// Set of Material typography styles to start with
val Typography = Typography(
bodyLarge = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Normal,
fontSize = 16.sp,
lineHeight = 24.sp,
letterSpacing = 0.5.sp
)
/* Other default text styles to override
titleLarge = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Normal,
fontSize = 22.sp,
lineHeight = 28.sp,
letterSpacing = 0.sp
),
labelSmall = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Medium,
fontSize = 11.sp,
lineHeight = 16.sp,
letterSpacing = 0.5.sp
)
*/
)

View File

@@ -1,3 +1,6 @@
<resources> <resources>
<string name="app_name">identify</string> <string name="app_name">identify</string>
<string name="title_activity_download">DownloadActivity</string>
<string name="error_download">Download Error</string>
<string name="samples_zero">Samples are all zero</string>
</resources> </resources>

View File

@@ -47,8 +47,12 @@ val Any.TAG: String
class MainActivity : ComponentActivity() { class MainActivity : ComponentActivity() {
override fun onCreate(savedInstanceState: Bundle?) { override fun onCreate(savedInstanceState: Bundle?) {
installSplashScreen() installSplashScreen()
super.onCreate(savedInstanceState) super.onCreate(savedInstanceState)
setTheme(android.R.style.Theme_DeviceDefault) setTheme(android.R.style.Theme_DeviceDefault)
setContent { setContent {
@@ -111,6 +115,7 @@ fun WearApp() {
SwipeDismissableNavHost(navController = navController, startDestination = "speaker") { SwipeDismissableNavHost(navController = navController, startDestination = "speaker") {
composable("speaker") { composable("speaker") {
StartRecordingScreen( StartRecordingScreen(
context = context,
appState = mainState.appState, appState = mainState.appState,
isPermissionDenied = mainState.isPermissionDenied, isPermissionDenied = mainState.isPermissionDenied,
onMicClicked = { onMicClicked = {

View File

@@ -2,6 +2,7 @@ package com.birdsounds.identify.presentation
import android.Manifest import android.Manifest
import android.app.Activity import android.app.Activity
import android.content.Context
import android.content.pm.PackageManager import android.content.pm.PackageManager
import android.util.Log import android.util.Log
import androidx.annotation.RequiresPermission import androidx.annotation.RequiresPermission
@@ -9,13 +10,24 @@ import androidx.compose.foundation.MutatorMutex
import androidx.compose.runtime.getValue import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.setValue import androidx.compose.runtime.setValue
import androidx.compose.ui.platform.LocalContext
import androidx.core.content.ContextCompat import androidx.core.content.ContextCompat
import com.google.android.gms.tasks.Tasks
import com.google.android.gms.wearable.ChannelClient
import com.google.android.gms.wearable.ChannelClient.ChannelCallback
import com.google.android.gms.wearable.Wearable
import kotlinx.coroutines.Dispatchers.IO
import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.launch
import kotlinx.coroutines.tasks.await
import kotlinx.coroutines.withContext
import java.time.Duration import java.time.Duration
import java.time.LocalDateTime import java.time.LocalDateTime
import java.util.concurrent.ExecutionException
class MainState(private val activity: Activity, private val requestPermission: () -> Unit) { class MainState(private val activity: Activity, private val requestPermission: () -> Unit) {
private val playbackStateMutatorMutex = MutatorMutex() private val playbackStateMutatorMutex = MutatorMutex()
@@ -34,7 +46,9 @@ class MainState(private val activity: Activity, private val requestPermission: (
(ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) -> { (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) -> {
Log.e(TAG, "Permissions granted, continuing to record"); Log.e(TAG, "Permissions granted, continuing to record");
appState = AppState.Recording appState = AppState.Recording
record(soundRecorder = soundRecorder, setProgress = { progress -> recordingProgress = progress })
record(activity = activity, soundRecorder = soundRecorder, setProgress = { progress -> recordingProgress = progress })
} }
else -> { else -> {
@@ -55,7 +69,8 @@ class MainState(private val activity: Activity, private val requestPermission: (
playbackStateMutatorMutex.mutate { playbackStateMutatorMutex.mutate {
if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) { if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
appState = AppState.Recording appState = AppState.Recording
record(soundRecorder = soundRecorder, setProgress = { progress ->
record(activity = activity, soundRecorder = soundRecorder, setProgress = { progress ->
recordingProgress = progress recordingProgress = progress
}) })
appState = AppState.Ready appState = AppState.Ready
@@ -73,22 +88,40 @@ sealed class AppState {
} }
@RequiresPermission(Manifest.permission.RECORD_AUDIO) @RequiresPermission(Manifest.permission.RECORD_AUDIO)
private suspend fun record(soundRecorder: SoundRecorder, private suspend fun record(activity: Activity, soundRecorder: SoundRecorder,
setProgress: (progress: Float) -> Unit, setProgress: (progress: Float) -> Unit,
maxRecordingDuration: Duration = Duration.ofSeconds(10), maxRecordingDuration: Duration = Duration.ofSeconds(10),
numberTicks: Int = 10) { numberTicks: Int = 10) {
val messagePath = "/message_path"
coroutineScope { // Kick off a parallel job to coroutineScope { // Kick off a parallel job to
Log.e(TAG, "Mock recording"); // val recordingJob = launch { soundRecorder.record() } Log.e(TAG, "Start recording"); //
val ByteFlow: Flow<String> = flow{ val recordingJob = launch { soundRecorder.record() }
for (i in 1..3) { // val recordingJob = launch { soundRecorder.record() }
var string_send = LocalDateTime.now().toString() // SoundRecorder.record();
emit(string_send); // val ByteFlow: Flow<String> = flow {
delay(250); // while (true) {
Log.e(TAG, "Emitting " + string_send) // var string_send = LocalDateTime.now().toString()
} // emit(string_send);
} // delay(250);
// Log.e(TAG, "Emitting " + string_send)
// }
// }
//
// val c_channel = channelClient.openChannel("Main","Audio");
// val channel = c_channel.await();
// val OutputStream = channelClient.getOutputStream(channel).await();
// OutputStream.write(4);
// ByteFlow.collect {
//
//
//
// Log.e(TAG, "Received " + it)};
// //
// val delayPerTickMs = maxRecordingDuration.toMillis() / numberTicks // val delayPerTickMs = maxRecordingDuration.toMillis() / numberTicks
@@ -105,3 +138,9 @@ private suspend fun record(soundRecorder: SoundRecorder,
} }
} }
object channelCallback : ChannelClient.ChannelCallback() {
override fun onChannelOpened(channel: ChannelClient.Channel) {
super.onChannelOpened(channel)
Log.e(TAG, "Opened channel")
}
}
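For reference, the channel experiment commented out inside record() condenses to a few lines once the Task results are awaited. A sketch using the imports this commit already adds (Wearable, kotlinx.coroutines.tasks.await); node id discovery and error handling are omitted, and all names are illustrative:

// Open a channel to `nodeId`, stream one audio buffer, then close.
suspend fun sendAudioOverChannel(context: Context, nodeId: String, audio: ByteArray) {
    val channelClient = Wearable.getChannelClient(context)
    val channel = channelClient.openChannel(nodeId, "/audio").await()
    channelClient.getOutputStream(channel).await().use { stream ->
        stream.write(audio)
        stream.flush()
    }
    channelClient.close(channel).await()
}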

View File

@@ -0,0 +1,58 @@
package com.birdsounds.identify.presentation
import android.content.Context
import android.util.Log
import com.google.android.gms.tasks.Tasks
import com.google.android.gms.wearable.Wearable
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import java.util.concurrent.ExecutionException
object MessageSender {
const val tag = "MessageSender"
private val job = Job()
private val coroutineScope = CoroutineScope(Dispatchers.IO + job)
fun sendMessage(path: String, message: ByteArray, context: Context) {
coroutineScope.launch {
sendMessageInBackground(path, message, context)
}
}
private fun sendMessageInBackground(path: String, message: ByteArray, context: Context) {
//first get all the nodes, ie connected wearable devices.
val nodeListTask = Wearable.getNodeClient(context).connectedNodes
try {
// Block on a task and get the result synchronously (because this is on a background
// thread).
val nodes = Tasks.await(nodeListTask)
if (nodes.isEmpty()) {
Log.i(tag,"No Node found to send message")
}
//Now send the message to each device.
for (node in nodes) {
val sendMessageTask = Wearable.getMessageClient(context)
.sendMessage(node.id, path, message)
try {
// Block on a task and get the result synchronously (because this is on a background
// thread).
val result = Tasks.await(sendMessageTask)
Log.v(tag, "SendThread: message send to " + node.displayName)
} catch (exception: ExecutionException) {
Log.e(tag, "Task failed: $exception")
} catch (exception: InterruptedException) {
Log.e(tag, "Interrupt occurred: $exception")
}
}
} catch (exception: ExecutionException) {
Log.e(tag, "Task failed: $exception")
} catch (exception: InterruptedException) {
Log.e(
tag, "Interrupt occurred: $exception"
)
}
}
}
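Sending is fire-and-forget from any Context, with the path chosen to line up with the phone manifest's /audio prefix. Note that MessageClient is designed for small payloads (the documented cap is on the order of 100 KB), so full audio buffers are better suited to the channel sketch shown earlier. A hypothetical call site:

// audioChunk: ByteArray holding one recorded buffer (hypothetical variable)
MessageSender.sendMessage("/audio", audioChunk, context)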

View File

@@ -3,27 +3,24 @@ package com.birdsounds.identify.presentation
import android.Manifest import android.Manifest
import android.content.Context import android.content.Context
import android.media.AudioFormat
import android.media.AudioRecord import android.media.AudioRecord
import android.media.MediaRecorder import android.media.MediaRecorder
import android.util.Log import android.util.Log
import androidx.annotation.RequiresPermission import androidx.annotation.RequiresPermission
import com.google.android.gms.wearable.Wearable
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import java.io.File
import kotlinx.coroutines.suspendCancellableCoroutine import kotlinx.coroutines.suspendCancellableCoroutine
import java.time.LocalDateTime
/** /**
* A helper class to provide methods to record audio input from the MIC to the internal storage. * A helper class to provide methods to record audio input from the MIC to the internal storage.
*/ */
@Suppress("DEPRECATION")
class SoundRecorder( class SoundRecorder(
context: Context, context_in: Context,
outputFileName: String outputFileName: String
) { ) {
private val audioFile = File(context.filesDir, outputFileName)
private var state = State.IDLE private var state = State.IDLE
private val context = context_in
private enum class State { private enum class State {
IDLE, RECORDING IDLE, RECORDING
@@ -31,39 +28,49 @@ class SoundRecorder(
@RequiresPermission(Manifest.permission.RECORD_AUDIO) @RequiresPermission(Manifest.permission.RECORD_AUDIO)
suspend fun record() { suspend fun record() {
suspendCancellableCoroutine<Unit> { cont -> suspendCancellableCoroutine<Unit> { cont ->
@Suppress("DEPRECATION") val audioSource = MediaRecorder.AudioSource.DEFAULT
val sampleRateInHz = 48000
val channelConfig = AudioFormat.CHANNEL_IN_MONO
val audioFormat = AudioFormat.ENCODING_PCM_8BIT
val bufferSizeInBytes =
sampleRateInHz * 1 * 1 // 1 second of audio: 48000 samples × 1 channel × 1 byte (8-bit PCM)
val audio_bytes_array = ByteArray(bufferSizeInBytes)
val audioRecord = AudioRecord(
/* audioSource = */ audioSource,
/* sampleRateInHz = */ sampleRateInHz,
/* channelConfig = */ channelConfig,
/* audioFormat = */ audioFormat,
/* bufferSizeInBytes = */ bufferSizeInBytes
)
val mediaRecorder = MediaRecorder().apply {
setAudioSource(MediaRecorder.AudioSource.MIC)
setOutputFormat(MediaRecorder.OutputFormat.OGG)
setAudioEncoder(MediaRecorder.AudioEncoder.OPUS)
setOutputFile(audioFile.path)
setOnInfoListener { mr, what, extra ->
println("info: $mr $what $extra")
}
setOnErrorListener { mr, what, extra ->
println("error: $mr $what $extra")
}
}
cont.invokeOnCancellation {
mediaRecorder.stop()
state = State.IDLE
}
mediaRecorder.prepare() val thread = Thread {
mediaRecorder.start() while (true) {
Log.e("com.birdsounds.identify","Hey I'm recording") val out = audioRecord.read(
state = State.RECORDING /* audioData = */ audio_bytes_array,
/* offsetInBytes = */ 0,
/* sizeInBytes = */ bufferSizeInBytes,
/* readMode = */ AudioRecord.READ_BLOCKING
)
// val audio_u_byte = audio_bytes_array.toUByteArray();
// Log.w(TAG, audio_bytes_array.size.toString());
val str_beg = audio_bytes_array[0].toString()
val str_end = audio_bytes_array[bufferSizeInBytes-1].toString()
Log.w(TAG, str_beg + ", " + str_end);
// MessageSender.sendMessage("/audio",audio_bytes_array, context)
}
};
thread.start();
} }
} }

View File

@@ -1,6 +1,7 @@
package com.birdsounds.identify.presentation package com.birdsounds.identify.presentation
import android.content.Context
import androidx.compose.runtime.Composable import androidx.compose.runtime.Composable
import androidx.compose.ui.tooling.preview.PreviewParameter import androidx.compose.ui.tooling.preview.PreviewParameter
import androidx.compose.ui.tooling.preview.datasource.CollectionPreviewParameterProvider import androidx.compose.ui.tooling.preview.datasource.CollectionPreviewParameterProvider
@@ -10,6 +11,7 @@ import com.google.android.horologist.compose.layout.ScreenScaffold
@Composable @Composable
fun StartRecordingScreen( fun StartRecordingScreen(
context: Context,
appState: AppState, appState: AppState,
isPermissionDenied: Boolean, isPermissionDenied: Boolean,
onMicClicked: () -> Unit onMicClicked: () -> Unit
@@ -53,16 +55,3 @@ private class PlaybackStatePreviewProvider : CollectionPreviewParameterProvider<
AppState.Ready AppState.Ready
) )
) )
@WearPreviewDevices
@WearPreviewFontScales
@Composable
fun SpeakerScreenPreview(
@PreviewParameter(PlaybackStatePreviewProvider::class) appState: AppState
) {
StartRecordingScreen(
appState = appState,
isPermissionDenied = true,
onMicClicked = {}
)
}