This commit is contained in:
isp
2024-09-10 19:25:20 -07:00
parent f159349cae
commit c04123ea84
24 changed files with 20588 additions and 63 deletions

View File

@@ -1,6 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
xmlns:tools="http://schemas.android.com/tools" >
<!-- INTERNET: model downloads (see Downloader); location permissions feed the
     BirdNET meta model with the device position (see Location / SoundClassifier). -->
<uses-permission android:name="android.permission.INTERNET"></uses-permission>
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION"></uses-permission>
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION"></uses-permission>
<application
android:allowBackup="true"
@@ -11,16 +15,31 @@
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.Identify"
tools:targetApi="31">
tools:targetApi="31" >
<!-- Launcher entry point. -->
<activity
android:name=".MainActivity"
android:exported="true">
android:exported="true" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<!-- Receives wearable messages on the "wear" scheme whose path starts with "/audio"
     (handled by MessageListenerService, currently a stub). -->
<service
android:name=".MessageListenerService"
android:enabled="true"
android:exported="true" >
<intent-filter>
<action android:name="com.google.android.gms.wearable.MESSAGE_RECEIVED" />
<data
android:host="*"
android:pathPrefix="/audio"
android:scheme="wear" />
</intent-filter>
</service>
</application>
</manifest>

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,189 @@
package com.birdsounds.identify;
import android.app.Activity;
import android.content.Context;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@SuppressWarnings("ResultOfMethodCallIgnored")
public class Downloader {
    static final String modelFILE = "model.tflite";
    static final String metaModelFILE = "metaModel.tflite";
    static final String modelURL = "https://raw.githubusercontent.com/woheller69/whoBIRD-TFlite/master/BirdNET_GLOBAL_6K_V2.4_Model_FP16.tflite";
    static final String model32URL = "https://raw.githubusercontent.com/woheller69/whoBIRD-TFlite/master/BirdNET_GLOBAL_6K_V2.4_Model_FP32.tflite";
    static final String metaModelURL = "https://raw.githubusercontent.com/woheller69/whoBIRD-TFlite/master/BirdNET_GLOBAL_6K_V2.4_MData_Model_FP16.tflite";
    static final String modelMD5 = "b1c981fe261910b473b9b7eec9ebcd4e";
    static final String model32MD5 = "6c7c42106e56550fc8563adb31bc120e";
    static final String metaModelMD5 = "f1a078ae0f244a1ff5a8f1ccb645c805";

    /** Resolves a model file inside the app-private "filesdir" directory. */
    private static File modelFile(Activity activity, String name) {
        return new File(activity.getDir("filesdir", Context.MODE_PRIVATE) + "/" + name);
    }

    /**
     * Computes the MD5 of a file as a 32-char lowercase hex string.
     * Uses String.format("%032x", ...) so leading zeros are preserved;
     * BigInteger.toString(16) drops them, which would make a correct file
     * whose digest starts with 0x0 fail verification forever.
     */
    private static String md5Of(File file) throws IOException, NoSuchAlgorithmException {
        byte[] data = Files.readAllBytes(Paths.get(file.getPath()));
        byte[] hash = MessageDigest.getInstance("MD5").digest(data);
        return String.format("%032x", new BigInteger(1, hash));
    }

    /** Returns true when {@code md5} matches any of the accepted checksums. */
    private static boolean matchesAny(String md5, String... accepted) {
        for (String candidate : accepted) {
            if (md5.equals(candidate)) return true;
        }
        return false;
    }

    /**
     * Verifies both model files on disk, deleting any file whose checksum is wrong
     * so it will be re-downloaded.
     *
     * @return true when both the audio model (FP16 or FP32) and the meta model are
     *         present and have valid checksums
     * @throws RuntimeException when an existing file cannot be read or MD5 is unavailable
     */
    public static boolean checkModels(final Activity activity) {
        File modelFile = modelFile(activity, modelFILE);
        File metaModelFile = modelFile(activity, metaModelFILE);
        String calcModelMD5 = "";
        String calcMetaModelMD5 = "";
        try {
            if (modelFile.exists()) calcModelMD5 = md5Of(modelFile);
            if (metaModelFile.exists()) calcMetaModelMD5 = md5Of(metaModelFile);
        } catch (IOException | NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        boolean modelValid = matchesAny(calcModelMD5, modelMD5, model32MD5);
        boolean metaValid = calcMetaModelMD5.equals(metaModelMD5);
        if (modelFile.exists() && !modelValid) modelFile.delete();
        if (metaModelFile.exists() && !metaValid) metaModelFile.delete();
        return modelValid && metaValid;
    }

    /**
     * Downloads {@code urlString} into {@code target} on a background thread and
     * verifies the MD5. On a checksum mismatch or any I/O failure the partial file
     * is deleted and an error toast is shown on the UI thread.
     */
    private static void downloadAndVerify(final Activity activity, final File target,
                                          final String urlString, final String... acceptedMd5s) {
        new Thread(() -> {
            try {
                URL url = new URL(urlString);
                Log.d("whoBIRD", "Download " + target.getName());
                URLConnection ucon = url.openConnection();
                ucon.setReadTimeout(5000);
                ucon.setConnectTimeout(10000);
                target.createNewFile();
                // try-with-resources closes both streams even when the copy fails midway.
                try (BufferedInputStream inStream = new BufferedInputStream(ucon.getInputStream(), 1024 * 5);
                     FileOutputStream outStream = new FileOutputStream(target)) {
                    byte[] buff = new byte[5 * 1024];
                    int len;
                    while ((len = inStream.read(buff)) != -1) {
                        outStream.write(buff, 0, len);
                    }
                    outStream.flush();
                }
                // Throw if there is no target file at this point.
                if (!target.exists()) throw new IOException("download produced no file");
                if (!matchesAny(md5Of(target), acceptedMd5s)) {
                    target.delete();
                    activity.runOnUiThread(() -> Toast.makeText(activity,
                            activity.getResources().getString(R.string.error_download), Toast.LENGTH_SHORT).show());
                }
            } catch (NoSuchAlgorithmException | IOException i) {
                activity.runOnUiThread(() -> Toast.makeText(activity,
                        activity.getResources().getString(R.string.error_download), Toast.LENGTH_SHORT).show());
                target.delete();
                Log.w("whoBIRD", activity.getResources().getString(R.string.error_download), i);
            }
        }).start();
    }

    /**
     * Downloads any model file that is missing on disk: the FP16 audio model and the
     * location/week meta model. Each download runs on its own background thread.
     * (The dead {@code if (false) url = new URL(model32URL)} branch was removed;
     * the FP32 checksum is still accepted in case an FP32 model was installed before.)
     */
    public static void downloadModels(final Activity activity) {
        File modelFile = modelFile(activity, modelFILE);
        if (!modelFile.exists()) {
            Log.d("whoBIRD", "model file does not exist");
            downloadAndVerify(activity, modelFile, modelURL, modelMD5, model32MD5);
        } else {
            Log.d("whoBIRD", "model exists");
        }
        File metaModelFile = modelFile(activity, metaModelFILE);
        if (!metaModelFile.exists()) {
            Log.d("whoBIRD", "meta model file does not exist");
            downloadAndVerify(activity, metaModelFile, metaModelURL, metaModelMD5);
        } else {
            Log.d("whoBIRD", "meta file exists");
        }
    }
}

View File

@@ -0,0 +1,59 @@
package com.birdsounds.identify;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.widget.Toast;
import androidx.core.app.ActivityCompat;
/** Static helpers that subscribe the SoundClassifier to GPS location updates. */
public class Location {
    /** Minimum interval between GPS updates, in milliseconds. */
    private static final long UPDATE_INTERVAL_MS = 60000;

    private static LocationListener locationListenerGPS;

    /** Stops GPS updates and releases the listener so it can be re-created later. */
    static void stopLocation(Context context) {
        LocationManager locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
        if (locationListenerGPS != null) locationManager.removeUpdates(locationListenerGPS);
        locationListenerGPS = null;
    }

    /**
     * Starts periodic GPS updates that feed the classifier's meta model.
     * GPS_PROVIDER requires ACCESS_FINE_LOCATION; the previous code checked only
     * COARSE, which throws a SecurityException when the user grants approximate
     * location only (possible since Android 12).
     */
    static void requestLocation(Context context, SoundClassifier soundClassifier) {
        if (ActivityCompat.checkSelfPermission(context, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED
                && checkLocationProvider(context)) {
            LocationManager locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
            if (locationListenerGPS == null) locationListenerGPS = new LocationListener() {
                @Override
                public void onLocationChanged(android.location.Location location) {
                    // Each fix re-runs the meta model so species weights track the device position.
                    soundClassifier.runMetaInterpreter(location);
                }

                @Deprecated
                @Override
                public void onStatusChanged(String provider, int status, Bundle extras) {
                }

                @Override
                public void onProviderEnabled(String provider) {
                }

                @Override
                public void onProviderDisabled(String provider) {
                }
            };
            locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, UPDATE_INTERVAL_MS, 0, locationListenerGPS);
        }
    }

    /** Returns true when the GPS provider is enabled; otherwise shows a toast and returns false. */
    public static boolean checkLocationProvider(Context context) {
        LocationManager locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
        if (!locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)) {
            Toast.makeText(context, "Error no GPS", Toast.LENGTH_SHORT).show();
            return false;
        } else {
            return true;
        }
    }
}

View File

@@ -1,20 +1,57 @@
package com.birdsounds.identify
import android.content.pm.PackageManager
import android.os.Bundle
import android.Manifest
import android.util.Log
import androidx.activity.enableEdgeToEdge
import androidx.appcompat.app.AppCompatActivity
import androidx.core.content.ContextCompat
import androidx.core.view.ViewCompat
import androidx.core.view.WindowInsetsCompat
import com.google.android.gms.wearable.ChannelClient
import com.google.android.gms.wearable.Wearable
/**
 * Entry activity: kicks off the TFLite model downloads, requests runtime
 * permissions, creates the [SoundClassifier], and subscribes it to GPS updates
 * so the meta model can weight predictions by location.
 */
class MainActivity : AppCompatActivity() {
    private lateinit var soundClassifier: SoundClassifier

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        enableEdgeToEdge()
        setContentView(R.layout.activity_main)
        // Log wearable channel openings (e.g. audio streamed from a paired watch).
        Wearable.getChannelClient(this.applicationContext)
            .registerChannelCallback(object : ChannelClient.ChannelCallback() {
                override fun onChannelOpened(channel: ChannelClient.Channel) {
                    super.onChannelOpened(channel)
                    Log.d("HEY", "onChannelOpened")
                }
            })
        Downloader.downloadModels(this)
        requestPermissions()
        soundClassifier = SoundClassifier(this, SoundClassifier.Options())
        Location.requestLocation(this, soundClassifier)
        // Pad the root view so edge-to-edge content is not drawn under system bars.
        ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets ->
            val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars())
            v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom)
            insets
        }
    }

    companion object {
        const val REQUEST_PERMISSIONS = 1337
    }

    /** Requests coarse+fine location together when not yet granted. */
    private fun requestPermissions() {
        val perms = mutableListOf<String>()
        if (ContextCompat.checkSelfPermission(
                this,
                Manifest.permission.ACCESS_COARSE_LOCATION
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            perms.add(Manifest.permission.ACCESS_COARSE_LOCATION)
            perms.add(Manifest.permission.ACCESS_FINE_LOCATION)
        }
        if (perms.isNotEmpty()) requestPermissions(perms.toTypedArray(), REQUEST_PERMISSIONS)
    }
}

View File

@@ -0,0 +1,8 @@
package com.birdsounds.identify
import android.content.Intent
/** Keys shared between the wearable message listener and local broadcast consumers. */
object MessageConstants {
    // Action name for local broadcasts that relay a wearable message.
    const val intentName = "WearableMessageDisplay"
    // Intent extra key under which the message payload is stored.
    const val message = "Message"
    // Wearable message path.
    // NOTE(review): the manifest intent-filter uses pathPrefix "/audio" (leading slash);
    // confirm whether this constant should be "/audio" before using it for matching.
    const val path = "audio"
}

View File

@@ -0,0 +1,20 @@
package com.birdsounds.identify
import android.content.Intent
import android.util.Log
import androidx.localbroadcastmanager.content.LocalBroadcastManager
import com.google.android.gms.wearable.MessageEvent
import com.google.android.gms.wearable.WearableListenerService
/**
 * Receives messages sent from a paired Wear OS device (registered in the manifest
 * for scheme "wear", pathPrefix "/audio").
 * Currently a stub: payload logging/broadcasting below is commented out, so
 * incoming messages are only passed to the superclass.
 */
class MessageListenerService : WearableListenerService() {
    private val tag = "MessageListenerService"

    override fun onMessageReceived(p0: MessageEvent) {
        super.onMessageReceived(p0)
        // Log.i(tag ,p0.data.toString(Charsets.UTF_8))
        // Log.i(tag, p0.data.toString(Charsets.US_ASCII))
        // broadcastMessage(p0)
    }
}

View File

@@ -0,0 +1,388 @@
package com.birdsounds.identify
import android.content.Context
import android.location.Location
import android.os.SystemClock
import android.preference.PreferenceManager
import android.util.Log
import org.tensorflow.lite.Interpreter
import java.io.BufferedReader
import java.io.File
import java.io.IOException
import java.io.InputStreamReader
import java.nio.ByteBuffer
import java.nio.FloatBuffer
import java.nio.channels.FileChannel
import java.nio.file.StandardOpenOption
import java.time.LocalDate
import java.util.Locale
import java.util.TimerTask
import java.util.Timer
import kotlin.concurrent.scheduleAtFixedRate
import kotlin.math.ceil
import kotlin.math.cos
import uk.me.berndporr.iirj.Butterworth
import kotlin.math.round
import kotlin.math.sin
/**
 * Wraps two TFLite interpreters: the BirdNET audio model ("model.tflite") and a
 * location/week "meta" model ("metaModel.tflite") whose output down-weights species
 * that are unlikely at the current place and season. Both model files are loaded
 * from the app-private "filesdir" directory (populated by Downloader).
 */
class SoundClassifier(
    context: Context,
    private val options: Options = Options()
) {
    internal var mContext: Context
    // NOTE(review): this instance TAG shadows the companion object's TAG inside the class.
    val TAG = "Sound Classifier"

    init {
        this.mContext = context.applicationContext
    }

    /** Paths, sample rate, and probability thresholds used by the classifier. */
    class Options(
        /** Path of the converted model label file, relative to the assets/ directory. */
        val labelsBase: String = "labels",
        /** Path of the converted .tflite file, relative to the assets/ directory. */
        val assetFile: String = "assets.txt",
        /** Path of the converted .tflite file, relative to the assets/ directory. */
        val modelPath: String = "model.tflite",
        /** Path of the meta model .tflite file, relative to the assets/ directory. */
        val metaModelPath: String = "metaModel.tflite",
        /** The required audio sample rate in Hz. */
        val sampleRate: Int = 48000,
        /** Multiplier for audio samples */
        val warmupRuns: Int = 3,
        /** Probability value above which a class in the meta model is labeled as active (i.e., detected) the display. (default 0.01) */
        var metaProbabilityThreshold1: Float = 0.01f, //min must be > 0
        var metaProbabilityThreshold2: Float = 0.008f, //min must be > 0
        var metaProbabilityThreshold3: Float = 0.001f, //min must be > 0
        /** Probability value above which a class is shown as image. (default 0.5) */
        var displayImageThreshold: Float = 0.65f, //min must be > 0
    )

    /** Names of the model's output classes. */
    lateinit var labelList: List<String>

    /** Names of the model's output classes. */
    lateinit var assetList: List<String>

    /** How many milliseconds between consecutive model inference calls. */
    private var inferenceInterval = 800L

    /** The TFLite interpreter instance. */
    private lateinit var interpreter: Interpreter
    private lateinit var meta_interpreter: Interpreter

    /** Audio length (in # of PCM samples) required by the TFLite model. */
    private var modelInputLength = 0

    /** input Length of the meta model */
    private var metaModelInputLength = 0

    /** Number of output classes of the TFLite model. */
    private var modelNumClasses = 0
    private var metaModelNumClasses = 0

    /** Used to hold the real-time probabilities predicted by the model for the output classes. */
    private lateinit var predictionProbs: FloatArray
    private lateinit var metaPredictionProbs: FloatArray

    /** Latest prediction latency in milliseconds. */
    private var latestPredictionLatencyMs = 0f

    private var recognitionTask: TimerTask? = null

    /** Buffer that holds audio PCM sample that are fed to the TFLite model for inference. */
    private lateinit var inputBuffer: FloatBuffer
    private lateinit var metaInputBuffer: FloatBuffer

    init {
        // Load labels first: interpreter setup validates output size against labelList.
        loadLabels(context)
        loadAssetList(context)
        setupInterpreter(context)
        setupMetaInterpreter(context)
        warmUpModel()
    }

    /** Retrieve asset list from "asset_list" file */
    private fun loadAssetList(context: Context) {
        try {
            val reader =
                BufferedReader(InputStreamReader(context.assets.open(options.assetFile)))
            val wordList = mutableListOf<String>()
            reader.useLines { lines ->
                lines.forEach {
                    wordList.add(it.trim())
                }
            }
            assetList = wordList.map { it }
        } catch (e: IOException) {
            // On failure assetList stays uninitialized (lateinit) — accessing it later would throw.
            Log.e(TAG, "Failed to read labels ${options.assetFile}: ${e.message}")
        }
    }

    /** Retrieve labels from "labels.txt" file */
    private fun loadLabels(context: Context) {
        // Prefer a localized label file ("labels_<lang>.txt"); fall back to English.
        val localeList = context.resources.configuration.locales
        val language = localeList.get(0).language
        var filename = options.labelsBase + "_${language}.txt"
        //Check if file exists
        val assetManager = context.assets // Replace 'assets' with actual AssetManager instance
        try {
            val mapList = assetManager.list("")?.toMutableList()
            if (mapList != null) {
                if (!mapList.contains(filename)) {
                    filename = options.labelsBase + "_en.txt"
                }
            }
        } catch (ex: IOException) {
            ex.printStackTrace()
            filename = options.labelsBase + "_en.txt"
        }
        Log.i(TAG, filename)
        try {
            val reader =
                BufferedReader(InputStreamReader(context.assets.open(filename)))
            val wordList = mutableListOf<String>()
            reader.useLines { lines ->
                lines.forEach {
                    wordList.add(it)
                }
            }
            labelList = wordList.map { it.toTitleCase() }
            Log.i(TAG, "Label list entries: ${labelList.size}")
        } catch (e: IOException) {
            Log.e(TAG, "Failed to read labels $(unknown): ${e.message}")
        }
    }

    /** Memory-maps the audio model from disk and reads its input/output tensor shapes. */
    private fun setupInterpreter(context: Context) {
        try {
            val modelFilePath = context.getDir("filesdir", Context.MODE_PRIVATE).absolutePath + "/"+ options.modelPath
            Log.i(TAG, "Trying to create TFLite buffer from $modelFilePath")
            val modelFile = File(modelFilePath)
            // Map the file read-only; `use` closes the channel after mapping.
            val tfliteBuffer: ByteBuffer = FileChannel.open(modelFile.toPath(), StandardOpenOption.READ).use { channel ->
                channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size())
            }
            Log.i(TAG, "Done creating TFLite buffer from $modelFilePath")
            interpreter = Interpreter(tfliteBuffer, Interpreter.Options())
        } catch (e: IOException) {
            // Model not downloaded yet — leave the classifier unconfigured (lengths stay 0).
            Log.e(TAG, "Failed to load TFLite model - ${e.message}")
            return
        }
        // Inspect input and output specs.
        val inputShape = interpreter.getInputTensor(0).shape()
        Log.i(TAG, "TFLite model input shape: ${inputShape.contentToString()}")
        modelInputLength = inputShape[1]
        val outputShape = interpreter.getOutputTensor(0).shape()
        Log.i(TAG, "TFLite output shape: ${outputShape.contentToString()}")
        modelNumClasses = outputShape[1]
        if (modelNumClasses != labelList.size) {
            Log.e(
                TAG,
                "Mismatch between metadata number of classes (${labelList.size})" +
                    " and model output length ($modelNumClasses)"
            )
        }
        // Fill the array with NaNs initially.
        predictionProbs = FloatArray(modelNumClasses) { Float.NaN }
        inputBuffer = FloatBuffer.allocate(modelInputLength)
    }

    /** Memory-maps the meta model and reads its input/output tensor shapes. */
    private fun setupMetaInterpreter(context: Context) {
        try {
            val metaModelFilePath = context.getDir("filesdir", Context.MODE_PRIVATE).absolutePath + "/"+ options.metaModelPath
            Log.i(TAG, "Trying to create TFLite buffer from $metaModelFilePath")
            val metaModelFile = File(metaModelFilePath)
            val tfliteBuffer: ByteBuffer = FileChannel.open(metaModelFile.toPath(), StandardOpenOption.READ).use { channel ->
                channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size())
            }
            Log.i(TAG, "Done creating TFLite buffer from $metaModelFilePath")
            meta_interpreter = Interpreter(tfliteBuffer, Interpreter.Options())
        } catch (e: IOException) {
            Log.e(TAG, "Failed to load TFLite model - ${e.message}")
            return
        }
        // Inspect input and output specs.
        val metaInputShape = meta_interpreter.getInputTensor(0).shape()
        Log.i(TAG, "TFLite meta model input shape: ${metaInputShape.contentToString()}")
        metaModelInputLength = metaInputShape[1]
        val metaOutputShape = meta_interpreter.getOutputTensor(0).shape()
        Log.i(TAG, "TFLite meta model output shape: ${metaOutputShape.contentToString()}")
        metaModelNumClasses = metaOutputShape[1]
        if (metaModelNumClasses != labelList.size) {
            Log.e(
                TAG,
                "Mismatch between metadata number of classes (${labelList.size})" +
                    " and meta model output length ($metaModelNumClasses)"
            )
        }
        // Fill the array with 1 initially.
        metaPredictionProbs = FloatArray(metaModelNumClasses) { 1f }
        metaInputBuffer = FloatBuffer.allocate(metaModelInputLength)
    }

    /**
     * Feeds (latitude, longitude, encoded week-of-year) to the meta model and
     * quantizes its per-species probabilities into discrete weights (1 / 0.8 / 0.5 / 0)
     * that later scale the audio model's predictions in startRecognition().
     */
    fun runMetaInterpreter(location: Location) {
        val dayOfYear = LocalDate.now().dayOfYear
        val week = ceil( dayOfYear*48.0/366.0) //model year has 48 weeks
        lat = location.latitude.toFloat()
        lon = location.longitude.toFloat()
        // Encode the 48-week cycle as a cosine (48 * 7.5° = 360°) so week 48 wraps to week 1.
        val weekMeta = cos(Math.toRadians(week * 7.5)) + 1.0
        metaInputBuffer.put(0, lat)
        metaInputBuffer.put(1, lon)
        metaInputBuffer.put(2, weekMeta.toFloat())
        metaInputBuffer.rewind() // Reset position to beginning of buffer
        val metaOutputBuffer = FloatBuffer.allocate(metaModelNumClasses)
        metaOutputBuffer.rewind()
        meta_interpreter.run(metaInputBuffer, metaOutputBuffer)
        metaOutputBuffer.rewind()
        metaOutputBuffer.get(metaPredictionProbs) // Copy data to metaPredictionProbs.
        // Quantize probabilities into the weights used when scaling audio predictions.
        for (i in metaPredictionProbs.indices) {
            metaPredictionProbs[i] =
                if (metaPredictionProbs[i] >= options.metaProbabilityThreshold1) {
                    1f
                } else if (metaPredictionProbs[i] >= options.metaProbabilityThreshold2) {
                    0.8f
                } else if (metaPredictionProbs[i] >= options.metaProbabilityThreshold3) {
                    0.5f
                } else {
                    0f
                }
        }
    }

    /** Runs a few dummy inferences so the interpreter is initialized before real audio arrives. */
    private fun warmUpModel() {
        generateDummyAudioInput(inputBuffer)
        for (n in 0 until options.warmupRuns) {
            // Create input and output buffers.
            val outputBuffer = FloatBuffer.allocate(modelNumClasses)
            inputBuffer.rewind()
            outputBuffer.rewind()
            interpreter.run(inputBuffer, outputBuffer)
        }
    }

    /** Fills the input buffer with a sine sweep used only for warm-up runs. */
    private fun generateDummyAudioInput(inputBuffer: FloatBuffer) {
        val twoPiTimesFreq = 2 * Math.PI.toFloat() * 1000f
        for (i in 0 until modelInputLength) {
            val x = i.toFloat() / (modelInputLength - 1)
            inputBuffer.put(i, sin(twoPiTimesFreq * x.toDouble()).toFloat())
        }
    }

    // Capitalizes each "_"-separated part of a label (e.g. "song_sparrow" -> "Song_Sparrow").
    private fun String.toTitleCase() =
        splitToSequence("_")
            .map { it.replaceFirstChar { if (it.isLowerCase()) it.titlecase(Locale.ROOT) else it.toString() } }
            .joinToString("_")
            .trim()

    companion object {
        private const val TAG = "SoundClassifier"

        // Last known position, written by runMetaInterpreter().
        var lat: Float = 0.0f
        var lon: Float = 0.0f

        /** Number of nanoseconds in a millisecond */
        private const val NANOS_IN_MILLIS = 1_000_000.toDouble()
    }

    /**
     * Schedules periodic inference on a Timer: copies captured PCM samples into a
     * circular buffer, optionally high-pass filters them, runs the audio model, and
     * combines sigmoid probabilities with the meta model's location/season weights.
     * NOTE(review): audio capture is commented out below (sampleCounts is hard-coded
     * to 0), so every task run currently exits early — confirm loadAudio() is wired in.
     */
    private fun startRecognition() {
        if (modelInputLength <= 0 || modelNumClasses <= 0) {
            Log.e(TAG, "Switches: Cannot start recognition because model is unavailable.")
            return
        }
        // Optional high-pass filter (order-6 Butterworth) with cutoff taken from preferences.
        val sharedPref = PreferenceManager.getDefaultSharedPreferences(mContext)
        val highPass = sharedPref.getInt("high_pass",0)
        val butterworth = Butterworth()
        butterworth.highPass(6, 48000.0, highPass.toDouble())
        val circularBuffer = ShortArray(modelInputLength)
        var j = 0 // Indices for the circular buffer next write
        Log.w(TAG, "recognitionPeriod:"+inferenceInterval)
        recognitionTask = Timer().scheduleAtFixedRate(inferenceInterval, inferenceInterval) task@{
            val outputBuffer = FloatBuffer.allocate(modelNumClasses)
            val recordingBuffer = ShortArray(modelInputLength)
            // Load new audio samples
            // val sampleCounts = loadAudio(recordingBuffer)
            val sampleCounts = 0;
            if (sampleCounts == 0) {
                return@task
            }
            // Copy new data into the circular buffer
            for (i in 0 until sampleCounts) {
                circularBuffer[j] = recordingBuffer[i]
                j = (j + 1) % circularBuffer.size
            }
            // Feed data to the input buffer.
            var samplesAreAllZero = true
            for (i in 0 until modelInputLength) {
                // Read oldest-first ((i + j) offset) so the model sees samples in chronological order.
                val s = circularBuffer[(i + j) % modelInputLength]
                if (samplesAreAllZero && s.toInt() != 0) {
                    samplesAreAllZero = false
                }
                if (highPass==0) inputBuffer.put(i, s.toFloat())
                else inputBuffer.put(i, butterworth.filter(s.toDouble()).toFloat())
            }
            if (samplesAreAllZero) {
                Log.w(TAG, mContext.resources.getString(R.string.samples_zero))
                return@task
            }
            val t0 = SystemClock.elapsedRealtimeNanos()
            inputBuffer.rewind()
            outputBuffer.rewind()
            interpreter.run(inputBuffer, outputBuffer)
            outputBuffer.rewind()
            outputBuffer.get(predictionProbs) // Copy data to predictionProbs.
            val probList = mutableListOf<Float>()
            // if (mBinding.checkIgnoreMeta.isChecked){
            // for (value in predictionProbs) {
            // probList.add(1 / (1 + kotlin.math.exp(-value))) //apply sigmoid
            // }
            // } else {
            // Sigmoid each logit, then scale by the meta model's location/season weight.
            for (i in predictionProbs.indices) {
                probList.add( metaPredictionProbs[i] / (1+kotlin.math.exp(-predictionProbs[i])) ) //apply sigmoid
            }
            // }
            // if (mBinding.progressHorizontal.isIndeterminate){ //if start/stop button set to "running"
            // probList.withIndex().also {
            // val max = it.maxByOrNull { entry -> entry.value }
            // updateTextView(max, mBinding.text1)
            // updateImage(max)
            //after finding the maximum probability and its corresponding label (max), we filter out that entry from the list of entries before finding the second highest probability (secondMax)
            // val secondMax = it.filterNot { entry -> entry == max }.maxByOrNull { entry -> entry.value }
            // updateTextView(secondMax,mBinding.text2)
            // }
            // }
            latestPredictionLatencyMs =
                ((SystemClock.elapsedRealtimeNanos() - t0) / 1e6).toFloat()
        }
    }
}

View File

@@ -0,0 +1,11 @@
package com.birdsounds.identify.ui.theme
import androidx.compose.ui.graphics.Color
// Material 3 palette entries: the "80" tones are referenced by the dark color
// scheme and the "40" tones by the light color scheme (see Theme.kt).
val Purple80 = Color(0xFFD0BCFF)
val PurpleGrey80 = Color(0xFFCCC2DC)
val Pink80 = Color(0xFFEFB8C8)
val Purple40 = Color(0xFF6650a4)
val PurpleGrey40 = Color(0xFF625b71)
val Pink40 = Color(0xFF7D5260)

View File

@@ -0,0 +1,57 @@
package com.birdsounds.identify.ui.theme
import android.app.Activity
import android.os.Build
import androidx.compose.foundation.isSystemInDarkTheme
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.darkColorScheme
import androidx.compose.material3.dynamicDarkColorScheme
import androidx.compose.material3.dynamicLightColorScheme
import androidx.compose.material3.lightColorScheme
import androidx.compose.runtime.Composable
import androidx.compose.ui.platform.LocalContext
// Static dark scheme used when dynamic color is disabled or unavailable (pre-Android 12).
private val DarkColorScheme = darkColorScheme(
    primary = Purple80,
    secondary = PurpleGrey80,
    tertiary = Pink80
)

// Static light scheme used when dynamic color is disabled or unavailable (pre-Android 12).
private val LightColorScheme = lightColorScheme(
    primary = Purple40,
    secondary = PurpleGrey40,
    tertiary = Pink40
    /* Other default colors to override
    background = Color(0xFFFFFBFE),
    surface = Color(0xFFFFFBFE),
    onPrimary = Color.White,
    onSecondary = Color.White,
    onTertiary = Color.White,
    onBackground = Color(0xFF1C1B1F),
    onSurface = Color(0xFF1C1B1F),
    */
)
/**
 * App-level Material 3 theme wrapper.
 * Prefers wallpaper-derived dynamic colors on Android 12+ (when [dynamicColor] is
 * true); otherwise falls back to the static dark/light schemes defined above.
 */
@Composable
fun IdentifyTheme(
    darkTheme: Boolean = isSystemInDarkTheme(),
    // Dynamic color is available on Android 12+
    dynamicColor: Boolean = true,
    content: @Composable () -> Unit
) {
    val dynamicAvailable = dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S
    val scheme = if (dynamicAvailable) {
        val ctx = LocalContext.current
        if (darkTheme) dynamicDarkColorScheme(ctx) else dynamicLightColorScheme(ctx)
    } else if (darkTheme) {
        DarkColorScheme
    } else {
        LightColorScheme
    }
    MaterialTheme(
        colorScheme = scheme,
        typography = Typography,
        content = content
    )
}

View File

@@ -0,0 +1,34 @@
package com.birdsounds.identify.ui.theme
import androidx.compose.material3.Typography
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.sp
// Set of Material typography styles to start with
val Typography = Typography(
    // Default body text: 16sp with generous line height for readability.
    bodyLarge = TextStyle(
        fontFamily = FontFamily.Default,
        fontWeight = FontWeight.Normal,
        fontSize = 16.sp,
        lineHeight = 24.sp,
        letterSpacing = 0.5.sp
    )
    /* Other default text styles to override
    titleLarge = TextStyle(
        fontFamily = FontFamily.Default,
        fontWeight = FontWeight.Normal,
        fontSize = 22.sp,
        lineHeight = 28.sp,
        letterSpacing = 0.sp
    ),
    labelSmall = TextStyle(
        fontFamily = FontFamily.Default,
        fontWeight = FontWeight.Medium,
        fontSize = 11.sp,
        lineHeight = 16.sp,
        letterSpacing = 0.5.sp
    )
    */
)

View File

@@ -1,3 +1,6 @@
<resources>
    <!-- Application display name. -->
    <string name="app_name">identify</string>
    <!-- Title of the model download activity. -->
    <string name="title_activity_download">DownloadActivity</string>
    <!-- Shown (Toast/log) when a model download fails or its checksum does not match. -->
    <string name="error_download">Download Error</string>
    <!-- Logged when the captured audio buffer contains only zero samples. -->
    <string name="samples_zero">Samples are all zero</string>
</resources>