@jbhntr861
Created March 9, 2023 11:52
package com.jbhntr86.sabia

class ErrorHandler {
    companion object {
        fun handleException(e: Exception) {
            when (e) {
                is IllegalArgumentException -> {
                    println("Invalid argument: ${e.message}")
                }
                is SecurityException -> {
                    println("Security exception: ${e.message}")
                }
                else -> {
                    println("An unexpected error occurred: ${e.message}")
                    e.printStackTrace()
                }
            }
        }
    }
}
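package com.jbhntr86.sabia

// Minimal usage sketch for ErrorHandler: wrapping a risky call so known exception types
// are reported through the shared handler. parseAmount() is hypothetical and exists only
// to illustrate the call site.
object ErrorHandlerUsageSketch {
    fun parseAmount(raw: String): Int {
        return raw.toIntOrNull() ?: throw IllegalArgumentException("Not a number: $raw")
    }

    fun demo() {
        try {
            parseAmount("forty-two")
        } catch (e: Exception) {
            ErrorHandler.handleException(e)
        }
    }
}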
package com.jbhntr86.sabia

import android.content.Context
import com.google.android.gms.auth.api.credentials.*
import com.google.android.gms.auth.api.signin.GoogleSignIn
import com.google.android.gms.auth.api.signin.GoogleSignInOptions
import com.google.android.gms.common.ConnectionResult
import com.google.android.gms.common.GoogleApiAvailability
import com.google.firebase.FirebaseApp
import com.google.firebase.FirebaseOptions
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.auth.FirebaseUser
import com.google.firebase.auth.GoogleAuthProvider
import com.google.firebase.crashlytics.FirebaseCrashlytics
import org.json.JSONObject

class GoogleAuth(private val context: Context) {

    private val googleApiClientId: String
    private val googleApiKey: String
    private val googleProjectId: String
    private val googleApplicationId: String

    init {
        // Parse the bundled google-services.json to pull out the identifiers Firebase needs.
        val googleServicesJson =
            context.resources.openRawResource(R.raw.google_services).bufferedReader()
                .use { it.readText() }
        val googleServicesJsonObj = JSONObject(googleServicesJson)
        val clientObj = googleServicesJsonObj.getJSONArray("client").getJSONObject(0)
        // The OAuth client id lives in the oauth_client array; requestIdToken() expects the
        // web (type 3) client id.
        googleApiClientId =
            clientObj.getJSONArray("oauth_client").getJSONObject(0).getString("client_id")
        googleApiKey =
            clientObj.getJSONArray("api_key").getJSONObject(0).getString("current_key")
        val projectObj = googleServicesJsonObj.getJSONObject("project_info")
        googleProjectId = projectObj.getString("project_id")
        googleApplicationId = projectObj.getString("mobilesdk_app_id")
    }

    fun authenticate(onSuccess: (FirebaseUser) -> Unit, onFailure: () -> Unit) {
        val availability = GoogleApiAvailability.getInstance()
        val result = availability.isGooglePlayServicesAvailable(context)
        if (result == ConnectionResult.SUCCESS) {
            FirebaseApp.initializeApp(
                context, FirebaseOptions.Builder()
                    .setApiKey(googleApiKey)
                    .setProjectId(googleProjectId)
                    .setApplicationId(googleApplicationId)
                    .build()
            )
            val signInOptions = GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestEmail()
                .requestProfile()
                .requestId()
                .requestIdToken(googleApiClientId)
                .build()
            val googleSignInClient = GoogleSignIn.getClient(context, signInOptions)
            val credentialsClient = Credentials.getClient(context)
            credentialsClient.request(
                CredentialRequest.Builder().setAccountTypes(IdentityProviders.GOOGLE).build()
            )
                .addOnSuccessListener { credentialResponse ->
                    // The saved credential only hints at which account to use; the ID token
                    // comes from the silent sign-in below.
                    val savedAccountId = credentialResponse.credential?.id
                    googleSignInClient.silentSignIn()
                        .addOnSuccessListener { signInAccount ->
                            val googleToken = signInAccount.idToken
                            val firebaseCredential =
                                GoogleAuthProvider.getCredential(googleToken, null)
                            FirebaseAuth.getInstance().signInWithCredential(firebaseCredential)
                                .addOnCompleteListener { task ->
                                    if (task.isSuccessful) {
                                        val user = task.result?.user
                                        if (user != null) onSuccess(user) else onFailure()
                                    } else {
                                        task.exception?.let {
                                            FirebaseCrashlytics.getInstance().recordException(it)
                                        }
                                        onFailure()
                                    }
                                }
                        }
                        .addOnFailureListener { exception ->
                            FirebaseCrashlytics.getInstance().recordException(exception)
                            onFailure()
                        }
                }
                .addOnFailureListener { exception ->
                    FirebaseCrashlytics.getInstance().recordException(exception)
                    onFailure()
                }
        } else {
            onFailure()
        }
    }
}
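package com.jbhntr86.sabia

// Minimal usage sketch for GoogleAuth, assuming it is driven from an Activity's onCreate.
// SignInDemoActivity and the log tag are hypothetical and used only for illustration.
import android.os.Bundle
import android.util.Log
import androidx.appcompat.app.AppCompatActivity

class SignInDemoActivity : AppCompatActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        GoogleAuth(this).authenticate(
            onSuccess = { user -> Log.d("SignInDemo", "Signed in as ${user.uid}") },
            onFailure = { Log.w("SignInDemo", "Google sign-in failed") }
        )
    }
}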
package com.jbhntr86.sabia

import android.app.Activity
import android.content.Intent
import android.util.Log
import com.google.android.gms.auth.api.identity.BeginSignInRequest
import com.google.android.gms.auth.api.identity.BeginSignInResult
import com.google.android.gms.auth.api.identity.Identity
import com.google.android.gms.auth.api.identity.SignInClient
import com.google.android.gms.auth.api.identity.SignInCredential
import com.google.android.gms.common.api.ApiException
import com.google.android.gms.common.api.CommonStatusCodes

// Alternative sign-in flow built on the Identity One Tap API. The serverClientId is the
// web OAuth client id (the same value GoogleAuth reads from google-services.json).
class GoogleAuthOneTap(
    private val activity: Activity,
    private val serverClientId: String
) {

    companion object {
        private const val TAG = "GoogleAuthOneTap"
        private const val REQUEST_CODE_GOOGLE_SIGN_IN = 1
    }

    private lateinit var signInClient: SignInClient
    private var listener: AuthenticationListener? = null

    fun authenticate(listener: AuthenticationListener) {
        this.listener = listener
        val signInRequest = BeginSignInRequest.builder()
            .setGoogleIdTokenRequestOptions(
                BeginSignInRequest.GoogleIdTokenRequestOptions.builder()
                    .setSupported(true)
                    .setServerClientId(serverClientId)
                    .setFilterByAuthorizedAccounts(false)
                    .build()
            )
            .build()
        signInClient = Identity.getSignInClient(activity)
        signInClient.beginSignIn(signInRequest)
            .addOnSuccessListener { result ->
                listener.onBeginSignInSuccess(result)
                try {
                    activity.startIntentSenderForResult(
                        result.pendingIntent.intentSender,
                        REQUEST_CODE_GOOGLE_SIGN_IN,
                        null,
                        0,
                        0,
                        0
                    )
                } catch (e: Exception) {
                    Log.e(TAG, "Unable to start sign-in intent", e)
                    listener.onFailure(e)
                }
            }
            .addOnFailureListener { e ->
                Log.e(TAG, "Failed to begin sign-in", e)
                listener.onFailure(e)
            }
    }

    fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        if (requestCode == REQUEST_CODE_GOOGLE_SIGN_IN) {
            try {
                // Throws an ApiException (e.g. CANCELED) when no credential is returned.
                val signInCredential = signInClient.getSignInCredentialFromIntent(data)
                listener?.onSuccess(signInCredential)
            } catch (e: ApiException) {
                when (e.statusCode) {
                    CommonStatusCodes.CANCELED -> {
                        // User cancelled sign-in
                        listener?.onCancelled()
                    }
                    else -> {
                        Log.e(TAG, "Error retrieving sign-in credential", e)
                        listener?.onFailure(e)
                    }
                }
            }
        }
    }

    interface AuthenticationListener {
        fun onBeginSignInSuccess(result: BeginSignInResult)
        fun onSuccess(signInCredential: SignInCredential)
        fun onCancelled()
        fun onFailure(e: Exception)
    }
}
package com.jbhntr86.sabia

import android.content.Intent
import android.net.Uri
import android.os.Bundle
import android.os.Handler
import android.os.Looper
import android.widget.VideoView
import androidx.appcompat.app.AppCompatActivity

class IntroActivity : AppCompatActivity() {

    private lateinit var videoView: VideoView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_intro)
        videoView = findViewById(R.id.videoView)
        val videoPath = "android.resource://${packageName}/${R.raw.intromovie}"
        val videoUri = Uri.parse(videoPath)
        videoView.setVideoURI(videoUri)
        videoView.setOnCompletionListener {
            Handler(Looper.getMainLooper()).postDelayed({
                startMainActivity()
            }, 500)
        }
        videoView.start()
    }

    private fun startMainActivity() {
        val intent = Intent(this@IntroActivity, MainActivity::class.java)
        startActivity(intent)
        finish()
    }

    override fun onPause() {
        super.onPause()
        videoView.pause()
    }

    override fun onResume() {
        super.onResume()
        videoView.start()
    }
}
package com.jbhntr86.sabia

import java.io.File
import java.util.*

// Note: Stemmer, SpeechRecognizer, SpeechTranscriber, and AudioData are assumed to be
// project-local helper classes (they are not part of the Android SDK).
class Nlp {

    // Create an instance of the Stemmer class to use for word stemming
    private val stemmer: Stemmer = Stemmer()

    // Function to recognize speech and return the transcribed text
    fun recognizeSpeech(): String {
        // Get an instance of the SpeechRecognizer class and start capturing audio
        val recognizer = SpeechRecognizer.getSpeechRecognizer()
        recognizer.captureAudio()
        // Get the captured audio file from the SpeechRecognizer instance
        val audioFile = recognizer.getAudioFile()
        // Transcribe the audio file using an instance of the SpeechTranscriber class
        return transcribeAudio(audioFile)
    }

    // Function to transcribe an audio file and return the transcribed text
    private fun transcribeAudio(audioFile: File): String {
        // Get an instance of the SpeechTranscriber class and transcribe the audio data
        val transcriber = SpeechTranscriber.getSpeechTranscriber()
        val audioData = AudioData(audioFile)
        return transcriber.transcribe(audioData)
    }

    // Function to process text by tokenizing, removing stop words, and stemming
    fun processText(text: String): List<String> {
        // Tokenize the input text into individual words
        val tokens = tokenize(text)
        // Filter out any stop words from the tokenized list
        val filteredTokens = filterStopwords(tokens)
        // Stem each word in the filtered list using the Stemmer instance
        return filteredTokens.map { stem(it) }
    }

    // Function to tokenize a string into individual words
    private fun tokenize(text: String): List<String> {
        // Create a StringTokenizer instance to split the string into words
        val tokenizer = StringTokenizer(text)
        // Add each word to a list of tokens
        val tokens: MutableList<String> = ArrayList()
        while (tokenizer.hasMoreTokens()) {
            tokens.add(tokenizer.nextToken())
        }
        return tokens
    }

    // Function to filter out stop words from a list of tokens
    private fun filterStopwords(tokens: List<String>): List<String> {
        // Define a set of common stop words
        val stopwords = setOf(
            "a", "an", "and", "are", "as", "at", "be", "by", "for", "from",
            "has", "he", "in", "is", "it", "its", "of", "on", "that", "the",
            "to", "was", "were", "will", "with"
        )
        // Filter out any stop words from the token list
        return tokens.filter { !stopwords.contains(it.lowercase()) }
    }

    // Function to stem a single word using the Stemmer instance
    private fun stem(word: String): String {
        // Set the current word to the stemmer instance and return the stemmed version
        stemmer.setCurrent(word)
        stemmer.stem()
        return stemmer.getCurrent()
    }
}
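package com.jbhntr86.sabia

// Minimal usage sketch for Nlp.processText: tokenization, stop-word removal, and stemming
// over a fixed sentence. The exact output depends on the project's Stemmer, which is
// assumed to be available on the classpath.
object NlpUsageSketch {
    fun demo() {
        val pipeline = Nlp()
        val terms = pipeline.processText("The quick brown fox jumps over the lazy dog")
        println(terms) // e.g. [quick, brown, fox, jump, over, lazi, dog] with an English stemmer
    }
}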
package com.jbhntr86.sabia

import android.content.Context
import android.content.Intent
import android.os.Bundle
import android.speech.RecognitionListener
import android.speech.RecognizerIntent
import android.speech.SpeechRecognizer
import android.widget.Toast

class OnSpeechRecognized(private val context: Context) : RecognitionListener {

    private var speechRecognizer: SpeechRecognizer? = null

    override fun onReadyForSpeech(params: Bundle?) {}
    override fun onBeginningOfSpeech() {}
    override fun onRmsChanged(rmsdB: Float) {}
    override fun onBufferReceived(buffer: ByteArray?) {}
    override fun onEndOfSpeech() {}
    override fun onError(error: Int) {}

    override fun onResults(results: Bundle?) {
        val matches = results?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)
        if (!matches.isNullOrEmpty()) {
            val text = matches[0]
            Toast.makeText(context, text, Toast.LENGTH_SHORT).show()
            // Do something with the recognized text
        }
        startListening()
    }

    override fun onPartialResults(partialResults: Bundle?) {}
    override fun onEvent(eventType: Int, params: Bundle?) {}

    fun startListening() {
        if (speechRecognizer == null) {
            speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context)
            speechRecognizer?.setRecognitionListener(this)
        }
        val intent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
        intent.putExtra(
            RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM
        )
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.packageName)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1)
        intent.putExtra(
            RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS,
            2000
        )
        speechRecognizer?.startListening(intent)
    }

    fun stopListening() {
        speechRecognizer?.stopListening()
    }

    fun destroy() {
        stopListening()
        speechRecognizer?.destroy()
        speechRecognizer = null
    }
}
package com.jbhntr86.sabia

import android.content.Context
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaRecorder
import java.io.ByteArrayOutputStream
import java.util.concurrent.Executors

class OnWakeWordDetect(
    private val context: Context,
    private val listener: OnWakeWordDetectListener
) {

    interface OnWakeWordDetectListener {
        fun onWakeWordDetected()
    }

    private var audioRecord: AudioRecord? = null
    private var isRecording = false
    private var buffer: ShortArray? = null
    private val bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNELS, AUDIO_ENCODING)

    companion object {
        private const val SAMPLE_RATE = 16000
        private const val CHANNELS = AudioFormat.CHANNEL_IN_MONO
        private const val AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT
        private const val AUDIO_BLOCK_SIZE = 4096
        private const val AUDIO_BLOCK_COUNT = 4
        private const val DETECTION_THRESHOLD = 5000
    }

    fun startListening() {
        if (isRecording) {
            return
        }
        isRecording = true
        audioRecord = AudioRecord(
            MediaRecorder.AudioSource.MIC,
            SAMPLE_RATE,
            CHANNELS,
            AUDIO_ENCODING,
            bufferSize
        )
        val executor = Executors.newSingleThreadExecutor()
        executor.execute {
            try {
                audioRecord?.startRecording()
                val outputStream = ByteArrayOutputStream()
                buffer = ShortArray(bufferSize)
                while (isRecording) {
                    val bufferReadResult = audioRecord?.read(buffer!!, 0, bufferSize)
                    if (bufferReadResult != null && bufferReadResult > 0) {
                        outputStream.write(toByteArray(buffer!!, bufferReadResult))
                        checkWakeWordDetected(buffer!!, bufferReadResult)
                    }
                }
                audioRecord?.stop()
                audioRecord?.release()
                audioRecord = null
                buffer = null
            } catch (e: Exception) {
                stopListening()
            }
        }
    }

    fun stopListening() {
        isRecording = false
    }

    private fun checkWakeWordDetected(buffer: ShortArray, bufferReadResult: Int) {
        // Split the captured samples into blocks and report a detection as soon as any
        // block's peak amplitude exceeds the threshold.
        val blockSize = bufferReadResult / AUDIO_BLOCK_COUNT
        if (blockSize == 0) {
            return
        }
        var index = 0
        while (index < bufferReadResult) {
            var maxAmplitude = 0
            val end = minOf(index + blockSize, bufferReadResult)
            for (i in index until end) {
                val amplitude = Math.abs(buffer[i].toInt())
                if (amplitude > maxAmplitude) {
                    maxAmplitude = amplitude
                }
            }
            if (maxAmplitude > DETECTION_THRESHOLD) {
                listener.onWakeWordDetected()
                return
            }
            index += blockSize
        }
    }

    // Convert 16-bit PCM samples to little-endian bytes so they can be buffered as a byte stream.
    private fun toByteArray(shortArray: ShortArray, size: Int): ByteArray {
        val bytes = ByteArray(size * 2)
        for (i in 0 until size) {
            bytes[i * 2] = (shortArray[i].toInt() and 0xff).toByte()
            bytes[i * 2 + 1] = (shortArray[i].toInt() shr 8 and 0xff).toByte()
        }
        return bytes
    }
}
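package com.jbhntr86.sabia

// Minimal usage sketch for OnWakeWordDetect: start amplitude-based detection and stop it
// once the callback fires. Assumes RECORD_AUDIO has already been granted (see
// PermissionsHandler); WakeWordDemo is a hypothetical caller.
import android.content.Context
import android.util.Log

class WakeWordDemo(context: Context) {
    private val detector = OnWakeWordDetect(context, object : OnWakeWordDetect.OnWakeWordDetectListener {
        override fun onWakeWordDetected() {
            Log.d("WakeWordDemo", "Loud audio block detected, treating it as the wake word")
            stop()
        }
    })

    fun start() = detector.startListening()
    fun stop() = detector.stopListening()
}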
package com.jbhntr86.sabia

// The com.openai.* imports below are assumed to come from a project-local OpenAI client
// wrapper; they are not part of an official OpenAI Java/Kotlin SDK.
import com.openai.api.models.apiKey
import com.openai.api.models.configuration
import com.openai.api.requests.CompletionRequest
import com.openai.api.requests.EngineRequest
import com.openai.texttovoice.TextToSpeech
import java.io.File
import java.nio.file.Paths

class OpenAIEngine {

    companion object {
        private const val PROCESSED_INPUT_FILE_NAME = "processedInput.txt"
        private const val API_KEY_FILE_NAME = "api.txt"
        private const val CONFIG_FILE_NAME = "config.txt"
        private const val AVAILABLE_ENGINES_FILE_NAME = "availEngines.txt"
        private const val DEFAULT_MAX_TOKENS = 60
        private const val DEFAULT_N = 1
        private const val DEFAULT_STOP = ""

        private val textToSpeech = TextToSpeech()

        // Report a fatal configuration problem through the shared ErrorHandler and abort.
        private fun fail(message: String): Nothing {
            val e = IllegalStateException(message)
            ErrorHandler.handleException(e)
            throw e
        }

        private val openAIKey = apiKey {
            File("assets/$API_KEY_FILE_NAME").readText().trim()
        }

        private val openAIConfig = configuration {
            val configFile = Paths.get("assets", CONFIG_FILE_NAME).toFile()
            if (!configFile.exists() || configFile.length() == 0L) {
                mapOf(
                    "max_tokens" to DEFAULT_MAX_TOKENS,
                    "n" to DEFAULT_N,
                    "stop" to DEFAULT_STOP
                )
            } else {
                configFile.bufferedReader().useLines { lines ->
                    lines.fold(mutableMapOf<String, Any>()) { config, line ->
                        val (key, value) = line.trim().split("=")
                        config[key] = value
                        config
                    }
                }
            }
        }

        private val openAIEngines = EngineRequest(openAIKey).list()

        init {
            if (openAIKey.isEmpty()) {
                fail("No API key provided.")
            }
        }

        fun getCompletion(engineId: String, prompt: String): String? {
            openAIEngines.find { it.id == engineId }
                ?: fail("Engine with ID '$engineId' not found.")
            val maxTokens = openAIConfig["max_tokens"] as? Int ?: DEFAULT_MAX_TOKENS
            val n = openAIConfig["n"] as? Int ?: DEFAULT_N
            val stop = openAIConfig["stop"] as? String ?: DEFAULT_STOP
            val completionRequest =
                CompletionRequest(openAIKey, engineId, prompt, maxTokens, n, stop)
            val response = completionRequest.makeRequest()
            return response.choices.firstOrNull()?.text
        }

        fun listEngines() {
            val enginesFile = File(AVAILABLE_ENGINES_FILE_NAME)
            openAIEngines.forEach {
                enginesFile.appendText("${it.id} - ${it.name}\n")
            }
        }

        fun generateSpeech(text: String) {
            textToSpeech.generateSpeech(text)
        }

        fun processInputFile() {
            val inputFile = Paths.get(PROCESSED_INPUT_FILE_NAME).toFile()
            val prompt = inputFile.readText()
            if (prompt.isBlank()) {
                fail("Input file is empty.")
            }
            val engineId = openAIEngines.firstOrNull()?.id
                ?: fail("No available engines found.")
            val completionText = getCompletion(engineId, prompt)
                ?: fail("Failed to generate completion for prompt.")
            generateSpeech(completionText)
        }
    }
}
package com.jbhntr86.sabia;

import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;

import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

public class PermissionsHandler {

    private final Activity activity;

    public PermissionsHandler(Activity activity) {
        this.activity = activity;
    }

    public void requestPermissions() {
        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO)
                != PackageManager.PERMISSION_GRANTED
        ) {
            ActivityCompat.requestPermissions(
                    activity,
                    new String[]{ Manifest.permission.RECORD_AUDIO },
                    0
            );
        }
    }
}
package com.jbhntr86.sabia

import com.google.api.gax.core.FixedCredentialsProvider
import com.google.auth.oauth2.GoogleCredentials
import com.google.cloud.speech.v1.RecognitionAudio
import com.google.cloud.speech.v1.RecognitionConfig
import com.google.cloud.speech.v1.RecognitionConfig.AudioEncoding
import com.google.cloud.speech.v1.SpeechClient
import com.google.cloud.speech.v1.SpeechRecognitionAlternative
import com.google.cloud.speech.v1.SpeechSettings
import com.google.protobuf.ByteString
import java.io.ByteArrayInputStream
import java.io.File
import java.nio.file.Files
import java.util.*

class SpeechToText(private val apiKey: String, private val langCode: String) {

    private fun getSpeechSettings(): RecognitionConfig {
        return RecognitionConfig.newBuilder()
            .setEncoding(AudioEncoding.LINEAR16)
            // 16 kHz matches the PCM audio captured by OnWakeWordDetect.
            .setSampleRateHertz(16000)
            .setLanguageCode(langCode)
            .build()
    }

    fun process(audioData: ByteArray): String {
        try {
            // Build a client that uses the bundled credentials instead of the
            // default application credentials.
            val credentials = GoogleCredentials.fromStream(getCredentialsStream())
            val speechSettings = SpeechSettings.newBuilder()
                .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
                .build()
            val speechClient = SpeechClient.create(speechSettings)
            val audio = RecognitionAudio.newBuilder()
                .setContent(ByteString.copyFrom(audioData))
                .build()
            val config = getSpeechSettings()
            val response = speechClient.recognize(config, audio)
            val results = response.resultsList
            val recognizedText = ArrayList<String>()
            for (result in results) {
                val alternatives: List<SpeechRecognitionAlternative> = result.alternativesList
                for (alternative in alternatives) {
                    recognizedText.add(alternative.transcript)
                }
            }
            speechClient.close()
            return recognizedText.joinToString(" ")
        } catch (e: Exception) {
            e.printStackTrace()
        }
        return ""
    }

    private fun getCredentialsStream(): ByteArrayInputStream {
        try {
            val credentialFile = File("google-services.json")
            if (credentialFile.exists()) {
                return ByteArrayInputStream(Files.readAllBytes(credentialFile.toPath()))
            }
            val confFile = File("google.conf")
            if (confFile.exists()) {
                return ByteArrayInputStream(Files.readAllBytes(confFile.toPath()))
            }
        } catch (e: Exception) {
            e.printStackTrace()
        }
        throw RuntimeException("Could not find Google credentials")
    }
}
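package com.jbhntr86.sabia

// Minimal usage sketch for SpeechToText: transcribing a raw 16 kHz LINEAR16 buffer, for
// example the bytes accumulated by OnWakeWordDetect. The apiKey and langCode values here
// are placeholders.
object SpeechToTextUsageSketch {
    fun demo(pcmAudio: ByteArray) {
        val recognizer = SpeechToText(apiKey = "unused-placeholder", langCode = "en-US")
        val transcript = recognizer.process(pcmAudio)
        println("Transcript: $transcript")
    }
}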
package com.jbhntr86.sabia

import android.content.Context
import android.os.Bundle
import android.speech.tts.TextToSpeech
import android.speech.tts.UtteranceProgressListener
import com.google.auth.oauth2.GoogleCredentials
import com.google.cloud.texttospeech.v1.TextToSpeechClient
import com.google.cloud.texttospeech.v1.TextToSpeechSettings
import java.io.FileInputStream
import java.io.IOException
import java.nio.file.Paths
import java.util.*

class TextToSpeechSabia(private val context: Context) : TextToSpeech.OnInitListener {

    private var textToSpeech: TextToSpeech? = null
    private var isInitialized = false

    override fun onInit(status: Int) {
        if (status == TextToSpeech.SUCCESS) {
            val locale = Locale.US
            val result = textToSpeech?.setLanguage(locale)
            if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                ErrorHandler.handleException(IllegalStateException("Text-to-speech language not supported"))
            } else {
                isInitialized = true
            }
        } else {
            ErrorHandler.handleException(IllegalStateException("Text-to-speech engine failed to initialize"))
        }
    }

    fun speak(text: String) {
        if (!isInitialized) {
            return
        }
        val utteranceId = UUID.randomUUID().toString()
        textToSpeech?.setOnUtteranceProgressListener(object : UtteranceProgressListener() {
            override fun onStart(utteranceId: String?) {}
            override fun onDone(utteranceId: String?) {}
            override fun onError(utteranceId: String?) {}
        })
        val params = Bundle()
        params.putString(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utteranceId)
        textToSpeech?.speak(text, TextToSpeech.QUEUE_FLUSH, params, utteranceId)
    }

    fun stop() {
        textToSpeech?.stop()
    }

    fun shutdown() {
        textToSpeech?.shutdown()
    }

    fun initialize() {
        val credentials = getCredentials()
        if (credentials == null) {
            ErrorHandler.handleException(IllegalStateException("Google credentials not found"))
            return
        }
        try {
            val settings = TextToSpeechSettings.newBuilder()
                .setCredentialsProvider { credentials }
                .build()
            // The Cloud Text-to-Speech client is created here but not used yet; speech is
            // produced by the on-device TextToSpeech engine below.
            val textToSpeechClient = TextToSpeechClient.create(settings)
            textToSpeech = TextToSpeech(context, this)
        } catch (e: IOException) {
            ErrorHandler.handleException(IllegalStateException("Unable to create the Cloud Text-to-Speech client", e))
        }
    }

    private fun getCredentials(): GoogleCredentials? {
        val jsonFile = "google-services.json"
        val confFile = "google.conf"
        var credentials: GoogleCredentials? = null
        try {
            // Try reading from google-services.json
            val inputStream = FileInputStream(jsonFile)
            credentials = GoogleCredentials.fromStream(inputStream)
        } catch (e: IOException) {
            try {
                // If google-services.json is not found, try reading from google.conf
                val path = Paths.get(confFile).toAbsolutePath()
                val inputStream = FileInputStream(path.toString())
                credentials = GoogleCredentials.fromStream(inputStream)
            } catch (e: Exception) {
                ErrorHandler.handleException(IllegalStateException("Google credentials not found", e))
            }
        }
        return credentials
    }
}
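package com.jbhntr86.sabia

// Minimal usage sketch for TextToSpeechSabia: initialize the engine once, speak a phrase,
// and release it when the host component goes away. TtsDemo is a hypothetical caller;
// note that speak() is a no-op until onInit reports success, so real callers would wait
// for initialization before speaking.
import android.content.Context

class TtsDemo(context: Context) {
    private val tts = TextToSpeechSabia(context)

    fun start() {
        tts.initialize()
        tts.speak("Sabia is ready.")
    }

    fun shutdown() {
        tts.stop()
        tts.shutdown()
    }
}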