
@monday8am · Created December 9, 2025 11:29
Initialization code for the MediaPipe local inference library
import android.content.Context
import com.google.ai.edge.localagents.core.proto.Content
import com.google.ai.edge.localagents.core.proto.Part
import com.google.ai.edge.localagents.fc.GenerativeModel
import com.google.ai.edge.localagents.fc.HammerFormatter
import com.google.ai.edge.localagents.fc.LlmInferenceBackend
import com.google.mediapipe.tasks.genai.llminference.LlmInference
import com.google.mediapipe.tasks.genai.llminference.LlmInference.Backend
import kotlinx.coroutines.withContext
import java.io.File

// Snippet from a larger class: `dispatcher`, `context`, `tools`,
// `generativeModel`, `chatSession`, SYSTEM_ROLE, and SYSTEM_PROMPT are
// properties/constants of the enclosing class; ModelConfiguration and
// HardwareBackend are project types, so they have no imports here.

// Loads the on-device model and wires it into a function-calling chat session.
suspend fun initialize(
    modelConfig: ModelConfiguration,
    modelPath: String,
): Result<Unit> =
    withContext(dispatcher) {
        runCatching {
            val llmInference = createLlmInference(context, modelPath, modelConfig)
            // HammerFormatter targets Hammer-family models: it renders tool
            // declarations into the model's prompt format and parses tool
            // calls back out of the model's output.
            val backend = LlmInferenceBackend(llmInference, HammerFormatter())
            val systemInstruction = createSystemInstruction()
            generativeModel = GenerativeModel(backend, systemInstruction, tools)
            chatSession = generativeModel!!.startChat()
            // The qualified `this` label was obfuscated in the scraped gist
            // ("this@…" looked like an email address); the class name
            // LlmClient is assumed. A qualified `this` is required because
            // plain `this` inside the withContext lambda is the CoroutineScope.
            this@LlmClient.modelConfig = modelConfig
        }
    }

private fun createLlmInference(
    context: Context,
    modelPath: String,
    modelConfig: ModelConfiguration,
): LlmInference {
    if (!File(modelPath).exists()) {
        throw IllegalStateException("Model file not found at path: $modelPath")
    }
    // Prefer the GPU delegate only when the device is known to support it.
    val backend =
        if (modelConfig.hardwareAcceleration == HardwareBackend.GPU_SUPPORTED) {
            Backend.GPU
        } else {
            Backend.CPU
        }
    val llmInferenceOptions =
        LlmInference.LlmInferenceOptions
            .builder()
            .setModelPath(modelPath)
            .setMaxTokens(modelConfig.defaultMaxOutputTokens)
            .setPreferredBackend(backend)
            .build()
    return LlmInference.createFromOptions(context, llmInferenceOptions)
}

// Builds the system instruction as a Content proto with the system role.
private fun createSystemInstruction(): Content =
    Content
        .newBuilder()
        .setRole(SYSTEM_ROLE)
        .addParts(
            Part
                .newBuilder()
                .setText(SYSTEM_PROMPT),
        ).build()
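
The snippet passes a `tools` list to `GenerativeModel` but doesn't show how it is built. Below is a minimal sketch using the proto builders from the same AI Edge function-calling SDK package as `Content` and `Part`; the `getWeather` function, its `city` parameter, and their descriptions are illustrative assumptions, not part of the original gist.

import com.google.ai.edge.localagents.core.proto.FunctionDeclaration
import com.google.ai.edge.localagents.core.proto.Schema
import com.google.ai.edge.localagents.core.proto.Tool
import com.google.ai.edge.localagents.core.proto.Type

// Hypothetical tool declaration; the real gist defines `tools` elsewhere.
val getWeather: FunctionDeclaration =
    FunctionDeclaration
        .newBuilder()
        .setName("getWeather")
        .setDescription("Returns the current weather for a city.")
        .setParameters(
            Schema
                .newBuilder()
                .setType(Type.OBJECT)
                .putProperties(
                    "city",
                    Schema
                        .newBuilder()
                        .setType(Type.STRING)
                        .setDescription("City name")
                        .build(),
                ),
        ).build()

val tools: List<Tool> =
    listOf(Tool.newBuilder().addFunctionDeclarations(getWeather).build())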
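
And a hedged usage sketch: everything named below (the `LlmClient` wrapper class, the `ModelConfiguration` constructor arguments, the model path) is a placeholder invented for illustration; only `initialize`'s signature and its `Result`-based error handling come from the gist.

import android.content.Context
import android.util.Log
import kotlinx.coroutines.Dispatchers

// Warm up the on-device model at app start (all names assumed, see above).
suspend fun warmUpLlm(context: Context) {
    val client = LlmClient(context, Dispatchers.Default)
    val config = ModelConfiguration(
        hardwareAcceleration = HardwareBackend.GPU_SUPPORTED,
        defaultMaxOutputTokens = 512,
    )
    client
        .initialize(config, modelPath = "${context.filesDir}/llm/model.task")
        .onSuccess { Log.d("LlmClient", "Model ready for chat") }
        .onFailure { e -> Log.e("LlmClient", "Model init failed", e) }
}

From there, messages would go through the `chatSession` created in `initialize`; the function-calling SDK's chat session returns responses whose parts may contain a function call to dispatch against the declared tools.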