Type Error in interpreter.run(inputBuffer, floatOutputBuffer)

class MyTFLiteModel(context: Context) {

    private val interpreter: Interpreter

    init {
        val tfliteModel = loadModelFile(context, "compressed_model.tflite")
        interpreter = Interpreter(tfliteModel)
    }

    /**
     * Memory-maps a TFLite model packaged in the app's assets.
     *
     * Uses [android.content.res.AssetManager.openFd] so the mapping covers only the
     * model's bytes: assets are packed inside the APK, so the start offset is
     * generally non-zero and `fileChannel.size()` would be the wrong length.
     */
    private fun loadModelFile(context: Context, filename: String): ByteBuffer {
        // NOTE(review): the original wrapped assets.open() in FileInputStream,
        // which does not compile (open() returns InputStream, not a path/fd).
        val assetFd = context.assets.openFd(filename)
        FileInputStream(assetFd.fileDescriptor).use { inputStream ->
            return inputStream.channel.map(
                FileChannel.MapMode.READ_ONLY,
                assetFd.startOffset,
                assetFd.declaredLength,
            )
        }
    }

    /**
     * Runs inference on [inputData] and returns the index of the highest-scoring
     * output value (argmax over the model's first output tensor).
     */
    fun predict(inputData: List<ArrayList<Float>>): Int {
        val inputBuffer = prepareInput(inputData)

        val outputSize = interpreter.getOutputTensor(0).shape()[1]
        // TFLite's JNI layer requires a *direct*, native-ordered buffer.
        val outputBuffer = ByteBuffer
            .allocateDirect(outputSize * java.lang.Float.BYTES)
            .order(ByteOrder.nativeOrder())

        // Interpreter.run(Object input, Object output): pass the direct ByteBuffers
        // themselves. Handing it a FloatBuffer view (or any type the overload cannot
        // match, e.g. when the Interpreter import is unresolved) makes Kotlin fall
        // back to the stdlib `run` scope function, which is exactly the
        // `TypeVariable(T).() -> TypeVariable(R)` type error reported above.
        interpreter.run(inputBuffer, outputBuffer)

        // The original never copied the results, so argmax always returned 0.
        // Rewind first so reads start at position 0.
        outputBuffer.rewind()
        val outputArray = FloatArray(outputSize)
        outputBuffer.asFloatBuffer().get(outputArray)

        // Argmax over the scores.
        var maxIndex = 0
        for (i in 1 until outputArray.size) {
            if (outputArray[i] > outputArray[maxIndex]) {
                maxIndex = i
            }
        }
        return maxIndex
    }

    /**
     * Flattens the 2-D [inputData] into a direct, native-ordered float ByteBuffer.
     *
     * Assumes each inner list has exactly `inputSize` elements matching the model's
     * input tensor — TODO(review): confirm against the model/caller.
     */
    private fun prepareInput(inputData: List<ArrayList<Float>>): ByteBuffer {
        val inputSize = interpreter.getInputTensor(0).shape()[1]
        val inputBuffer = ByteBuffer
            .allocateDirect(inputSize * inputData.size * java.lang.Float.BYTES)
            .order(ByteOrder.nativeOrder())
        for (row in inputData) {
            for (value in row) {
                // The original loop body was empty: the buffer was never filled.
                inputBuffer.putFloat(value)
            }
        }
        // Reset position so the interpreter reads from the start.
        inputBuffer.rewind()
        return inputBuffer
    }
}

Doubt :-
In this line interpreter.run(inputBuffer, floatOutputBuffer)
I am getting a type error for inputBuffer and floatOutputBuffer:
TypeVariable(T).() → TypeVariable(R)

Can someone help me with this?

The signature in the error message — `TypeVariable(T).() → TypeVariable(R)` — is the receiver-lambda type of Kotlin's standard-library `run` scope function, which tells you what actually happened: the compiler could not match TensorFlow Lite's `Interpreter.run(Object input, Object output)` overload, so it fell back to the stdlib `run` extension, which takes a single lambda rather than two buffers. This typically occurs when the `org.tensorflow.lite.Interpreter` import is missing or unresolved (library/version mismatch), or when the arguments cannot satisfy the expected overload. To fix it: verify the TensorFlow Lite dependency resolves and `Interpreter` is imported correctly, and pass the direct `ByteBuffer`s themselves (allocated with `allocateDirect` and `order(ByteOrder.nativeOrder())`) for both input and output rather than a `FloatBuffer` view — the two-argument `run` call will then resolve to the correct method. Double-check the TensorFlow Lite documentation for the exact usage of `Interpreter.run`.