So, I have been trying to implement a few tflite models in flutter, which I had earlier converted from .h5 files. I purposefully used print functions while debugging to understand where the issue lies.
The app hangs after a message like this appears:

The print functions I used before this appears indicate my model was loaded successfully

The app runs into a run time error stating bad preconditions, often pointing out to input size parameters (I may be wrong here).
Here are the functions that I am using to implement my Tflite model. Basically I need to run a few models(one at a time), after running the first model and I placed a few "if" conditions for the same.
import 'package:flutter/material.dart';
import 'package:file_picker/file_picker.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:tflite_flutter/tflite_flutter.dart';
import 'package:image/image.dart' as img;
import 'dart:math';
import 'dart:io';
class ImageScreen extends StatefulWidget {
const ImageScreen({Key? key}) : super(key: key);
@override
_ImageScreenState createState() => _ImageScreenState();
}
class _ImageScreenState extends State<ImageScreen> {
var imgFile;
String Image_Path= "";
Future<String> pickImage() async {
// Function body
// Add a return statement at the end
PermissionStatus status = await Permission.storage.request();
FilePickerResult? result = await FilePicker.platform.pickFiles(
type: FileType.custom,
allowedExtensions: ['jpg', 'jpeg', 'png'],
);
if (result != null) {
imgFile = result.files.single;
print("Image file path: " + imgFile.path);
Image_Path = imgFile.path;
print("picked image");
return Image_Path;
} else {
// User canceled the picker
print("Error");
return "Error";
}
}
//LOAD MODEL WORKS
Interpreter? interpreter;
Future<void> loadModel(String modelPath) async {
try {
interpreter = await Interpreter.fromAsset(modelPath);
print('Loaded model successfully');
// Get input and output shapes
if (interpreter != null) {
var inputShape = interpreter!.getInputTensor(0).shape;
var outputShape = interpreter!.getOutputTensor(0).shape;
print('Input shape: $inputShape');
print('Output shape: $outputShape');
//I/flutter (10135): Input shape: [1, 256, 256, 3]
//I/flutter (10135): Output shape: [1, 4]
}
} catch (e) {
print('Failed to load model: $e');
}
}
Future<List> runModel(String imagePath) async {
// Load the image
var image = img.decodeImage(File(imagePath).readAsBytesSync());
var resized = img.copyResize(image!, width: 256, height: 256);
// Convert image to a list of floats
var input = resized.getBytes().buffer.asFloat32List().reshape([49152]);
// Create output tensor
var output = List<double>.filled(4,0).reshape([1,4]);//try filled(4.0), reshape([1,4])
// Check if interpreter is null
if (interpreter == null) {
print('Interpreter is null');
//return;
}
// Print input and output shapes
print('Input shape: ${input.shape}');
print('Output shape: ${output.shape}');
// Print input data
print('Input data: $input');
// Run the model
interpreter?.run(input, output);
print("ok running model");
return output;
}
Future<String> makePredictions(String imagePath) async {
// Load and run the 4-class model
await loadModel('assets/models/four_class_STFT_80valacc.tflite');
var prediction_4class = await runModel(imagePath);
String result = '';
// Convert List<dynamic> to List<double>
List<double> prediction_4class_double = prediction_4class.cast<double>();
// Find the index of the maximum value in prediction_4class
int maxIndex = prediction_4class.indexWhere((d) => d == prediction_4class_double.reduce(max));
// Based on the result, load and run the appropriate binary model
if (maxIndex == 0) {
print("ok 1");
result = 'Normal';
} else if (maxIndex == 1) {
// Asthma
await loadModel('assets/models/NvsA.tflite');
print("ok 1");
var prediction_binary = await runModel(imagePath);
result = 'Asthma with confidence ${prediction_binary[0]}';
} else if (maxIndex == 2) {
// Pneumonia
print("ok 1");
await loadModel('assets/models/NvsP.tflite');
var prediction_binary = await runModel(imagePath);
result = 'Pneumonia with confidence ${prediction_binary[0]}';
} else if (maxIndex == 3) {
// COPD
print("ok 1");
await loadModel('assets/models/NvsC_best.tflite');
var prediction_binary = await runModel(imagePath);
result = 'COPD with confidence ${prediction_binary[0]}';
}
return result;
}
//Remove this if causing errors
/*
@override
void initState() {
super.initState();
loadModel(Image_Path).then((value) {setState((){});});
}*/
@override
Widget build(BuildContext context) {
return Scaffold(
backgroundColor: Colors.white,
appBar:AppBar(
elevation: 0,
title:const Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Text(
"Ausculto",
style:
TextStyle(color: Color.fromARGB(221, 7, 173, 224), fontWeight: FontWeight.w600),
),
Text(
"Wave",
style: TextStyle(color: Color.fromARGB(255, 248, 213, 16), fontWeight: FontWeight.w600),
),
Text(" "),//balances out, maintains center, use better methods later
],
),centerTitle: true,
backgroundColor: const Color.fromARGB(255, 255, 255, 255),
),
body:Center(
child:Container(
width: 200,
height: 250,
child:ListView(
children: [
const Text(
" Results:",
style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold,color: Colors.black),
),
const SizedBox(height: 20,),
//FOR TESTING ONLY:
Container(
width:200,
child:ElevatedButton(
onPressed: () async{
//ability to add an image
String? imagePath = await pickImage();
if (imagePath != null) {
// Run the model on the image
String result = await makePredictions(imagePath);
// Display the result in a dialog
showDialog(
context: context,
builder: (BuildContext context) {
return AlertDialog(
title: Text('Prediction'),
content: Text(result),
actions: <Widget>[
TextButton(
child: Text('Close'),
onPressed: () {
Navigator.of(context).pop();
},
),
],
);
},
);
}
},
//change color of elevated button here
style: ElevatedButton.styleFrom(
backgroundColor: Colors.red,
foregroundColor: Colors.white,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(18.0),
),
),
child: const Text('Add Test Image',style: TextStyle(fontSize: 13),),
),
),
const SizedBox(height: 20,),
],
),
),
),
);
}
}
I want to get the output from the tflite model, the models tell me whether a person is normal or not, and then further Ive placeed if conditions such that, if the model predicts that the person is not normal nad say has a disease, other models are implemented to check the same . I have spent over 10 hours trying out different ways to implement the dart file, from different projects, all of which have failed and led to the same result or the app ending up not building itself. Several solutions on the web include packages that are either deprecated or incompatible with dart 3.0
So I figured this out after many hours of debugging. It turned out the number of elements present in the labels.txt changes the output shape requirement.
Here's the code that worked:
return FutureBuilder<List<String>>(
future: () async {
try {
print("Closed previous model");
await Tflite.loadModel(
model: 'assets/models/NvsA.tflite',
labels: 'assets/models/labelsA.txt',
);
print("ModelA loaded successfully");
var output = await Tflite.runModelOnImage(
path: path_to_your_image,
numResults: 2,
threshold:0.2,
imageMean: 127.5,
imageStd: 127.5,
);
print("Model A run successfully");
print(output);
String result;
if(output?[0]['confidence'] > 0.5){
print("Normal");
result = "Normal";
}
else{
print("Asthma");
result = "Asthma";
}
//await Tflite.close(); // Close ModelA after inference
print("ModelA closed successfully");
return [result];
} catch (error) {
print(error); // Log errors for debugging
return ["Error"];
}
}(),
builder: (context, snapshot) {
if (snapshot.connectionState == ConnectionState.waiting) {
return const CircularProgressIndicator();
} else if (snapshot.hasError) {
return Text('Error: ${snapshot.error}');
} else {
return Text('Result: ${snapshot.data![0]}');
}
}
);
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With