I am not a developer, so I do not understand much about reading logs; this may not be a bug.
// Capture the navigator before any async gaps so we never touch a
// possibly-deactivated BuildContext after an await.
final navigator = Navigator.of(context);
debugPrint('_processPickedImage called');
try {
  // Read the picked file asynchronously; readAsBytesSync would block the
  // UI isolate and jank the frame while the file is loaded.
  final bytes = await File(imageFile!.path).readAsBytes();
  final image = await decodeImageFromList(bytes);
  // final jsonImage = JsonImage(imageUri: imageFile!.path); // this does not work
  // Re-encode the decoded image as PNG bytes for the Vision API payload.
  final byteData = await image.toByteData(format: ui.ImageByteFormat.png);
  if (byteData == null) {
    throw Exception('Failed to convert image to ByteData');
  }
  final uint8List = byteData.buffer.asUint8List();
  final base64Image = base64Encode(uint8List);
  // NOTE(review): imageUri is being fed base64-encoded PNG data rather than
  // a URI — confirm against the google_vision package docs that this is the
  // intended way to pass inline image content.
  final jsonImage = JsonImage(imageUri: base64Image);
  final requests = AnnotationRequests(
    requests: [
      AnnotationRequest(
        jsonImage: jsonImage,
        features: [Feature(type: AnnotationType.textDetection)],
      ),
    ],
  );
  final annotatedResponses = await googleVision.annotate(requests: requests);
  debugPrint('$annotatedResponses');
  // Full-page OCR text; falls back to the empty string when nothing was
  // detected (fullTextAnnotation is null in that case).
  final text =
      annotatedResponses.responses.first.fullTextAnnotation?.text ?? '';
  setState(() {
    recognizedText = text;
  });
  // Push via the navigator captured before the async gaps above.
  await navigator.push(
    MaterialPageRoute(
      builder: (BuildContext context) => ResultScreen(text: recognizedText),
    ),
  );
} catch (error) {
  // Best-effort handler: log and stay on the current screen rather than crash.
  debugPrint('Error during text recognition: $error');
}