Cannot copy from a TensorFlowLite tensor (StatefulPartitionedCall:0) with shape [1, 25200, 14] to a Java object with shape [1, 9]
I am using Flutter to create an app to detect objects, recognize faces, etc.
I get an error when using the tflite package to integrate my custom model with the app.
Here is the source code:
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:image_picker/image_picker.dart';
import 'package:tflite/tflite.dart';
class Tensorflow extends StatefulWidget {
const Tensorflow({Key? key}) : super(key: key);
@override
_TensorflowState createState() => _TensorflowState();
}
class _TensorflowState extends State<Tensorflow> {
List? _outputs;
File? _image;
bool _loading = false;
double? _imageWidth;
double? _imageHeight;
List? _recognitions;
@override
void initState() {
super.initState();
_loading = true;
loadModel().then((value) {
setState(() {
_loading = false;
});
});
}
loadModel() async {
try {
await Tflite.loadModel(
model: "assets/best-fp16.tflite",
labels: "assets/data.txt",
);
} on PlatformException {
print("Failed to load the model");
}
}
classifyImage(File image) async {
print("classifyImage running");
var output = await Tflite.runModelOnImage(
path: image.path,
);
FileImage(image)
.resolve(ImageConfiguration())
.addListener((ImageStreamListener((ImageInfo info, bool _) {
setState(() {
_imageWidth = info.image.width.toDouble();
_imageHeight = info.image.height.toDouble();
});
})));
print("output = ");
print(output);
if (output!.isEmpty) {
print("in if output = []");
var tmp = {
"confidence": 0,
"index": 100,
"label": "Can't identify",
};
output = [...output, tmp];
print("output after change in if = ");
print(output);
}
setState(() {
_loading = false;
_outputs = output;
});
print("_outputs = ");
print(_outputs);
print("classifyImage set state complete");
print(
"==============================================================================");
}
@override
void dispose() {
super.dispose();
Tflite.close();
}
pickImage() async {
print(
"==============================================================================");
var image = await ImagePicker().pickImage(source: ImageSource.gallery);
if (image == null) {
print("image is null");
return null;
}
setState(() {
_loading = true;
_image = File(image.path);
});
classifyImage(_image!);
}
List<Widget> renderBoxes(Size screen) {
if (_recognitions == null) return [];
if (_imageWidth == null || _imageHeight == null) return [];
double factorX = screen.width;
double factorY = _imageHeight! / _imageHeight! * screen.width;
Color blue = Colors.red;
return _recognitions!.map((re) {
return Positioned(
left: re["rect"]["x"] * factorX,
top: re["rect"]["y"] * factorY,
width: re["rect"]["w"] * factorX,
height: re["rect"]["h"] * factorY,
child: Container(
decoration: BoxDecoration(
border: Border.all(
color: blue,
width: 3,
)),
child: Text(
"${re["detectedClass"]} ${(re["confidenceInClass"] * 100).toStringAsFixed(0)}%",
style: TextStyle(
background: Paint()..color = blue,
color: Colors.white,
fontSize: 15,
),
),
),
);
}).toList();
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
backgroundColor: Colors.black,
title: const Text(
'Fruits and Veggies Neural Network',
style: TextStyle(
color: Colors.white,
fontWeight: FontWeight.w200,
fontSize: 20,
letterSpacing: 0.8),
),
),
body: Container(
color: Colors.black.withOpacity(0.9),
padding: const EdgeInsets.symmetric(horizontal: 35, vertical: 50),
child: Container(
alignment: Alignment.center,
padding: const EdgeInsets.all(30),
decoration: BoxDecoration(
color: const Color(0xFF2A363B),
borderRadius: BorderRadius.circular(30),
),
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Container(
child: Center(
child: _loading == true
? null //show nothing if no picture selected
: Container(
child: Column(
children: [
Container(
height: 250,
width: 250,
child: ClipRRect(
borderRadius: BorderRadius.circular(30),
child: _image == null
? Container(
height: 50,
width: double.infinity,
color: Colors.amber,
)
: Image.file(_image!),
),
),
const Divider(
height: 25,
thickness: 1,
),
_outputs != null
? Text(
'The object is: ${_outputs![0]['label']}!',
style: const TextStyle(
color: Colors.white,
fontSize: 18,
fontWeight: FontWeight.w400),
)
: Container(),
const Divider(
height: 25,
thickness: 1,
),
],
),
),
),
),
Container(
child: Column(
children: [
const SizedBox(
height: 30,
),
GestureDetector(
onTap: pickImage, //no parenthesis
child: Container(
width: MediaQuery.of(context).size.width - 200,
alignment: Alignment.center,
padding: const EdgeInsets.symmetric(
horizontal: 24, vertical: 17),
decoration: BoxDecoration(
color: Colors.blueGrey[600],
borderRadius: BorderRadius.circular(15)),
child: const Text(
'Pick From Gallery',
style: TextStyle(color: Colors.white, fontSize: 16),
),
),
),
],
),
),
],
),
),
),
);
}
}
And this is the error that I get, after which the app crashes:
I/InterpreterApi(19420): Loaded native library: tensorflowlite_jni
I/InterpreterApi(19420): Didn't load native library: tensorflowlite_jni_gms_client
I/tflite (19420): Initialized TensorFlow Lite runtime.
W/ithub_tfliteapp(19420): type=1400 audit(0.0:42592): avc: denied { read } for name="u:object_r:vendor_default_prop:s0" dev="tmpfs" ino=12923 scontext=u:r:untrusted_app:s0:c39,c257,c512,c768 tcontext=u:object_r:vendor_default_prop:s0 tclass=file permissive=0
E/libc (19420): Access denied finding property "ro.hardware.chipname"
I/tflite (19420): Created TensorFlow Lite XNNPACK delegate for CPU.
W/Gralloc3(19420): mapper 3.x is not supported
I/tflite (19420): Replacing 486 node(s) with delegate (TfLiteXNNPackDelegate) node, yielding 7 partitions.
W/Looper (19420): Slow Looper main: Long Msg: seq=351 plan=١٦:٥٧:١٦.٨٥٣ late=9ms wall=2801ms running=559ms runnable=38ms io=2084ms reclaim=2ms h=android.os.Handler c=io.flutter.embedding.engine.dart.-$$Lambda$DartMessenger$TsixYUB5E6FpKhMtCSQVHKE89gQ
I/Choreographer(19420): Skipped 169 frames! The application may be doing too much work on its main thread.
I/OpenGLRenderer(19420): Davey! duration=3058ms; Flags=0, IntendedVsync=72247862129103, Vsync=72250678795657, OldestInputEvent=9223372036854775807, NewestInputEvent=0, HandleInputStart=72250684016756, AnimationStart=72250684150818, PerformTraversalsStart=72250684258006, DrawStart=72250709874256, SyncQueued=72250832948527, SyncStart=72250833268631, IssueDrawCommandsStart=72250834512433, SwapBuffers=72250917023891, FrameCompleted=72250920814308, DequeueBufferDuration=7374000, QueueBufferDuration=2793000,
W/Looper (19420): Slow Looper main: doFrame is 2821ms late because of 1 msg, msg 1 took 2801ms (seq=351 running=559ms runnable=38ms io=2084ms reclaim=2ms late=9ms h=android.os.Handler c=io.flutter.embedding.engine.dart.-$$Lambda$DartMessenger$TsixYUB5E6FpKhMtCSQVHKE89gQ)
I/flutter (19420): ==============================================================================
I/Timeline(19420): Timeline: Activity_launch_request time:72255028
W/ActivityThread(19420): SCHED: com.example.github_tfliteapp/.MainActivity [90, r=1030ms, a=108ms, w=13052ms]
W/System (19420): A resource failed to call close.
W/Activity(19420): Slow Operation: Activity com.example.github_tfliteapp/.MainActivity onActivityResult took 236ms
I/flutter (19420): classifyImage running
E/AndroidRuntime(19420): FATAL EXCEPTION: AsyncTask #1
E/AndroidRuntime(19420): Process: com.example.github_tfliteapp, PID: 19420
E/AndroidRuntime(19420): java.lang.RuntimeException: An error occurred while executing doInBackground()
E/AndroidRuntime(19420): at android.os.AsyncTask$4.done(AsyncTask.java:399)
E/AndroidRuntime(19420): at java.util.concurrent.FutureTask.finishCompletion(FutureTask.java:383)
E/AndroidRuntime(19420): at java.util.concurrent.FutureTask.setException(FutureTask.java:252)
E/AndroidRuntime(19420): at java.util.concurrent.FutureTask.run(FutureTask.java:271)
E/AndroidRuntime(19420): at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:289)
E/AndroidRuntime(19420): at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
E/AndroidRuntime(19420): at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
E/AndroidRuntime(19420): at java.lang.Thread.run(Thread.java:919)
E/AndroidRuntime(19420): Caused by: java.lang.IllegalArgumentException: Cannot copy from a TensorFlowLite tensor (StatefulPartitionedCall:0) with shape [1, 25200, 14] to a Java object with shape [1, 9].
E/AndroidRuntime(19420): at org.tensorflow.lite.TensorImpl.throwIfDstShapeIsIncompatible(TensorImpl.java:456)
E/AndroidRuntime(19420): at org.tensorflow.lite.TensorImpl.copyTo(TensorImpl.java:215)
E/AndroidRuntime(19420): at org.tensorflow.lite.NativeInterpreterWrapper.run(NativeInterpreterWrapper.java:263)
E/AndroidRuntime(19420): at org.tensorflow.lite.InterpreterImpl.runForMultipleInputsOutputs(InterpreterImpl.java:133)
E/AndroidRuntime(19420): at org.tensorflow.lite.Interpreter.runForMultipleInputsOutputs(Interpreter.java:80)
E/AndroidRuntime(19420): at org.tensorflow.lite.InterpreterImpl.run(InterpreterImpl.java:126)
E/AndroidRuntime(19420): at org.tensorflow.lite.Interpreter.run(Interpreter.java:80)
E/AndroidRuntime(19420): at sq.flutter.tflite.TflitePlugin$RunModelOnImage.runTflite(TflitePlugin.java:504)
E/AndroidRuntime(19420): at sq.flutter.tflite.TflitePlugin$TfliteTask.doInBackground(TflitePlugin.java:471)
E/AndroidRuntime(19420): at sq.flutter.tflite.TflitePlugin$TfliteTask.doInBackground(TflitePlugin.java:445)
E/AndroidRuntime(19420): at android.os.AsyncTask$3.call(AsyncTask.java:378)
E/AndroidRuntime(19420): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
E/AndroidRuntime(19420): ... 4 more
W/ActivityThread(19420): SCHED: com.example.github_tfliteapp/.MainActivity [92, r=392ms, a=30ms, w=5356ms]
I/Process (19420): Sending signal. PID: 19420 SIG: 9
Lost connection to device.
I think there is a problem with the output shape, but I can't figure out how to handle it.
Can anyone help?
Thanks.
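
For reference, here is a minimal sketch of how the shapes could be confirmed. It assumes the separate tflite_flutter package (the code above uses the tflite package instead) and the same asset path; it simply prints the model's input and output tensor shapes, which should show the [1, 25200, 14] detection-style output from the crash log rather than the [1, numLabels] classification output that Tflite.runModelOnImage expects.

import 'package:tflite_flutter/tflite_flutter.dart';

// Sketch only: requires adding tflite_flutter to pubspec.yaml and listing the
// model under assets. Asset-path handling differs between tflite_flutter
// versions, so adjust the path if the asset is not found.
Future<void> inspectModel() async {
  final interpreter = await Interpreter.fromAsset('assets/best-fp16.tflite');

  // A YOLOv5-style export typically reports something like
  // input [1, 640, 640, 3] and output [1, 25200, 14] here.
  for (final t in interpreter.getInputTensors()) {
    print('input:  shape=${t.shape} type=${t.type}');
  }
  for (final t in interpreter.getOutputTensors()) {
    print('output: shape=${t.shape} type=${t.type}');
  }

  interpreter.close();
}

If the output really is [1, 25200, 14], the model looks like a detection head (25200 candidate boxes, each with 4 box values, an objectness score, and presumably 9 class scores), so runModelOnImage, whose output buffer is presumably sized from the label file as [1, 9], cannot receive it; the output would need to be decoded as detections instead of class probabilities.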