Converting an ONNX model to TensorFlow fails

kin*_*eld 7 python onnx tensorflow2.0

I am trying to convert a DETR model to TensorFlow via ONNX. I exported the model with torch.onnx.export using opset_version=12 (this produces a detr.onnx file).

Then I tried to convert the ONNX file to a TensorFlow model following this example. I added an onnx.checker.check_model call to make sure the model loads correctly.

import math
from PIL import Image
import requests
import matplotlib.pyplot as plt
import torch
from torch import nn
from torchvision.models import resnet50
import onnx
from onnx_tf.backend import prepare
import torchvision.transforms as T

torch.set_grad_enabled(False)
model = torch.hub.load('facebookresearch/detr', 'detr_resnet50', pretrained=True)

url = 'http://images.cocodataset.org/val2017/000000039769.jpg'
im = Image.open(requests.get(url, stream=True).raw)
transform = T.Compose([
    T.Resize(800),
    T.ToTensor(),
    T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])])
img = transform(im).unsqueeze(0)

torch.onnx.export(model, img, 'detr.onnx', opset_version = 12)
    
onnx_model = onnx.load('./detr.onnx')
    
result = onnx.checker.check_model(onnx_model)
    
tf_rep = prepare(onnx_model)
tf_rep.export_graph('./model.pb')

This code raises an exception when it reaches the line tf_rep.export_graph('./model.pb').
Versions: onnx = 1.13.0, torch = 1.13.0+cu117, onnx_tf = 1.10.0

Exception message:

KeyError                                  Traceback (most recent call last)
Cell In[19], line 26
     23 result = onnx.checker.check_model(onnx_model)
     25 tf_rep = prepare(onnx_model)
---> 26 tf_rep.export_graph('./model.pb')

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\onnx_tf\backend_rep.py:143, in TensorflowRep.export_graph(self, path)
    129 """Export backend representation to a Tensorflow proto file.
    130 
    131 This function obtains the graph proto corresponding to the ONNX
   (...)
    137 :returns: none.
    138 """
    139 self.tf_module.is_export = True
    140 tf.saved_model.save(
    141     self.tf_module,
    142     path,
--> 143     signatures=self.tf_module.__call__.get_concrete_function(
    144         **self.signatures))
    145 self.tf_module.is_export = False

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\def_function.py:1239, in Function.get_concrete_function(self, *args, **kwargs)
   1237 def get_concrete_function(self, *args, **kwargs):
   1238   # Implements GenericFunction.get_concrete_function.
-> 1239   concrete = self._get_concrete_function_garbage_collected(*args, **kwargs)
   1240   concrete._garbage_collector.release()  # pylint: disable=protected-access
   1241   return concrete

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\def_function.py:1219, in Function._get_concrete_function_garbage_collected(self, *args, **kwargs)
   1217   if self._stateful_fn is None:
   1218     initializers = []
-> 1219     self._initialize(args, kwargs, add_initializers_to=initializers)
   1220     self._initialize_uninitialized_variables(initializers)
   1222 if self._created_variables:
   1223   # In this case we have created variables on the first call, so we run the
   1224   # defunned version which is guaranteed to never create variables.

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\def_function.py:785, in Function._initialize(self, args, kwds, add_initializers_to)
    782 self._lifted_initializer_graph = lifted_initializer_graph
    783 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
    784 self._concrete_stateful_fn = (
--> 785     self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
    786         *args, **kwds))
    788 def invalid_creator_scope(*unused_args, **unused_kwds):
    789   """Disables variable creation."""

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\function.py:2523, in Function._get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   2521   args, kwargs = None, None
   2522 with self._lock:
-> 2523   graph_function, _ = self._maybe_define_function(args, kwargs)
   2524 return graph_function

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\function.py:2760, in Function._maybe_define_function(self, args, kwargs)
   2758   # Only get placeholders for arguments, not captures
   2759   args, kwargs = placeholder_dict["args"]
-> 2760 graph_function = self._create_graph_function(args, kwargs)
   2762 graph_capture_container = graph_function.graph._capture_func_lib  # pylint: disable=protected-access
   2763 # Maintain the list of all captures

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\function.py:2670, in Function._create_graph_function(self, args, kwargs)
   2665 missing_arg_names = [
   2666     "%s_%d" % (arg, i) for i, arg in enumerate(missing_arg_names)
   2667 ]
   2668 arg_names = base_arg_names + missing_arg_names
   2669 graph_function = ConcreteFunction(
-> 2670     func_graph_module.func_graph_from_py_func(
   2671         self._name,
   2672         self._python_function,
   2673         args,
   2674         kwargs,
   2675         self.input_signature,
   2676         autograph=self._autograph,
   2677         autograph_options=self._autograph_options,
   2678         arg_names=arg_names,
   2679         capture_by_value=self._capture_by_value),
   2680     self._function_attributes,
   2681     spec=self.function_spec,
   2682     # Tell the ConcreteFunction to clean up its graph once it goes out of
   2683     # scope. This is not the default behavior since it gets used in some
   2684     # places (like Keras) where the FuncGraph lives longer than the
   2685     # ConcreteFunction.
   2686     shared_func_graph=False)
   2687 return graph_function

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\framework\func_graph.py:1247, in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, acd_record_initial_resource_uses)
   1244 else:
   1245   _, original_func = tf_decorator.unwrap(python_func)
-> 1247 func_outputs = python_func(*func_args, **func_kwargs)
   1249 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
   1250 # TensorArrays and `None`s.
   1251 func_outputs = nest.map_structure(
   1252     convert, func_outputs, expand_composites=True)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\def_function.py:677, in Function._defun_with_scope.<locals>.wrapped_fn(*args, **kwds)
    673 with default_graph._variable_creator_scope(scope, priority=50):  # pylint: disable=protected-access
    674   # __wrapped__ allows AutoGraph to swap in a converted function. We give
    675   # the function a weak reference to itself to avoid a reference cycle.
    676   with OptionalXlaContext(compile_with_xla):
--> 677     out = weak_wrapped_fn().__wrapped__(*args, **kwds)
    678   return out

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\eager\function.py:3317, in class_method_to_instance_method.<locals>.bound_method_wrapper(*args, **kwargs)
   3312   return wrapped_fn(weak_instance(), *args, **kwargs)
   3314 # If __wrapped__ was replaced, then it is always an unbound function.
   3315 # However, the replacer is still responsible for attaching self properly.
   3316 # TODO(mdan): Is it possible to do it here instead?
-> 3317 return wrapped_fn(*args, **kwargs)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\framework\func_graph.py:1233, in func_graph_from_py_func.<locals>.autograph_handler(*args, **kwargs)
   1231 except Exception as e:  # pylint:disable=broad-except
   1232   if hasattr(e, "ag_error_metadata"):
-> 1233     raise e.ag_error_metadata.to_exception(e)
   1234   else:
   1235     raise

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\framework\func_graph.py:1222, in func_graph_from_py_func.<locals>.autograph_handler(*args, **kwargs)
   1220 # TODO(mdan): Push this block higher in tf.function's call stack.
   1221 try:
-> 1222   return autograph.converted_call(
   1223       original_func,
   1224       args,
   1225       kwargs,
   1226       options=autograph.ConversionOptions(
   1227           recursive=True,
   1228           optional_features=autograph_options,
   1229           user_requested=True,
   1230       ))
   1231 except Exception as e:  # pylint:disable=broad-except
   1232   if hasattr(e, "ag_error_metadata"):

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\impl\api.py:439, in converted_call(f, args, kwargs, caller_fn_scope, options)
    437 try:
    438   if kwargs is not None:
--> 439     result = converted_f(*effective_args, **kwargs)
    440   else:
    441     result = converted_f(*effective_args)

File ~\AppData\Local\Temp\__autograph_generated_fileq0h7j9t_.py:30, in outer_factory.<locals>.inner_factory.<locals>.tf____call__(self, **kwargs)
     28 node = ag__.Undefined('node')
     29 onnx_node = ag__.Undefined('onnx_node')
---> 30 ag__.for_stmt(ag__.ld(self).graph_def.node, None, loop_body, get_state, set_state, (), {'iterate_names': 'node'})
     31 outputs = ag__.converted_call(ag__.ld(dict), (), None, fscope)
     33 def get_state_4():

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:463, in for_stmt(iter_, extra_test, body, get_state, set_state, symbol_names, opts)
    459   _tf_distributed_iterable_for_stmt(
    460       iter_, extra_test, body, get_state, set_state, symbol_names, opts)
    462 else:
--> 463   _py_for_stmt(iter_, extra_test, body, None, None)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:512, in _py_for_stmt(***failed resolving arguments***)
    510 else:
    511   for target in iter_:
--> 512     body(target)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:478, in _py_for_stmt.<locals>.protected_body(protected_iter)
    477 def protected_body(protected_iter):
--> 478   original_body(protected_iter)
    479   after_iteration()
    480   before_iteration()

File ~\AppData\Local\Temp\__autograph_generated_fileq0h7j9t_.py:23, in outer_factory.<locals>.inner_factory.<locals>.tf____call__.<locals>.loop_body(itr)
     21 node = itr
     22 onnx_node = ag__.converted_call(ag__.ld(OnnxNode), (ag__.ld(node),), None, fscope)
---> 23 output_ops = ag__.converted_call(ag__.ld(self).backend._onnx_node_to_tensorflow_op, (ag__.ld(onnx_node), ag__.ld(tensor_dict), ag__.ld(self).handlers), dict(opset=ag__.ld(self).opset, strict=ag__.ld(self).strict), fscope)
     24 curr_node_output_map = ag__.converted_call(ag__.ld(dict), (ag__.converted_call(ag__.ld(zip), (ag__.ld(onnx_node).outputs, ag__.ld(output_ops)), None, fscope),), None, fscope)
     25 ag__.converted_call(ag__.ld(tensor_dict).update, (ag__.ld(curr_node_output_map),), None, fscope)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\impl\api.py:439, in converted_call(f, args, kwargs, caller_fn_scope, options)
    437 try:
    438   if kwargs is not None:
--> 439     result = converted_f(*effective_args, **kwargs)
    440   else:
    441     result = converted_f(*effective_args)

File ~\AppData\Local\Temp\__autograph_generated_filetsq4l59p.py:62, in outer_factory.<locals>.inner_factory.<locals>.tf___onnx_node_to_tensorflow_op(cls, node, tensor_dict, handlers, opset, strict)
     60     pass
     61 handler = ag__.Undefined('handler')
---> 62 ag__.if_stmt(ag__.ld(handlers), if_body_1, else_body_1, get_state_1, set_state_1, ('do_return', 'retval_'), 2)
     64 def get_state_2():
     65     return ()

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1363, in if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1361   _tf_if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1362 else:
-> 1363   _py_if_stmt(cond, body, orelse)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1416, in _py_if_stmt(cond, body, orelse)
   1414 def _py_if_stmt(cond, body, orelse):
   1415   """Overload of if_stmt that executes a Python if statement."""
-> 1416   return body() if cond else orelse()

File ~\AppData\Local\Temp\__autograph_generated_filetsq4l59p.py:56, in outer_factory.<locals>.inner_factory.<locals>.tf___onnx_node_to_tensorflow_op.<locals>.if_body_1()
     54     nonlocal retval_, do_return
     55     pass
---> 56 ag__.if_stmt(ag__.ld(handler), if_body, else_body, get_state, set_state, ('do_return', 'retval_'), 2)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1363, in if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1361   _tf_if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1362 else:
-> 1363   _py_if_stmt(cond, body, orelse)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1416, in _py_if_stmt(cond, body, orelse)
   1414 def _py_if_stmt(cond, body, orelse):
   1415   """Overload of if_stmt that executes a Python if statement."""
-> 1416   return body() if cond else orelse()

File ~\AppData\Local\Temp\__autograph_generated_filetsq4l59p.py:48, in outer_factory.<locals>.inner_factory.<locals>.tf___onnx_node_to_tensorflow_op.<locals>.if_body_1.<locals>.if_body()
     46 try:
     47     do_return = True
---> 48     retval_ = ag__.converted_call(ag__.ld(handler).handle, (ag__.ld(node),), dict(tensor_dict=ag__.ld(tensor_dict), strict=ag__.ld(strict)), fscope)
     49 except:
     50     do_return = False

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\impl\api.py:439, in converted_call(f, args, kwargs, caller_fn_scope, options)
    437 try:
    438   if kwargs is not None:
--> 439     result = converted_f(*effective_args, **kwargs)
    440   else:
    441     result = converted_f(*effective_args)

File ~\AppData\Local\Temp\__autograph_generated_filec7_esoft.py:41, in outer_factory.<locals>.inner_factory.<locals>.tf__handle(cls, node, **kwargs)
     39     nonlocal retval_, do_return
     40     raise ag__.converted_call(ag__.ld(BackendIsNotSupposedToImplementIt), (ag__.converted_call('{} version {} is not implemented.'.format, (ag__.ld(node).op_type, ag__.ld(cls).SINCE_VERSION), None, fscope),), None, fscope)
---> 41 ag__.if_stmt(ag__.ld(ver_handle), if_body, else_body, get_state, set_state, ('do_return', 'retval_'), 2)
     42 return fscope.ret(retval_, do_return)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1363, in if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1361   _tf_if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1362 else:
-> 1363   _py_if_stmt(cond, body, orelse)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1416, in _py_if_stmt(cond, body, orelse)
   1414 def _py_if_stmt(cond, body, orelse):
   1415   """Overload of if_stmt that executes a Python if statement."""
-> 1416   return body() if cond else orelse()

File ~\AppData\Local\Temp\__autograph_generated_filec7_esoft.py:33, in outer_factory.<locals>.inner_factory.<locals>.tf__handle.<locals>.if_body()
     31 try:
     32     do_return = True
---> 33     retval_ = ag__.converted_call(ag__.ld(ver_handle), (ag__.ld(node),), dict(**ag__.ld(kwargs)), fscope)
     34 except:
     35     do_return = False

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\impl\api.py:439, in converted_call(f, args, kwargs, caller_fn_scope, options)
    437 try:
    438   if kwargs is not None:
--> 439     result = converted_f(*effective_args, **kwargs)
    440   else:
    441     result = converted_f(*effective_args)

File ~\AppData\Local\Temp\__autograph_generated_filevddqx9qt.py:12, in outer_factory.<locals>.inner_factory.<locals>.tf__version(cls, node, **kwargs)
     10 try:
     11     do_return = True
---> 12     retval_ = ag__.converted_call(ag__.ld(cls)._common, (ag__.ld(node),), dict(**ag__.ld(kwargs)), fscope)
     13 except:
     14     do_return = False

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\impl\api.py:439, in converted_call(f, args, kwargs, caller_fn_scope, options)
    437 try:
    438   if kwargs is not None:
--> 439     result = converted_f(*effective_args, **kwargs)
    440   else:
    441     result = converted_f(*effective_args)

File ~\AppData\Local\Temp\__autograph_generated_filedezd6jrz.py:122, in outer_factory.<locals>.inner_factory.<locals>.tf___common(cls, node, **kwargs)
    120 paddings = ag__.Undefined('paddings')
    121 constant_values = ag__.Undefined('constant_values')
--> 122 ag__.if_stmt(ag__.ld(cls).SINCE_VERSION < 11, if_body_1, else_body_1, get_state_1, set_state_1, ('constant_values', 'paddings'), 2)
    123 cond = ag__.converted_call(ag__.ld(tf).cond, (ag__.converted_call(ag__.ld(check_positive), (ag__.ld(paddings),), None, fscope), ag__.autograph_artifact(lambda : ag__.converted_call(ag__.ld(process_pos_pads), (ag__.ld(x), ag__.ld(paddings), ag__.ld(constant_values)), None, fscope)), ag__.autograph_artifact(lambda : ag__.converted_call(ag__.ld(process_neg_pads), (ag__.ld(x), ag__.ld(paddings), ag__.ld(constant_values)), None, fscope))), None, fscope)
    124 try:

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1363, in if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1361   _tf_if_stmt(cond, body, orelse, get_state, set_state, symbol_names, nouts)
   1362 else:
-> 1363   _py_if_stmt(cond, body, orelse)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\control_flow.py:1416, in _py_if_stmt(cond, body, orelse)
   1414 def _py_if_stmt(cond, body, orelse):
   1415   """Overload of if_stmt that executes a Python if statement."""
-> 1416   return body() if cond else orelse()

File ~\AppData\Local\Temp\__autograph_generated_filedezd6jrz.py:119, in outer_factory.<locals>.inner_factory.<locals>.tf___common.<locals>.else_body_1()
    117 nonlocal paddings, constant_values
    118 paddings = ag__.ld(tensor_dict)[ag__.ld(node).inputs[1]]
--> 119 constant_values = ag__.if_exp(ag__.converted_call(ag__.ld(len), (ag__.ld(node).inputs,), None, fscope) == 3, lambda : ag__.ld(tensor_dict)[ag__.ld(node).inputs[2]], lambda : 0, 'ag__.converted_call(len, (node.inputs,), None, fscope) == 3')

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\operators\conditional_expressions.py:27, in if_exp(cond, if_true, if_false, expr_repr)
     25   return _tf_if_exp(cond, if_true, if_false, expr_repr)
     26 else:
---> 27   return _py_if_exp(cond, if_true, if_false)

File ~\AppData\Local\Programs\Python\Python39\lib\site-packages\tensorflow\python\autograph\o
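
For reference, the traceback ends inside onnx-tf's Pad handler, which looks the node's pads / constant_value inputs up in its tensor dictionary. Below is a minimal diagnostic sketch (assuming the detr.onnx exported above) that lists the Pad nodes in the graph and shows whether each of those inputs is a plain initializer or is produced by other ops:

import onnx

model = onnx.load('./detr.onnx')

# Names of constant tensors stored directly in the graph.
initializers = {init.name for init in model.graph.initializer}

# For every Pad node, show whether its pads / constant_value inputs
# (input[1:]) come from initializers or from the outputs of other ops.
for node in model.graph.node:
    if node.op_type == 'Pad':
        print(node.name or '<unnamed>',
              [(name, name in initializers) for name in node.input[1:]])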

anu*_*rag 1

The problem you are facing is caused by the model source using dynamic padding rather than static padding shapes. The issue is exposed when you lower the ONNX opset version during export.
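
To illustrate the difference (a minimal sketch using onnx.helper, not taken from the DETR graph): up to opset 10 the Pad node carries its pads as a static attribute, while from opset 11 onward pads is an ordinary input that may be computed by other ops at runtime.

import onnx
from onnx import helper

# Opset <= 10: pads is a static attribute baked into the node.
pad_opset10 = helper.make_node('Pad', inputs=['x'], outputs=['y'],
                               mode='constant', pads=[0, 0, 1, 1, 0, 0, 1, 1])

# Opset >= 11: pads is a runtime input, so it can be the output of other ops
# (such as the onnx::Sub over image sizes that DETR emits) and is no longer
# a value onnx-tf can read straight off the graph.
pad_opset11 = helper.make_node('Pad', inputs=['x', 'pads'], outputs=['y'],
                               mode='constant')

print(pad_opset10)
print(pad_opset11)

With opset_version lowered to 10, the same export fails immediately at export time instead, as the reproduction below shows: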

import warnings
warnings.filterwarnings("ignore")

#import onnxruntime
import math
from PIL import Image
import requests
import matplotlib.pyplot as plt
import torch
from torch import nn
from torchvision.models import resnet50
import torchvision.transforms as T
import onnx
from onnx_tf.backend import prepare
#from onnxsim import simplify

torch.set_grad_enabled(False)
model = torch.hub.load('facebookresearch/detr', 'detr_resnet50', pretrained=True)

url = 'http://images.cocodataset.org/val2017/000000039769.jpg'
im = Image.open(requests.get(url, stream=True).raw)
transform = T.Compose([
    T.Resize(800),
    T.ToTensor(),
    T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])])
img = transform(im).unsqueeze(0)

model.eval()
torch.onnx.export(model, img, 'detr.onnx', opset_version = 10)
    
onnx_model = onnx.load('./detr.onnx')

#onnx_model, _ = simplify(model)
    
result = onnx.checker.check_model(onnx_model)
    
tf_rep = prepare(onnx_model)
tf_rep.export_graph('./model.pb')

It will throw the following output:

SymbolicValueError                        Traceback (most recent call last)
c:\Anaconda3\envs\workenv\lib\site-packages\torch\onnx\symbolic_opset9.py in _convert_padding_node(input)
   1821         try:
-> 1822             padding = [
   1823                 symbolic_helper._get_const(v, "i", "padding") for v in input_list

c:\Anaconda3\envs\workenv\lib\site-packages\torch\onnx\symbolic_opset9.py in <listcomp>(.0)
   1822             padding = [
-> 1823                 symbolic_helper._get_const(v, "i", "padding") for v in input_list
   1824             ]

c:\Anaconda3\envs\workenv\lib\site-packages\torch\onnx\symbolic_helper.py in _get_const(value, desc, arg_name)
    169     if not _is_constant(value):
--> 170         raise errors.SymbolicValueError(
    171             f"ONNX symbolic expected a constant value of the '{arg_name}' argument, "

SymbolicValueError: ONNX symbolic expected a constant value of the 'padding' argument, got '509 defined in (%509 : Long(requires_grad=0, device=cpu) = onnx::Sub(%max_size_i, %498), scope: models.detr.DETR:: # C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\util\misc.py:349:0
)'  [Caused by the value '509 defined in (%509 : Long(requires_grad=0, device=cpu) = onnx::Sub(%max_size_i, %498), scope: models.detr.DETR:: # C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\util\misc.py:349:0
)' (type 'Tensor') in the TorchScript graph. The containing node has kind 'onnx::Sub'.] 
    (node defined in C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\util\misc.py(349): <listcomp>
C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\util\misc.py(349): _onnx_nested_tensor_from_tensor_list
C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\util\misc.py(313): nested_tensor_from_tensor_list
C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\models\detr.py(60): forward
c:\Anaconda3\envs\workenv\lib\site-packages\torch\nn\modules\module.py(1182): _slow_forward
...
        #5: 507 defined in (%507 : Long(requires_grad=0, device=cpu) = onnx::Sub(%478, %466), scope: models.detr.DETR:: # C:\Users\Anurag/.cache\torch\hub\facebookresearch_detr_main\util\misc.py:349:0
    )  (type 'Tensor')
    Outputs:
        #0: 510 defined in (%510 : int[] = prim::ListConstruct(%459, %509, %459, %508, %459, %507), scope: models.detr.DETR::
    )  (type 'List[int]')

My suggestion is to get the model from another source.
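
Whichever DETR export you end up with, it can help to sanity-check the ONNX file with onnxruntime before handing it to onnx-tf, so you know whether the problem lies in the exported graph itself or only in the TensorFlow conversion. A minimal sketch (the input resolution below is just an example; substitute the shape of your preprocessed image):

import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession('detr.onnx')
input_name = sess.get_inputs()[0].name

# Dummy input at an example resolution; img.numpy() from the code above also works.
dummy = np.random.randn(1, 3, 800, 1066).astype(np.float32)
outputs = sess.run(None, {input_name: dummy})
print([o.shape for o in outputs])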

For reference, see: ONNX symbolic expected a constant value of the 'padding' argument