-
Notifications
You must be signed in to change notification settings - Fork 38
Open
Description
I got the following error while computing FLOPs with calflops on a segmentation_models_pytorch model that uses an EfficientNet encoder (the failure occurs inside calflops' conv2d FLOPs hook when the EfficientNet `Conv2dStaticSamePadding` layer is called):
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1736, in Module._wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1844, in Module._call_impl(self, *args, **kwargs)
1841 return inner()
1843 try:
-> 1844 return inner()
1845 except Exception:
1846 # run always called hooks if they have not already been run
1847 # For now only forward hooks have the always_call option but perhaps
1848 # this functionality should be added to full backward hooks as well.
1849 for hook_id, hook in _global_forward_hooks.items():
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1790, in Module._call_impl.<locals>.inner()
1787 bw_hook = BackwardHook(self, full_backward_hooks, backward_pre_hooks)
1788 args = bw_hook.setup_input_hook(args)
-> 1790 result = forward_call(*args, **kwargs)
1791 if _global_forward_hooks or self._forward_hooks:
1792 for hook_id, hook in (
1793 *_global_forward_hooks.items(),
1794 *self._forward_hooks.items(),
1795 ):
1796 # mark that always called hook is run
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/segmentation_models_pytorch/encoders/efficientnet.py:72, in EfficientNetEncoder.forward(self, x)
70 drop_connect = drop_connect_rate * block_number / len(self._blocks)
71 block_number += 1.0
---> 72 x = module(x, drop_connect)
74 features.append(x)
76 return features
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1736, in Module._wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1844, in Module._call_impl(self, *args, **kwargs)
1841 return inner()
1843 try:
-> 1844 return inner()
1845 except Exception:
1846 # run always called hooks if they have not already been run
1847 # For now only forward hooks have the always_call option but perhaps
1848 # this functionality should be added to full backward hooks as well.
1849 for hook_id, hook in _global_forward_hooks.items():
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1790, in Module._call_impl.<locals>.inner()
1787 bw_hook = BackwardHook(self, full_backward_hooks, backward_pre_hooks)
1788 args = bw_hook.setup_input_hook(args)
-> 1790 result = forward_call(*args, **kwargs)
1791 if _global_forward_hooks or self._forward_hooks:
1792 for hook_id, hook in (
1793 *_global_forward_hooks.items(),
1794 *self._forward_hooks.items(),
1795 ):
1796 # mark that always called hook is run
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/efficientnet_pytorch/model.py:109, in MBConvBlock.forward(self, inputs, drop_connect_rate)
106 x = self._bn0(x)
107 x = self._swish(x)
--> 109 x = self._depthwise_conv(x)
110 x = self._bn1(x)
111 x = self._swish(x)
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1736, in Module._wrapped_call_impl(self, *args, **kwargs)
1734 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1735 else:
-> 1736 return self._call_impl(*args, **kwargs)
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1844, in Module._call_impl(self, *args, **kwargs)
1841 return inner()
1843 try:
-> 1844 return inner()
1845 except Exception:
1846 # run always called hooks if they have not already been run
1847 # For now only forward hooks have the always_call option but perhaps
1848 # this functionality should be added to full backward hooks as well.
1849 for hook_id, hook in _global_forward_hooks.items():
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1790, in Module._call_impl.<locals>.inner()
1787 bw_hook = BackwardHook(self, full_backward_hooks, backward_pre_hooks)
1788 args = bw_hook.setup_input_hook(args)
-> 1790 result = forward_call(*args, **kwargs)
1791 if _global_forward_hooks or self._forward_hooks:
1792 for hook_id, hook in (
1793 *_global_forward_hooks.items(),
1794 *self._forward_hooks.items(),
1795 ):
1796 # mark that always called hook is run
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/efficientnet_pytorch/utils.py:275, in Conv2dStaticSamePadding.forward(self, x)
273 def forward(self, x):
274 x = self.static_padding(x)
--> 275 x = F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
276 return x
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/calflops/pytorch_ops.py:359, in wrapFunc.<locals>.newFunc(*args, **kwds)
358 def newFunc(*args, **kwds):
--> 359 flops, macs = funcFlopCompute(*args, **kwds)
360 if module_flop_count:
361 module_flop_count[-1].append((name, flops))
File /scratch/aqa6122/anaconda3/lib/python3.11/site-packages/calflops/pytorch_ops.py:111, in _conv_flops_compute(input, weight, bias, stride, padding, dilation, groups)
109 output_dims = []
110 for idx, input_dim in enumerate(input_dims):
--> 111 output_dim = (input_dim + 2 * paddings[idx] - (dilations[idx] *
112 (kernel_dims[idx] - 1) + 1)) // strides[idx] + 1
113 output_dims.append(output_dim)
115 filters_per_channel = out_channels // groups
TypeError: unsupported operand type(s) for //: 'int' and 'list'

From the last frame, the `//` fails because `strides[idx]` is itself a list rather than an int — it looks like calflops' `_conv_flops_compute` does not normalize the `stride`/`padding`/`dilation` arguments when EfficientNet's `Conv2dStaticSamePadding.forward` passes them to `F.conv2d` as lists instead of ints/tuples (please confirm).
Metadata
Metadata
Assignees
Labels
No labels