General errors for dealing with `Transform`s

Transform errors are currently some of the hardest to debug, including the dreaded `Could not do one pass in your DataLoader`.

This module provides some extra error messages and injection points so you can see the full trace of a failure and its cause.

transform_error[source]

transform_error(e:Exception, nm:str, event:str)

Raises `Exception` `e` stemming from a `Transform`, with more information:

  • `nm`: The name of the `Transform`
  • `event`: The event called (such as `encodes` or `decodes`)
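
Based on the frames visible in the traceback further down this page, a minimal sketch of what `transform_error` does might look like this (the exact message formatting in the library may differ):

def transform_error(e:Exception, nm:str, event:str):
    "Raise `e` with extra context naming the failing `Transform` and event"
    # Prepend context about the failing Transform, then re-raise the original exception
    err = f'There was an issue calling the {event} on transform {nm}:\n\n'
    err += e.args[0]   # keep the original error message at the end
    e.args = [err]
    raise e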

Transform.__call__[source]

Transform.__call__(x, **kwargs)

Call self as a function.
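
The traceback below shows `Transform.__call__` being patched so that any failure is routed through `transform_error`. A rough sketch, assuming fastcore's `@patch` decorator and substituting `type(self).__name__` for the library's `_get_name` helper:

from fastcore.all import Transform, patch

@patch
def __call__(self:Transform, x, **kwargs):
    try: return self._call('encodes', x, **kwargs)
    except Exception as e:
        # `type(self).__name__` stands in for the library's name helper
        transform_error(e, type(self).__name__, 'encodes')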

Transform.decode[source]

Transform.decode(x, **kwargs)

Delegate to `decodes` to undo transform
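
`decode` gets the same treatment, wrapping `decodes` instead of `encodes` (same assumptions as the sketch above):

from fastcore.all import Transform, patch

@patch
def decode(self:Transform, x, **kwargs):
    try: return self._call('decodes', x, **kwargs)
    except Exception as e:
        transform_error(e, type(self).__name__, 'decodes')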

`transform_error` is extremely useful for figuring out which section of your pipeline broke.

As an example, we'll write a broken transform and attempt to build a DataLoader with it.

TfmdDL.new[source]

TfmdDL.new(dataset=None)

Create a new version of self with a few changed attributes
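
The traceback below also shows a patched `TfmdDL.new`, which prints a friendlier message before re-raising whatever broke during the one-pass check. A rough reconstruction from those frames (the real signature and parent call may differ slightly):

from fastai.data.core import TfmdDL
from fastcore.all import patch

@patch
def new(self:TfmdDL, dataset=None, cls=None, **kwargs):
    # Build the new DataLoader the way fastai normally does
    res = super(TfmdDL, self).new(dataset, cls, do_setup=False, **kwargs)
    if not hasattr(self, '_n_inp') or not hasattr(self, '_types'):
        try:
            self._one_pass()   # push one batch through `after_item`/`after_batch`
            res._n_inp,res._types = self._n_inp,self._types
        except Exception as e:
            print("Could not do one pass in your DataLoader, there is something wrong in it. Please see the stack trace below:")
            raise e
    else: res._n_inp,res._types = self._n_inp,self._types
    return res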

from fastai.vision.all import *

class BrokenTransform(DisplayedTransform):
    "A purposefully broken transform"
    y = 'a'
    # Multiplying a tensor by a string raises a TypeError at batch time
    def encodes(self, x:TensorImage): return x*self.y

path = untar_data(URLs.PETS)/'images'
dls = ImageDataLoaders.from_name_func(
    path, get_image_files(path), valid_pct=0.2,
    label_func=lambda x: x[0].isupper(),
    item_tfms=[Resize(224)], batch_tfms=[BrokenTransform()])

x,y = dls.one_batch()
Could not do one pass in your DataLoader, there is something wrong in it. Please see the stack trace below:
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-33-07c095d99b52> in <module>()
      4     path, get_image_files(path), valid_pct=0.2,
      5     label_func=lambda x: x[0].isupper(),
----> 6     item_tfms=[Resize(224)], batch_tfms=[BrokenTransform()])
      7 
      8 x,y = dls.one_batch()

/usr/local/lib/python3.7/dist-packages/fastai/vision/data.py in from_name_func(cls, path, fnames, label_func, **kwargs)
    119             raise ValueError("label_func couldn't be lambda function on Windows")
    120         f = using_attr(label_func, 'name')
--> 121         return cls.from_path_func(path, fnames, f, **kwargs)
    122 
    123     @classmethod

/usr/local/lib/python3.7/dist-packages/fastai/vision/data.py in from_path_func(cls, path, fnames, label_func, valid_pct, seed, item_tfms, batch_tfms, **kwargs)
    110                            item_tfms=item_tfms,
    111                            batch_tfms=batch_tfms)
--> 112         return cls.from_dblock(dblock, fnames, path=path, **kwargs)
    113 
    114     @classmethod

/usr/local/lib/python3.7/dist-packages/fastai/data/core.py in from_dblock(cls, dblock, source, path, bs, val_bs, shuffle, device, **kwargs)
    193     @classmethod
    194     def from_dblock(cls, dblock, source, path='.',  bs=64, val_bs=None, shuffle=True, device=None, **kwargs):
--> 195         return dblock.dataloaders(source, path=path, bs=bs, val_bs=val_bs, shuffle=shuffle, device=device, **kwargs)
    196 
    197     _docs=dict(__getitem__="Retrieve `DataLoader` at `i` (`0` is training, `1` is validation)",

/usr/local/lib/python3.7/dist-packages/fastai/data/block.py in dataloaders(self, source, path, verbose, **kwargs)
    113         dsets = self.datasets(source, verbose=verbose)
    114         kwargs = {**self.dls_kwargs, **kwargs, 'verbose': verbose}
--> 115         return dsets.dataloaders(path=path, after_item=self.item_tfms, after_batch=self.batch_tfms, **kwargs)
    116 
    117     _docs = dict(new="Create a new `DataBlock` with other `item_tfms` and `batch_tfms`",

/usr/local/lib/python3.7/dist-packages/fastai/data/core.py in dataloaders(self, bs, shuffle_train, shuffle, val_shuffle, n, path, dl_type, dl_kwargs, device, drop_last, val_bs, **kwargs)
    235         def_kwargs = {'bs':bs if val_bs is None else val_bs,'shuffle':val_shuffle,'n':None,'drop_last':False}
    236         dls = [dl] + [dl.new(self.subset(i), **merge(kwargs,def_kwargs,val_kwargs,dl_kwargs[i]))
--> 237                       for i in range(1, self.n_subsets)]
    238         return self._dbunch_type(*dls, path=path, device=device)
    239 

/usr/local/lib/python3.7/dist-packages/fastai/data/core.py in <listcomp>(.0)
    235         def_kwargs = {'bs':bs if val_bs is None else val_bs,'shuffle':val_shuffle,'n':None,'drop_last':False}
    236         dls = [dl] + [dl.new(self.subset(i), **merge(kwargs,def_kwargs,val_kwargs,dl_kwargs[i]))
--> 237                       for i in range(1, self.n_subsets)]
    238         return self._dbunch_type(*dls, path=path, device=device)
    239 

<ipython-input-31-c6b453e54b12> in new(self, dataset, cls, **kwargs)
     11         except Exception as e:
     12             print("Could not do one pass in your DataLoader, there is something wrong in it. Please see the stack trace below:")
---> 13             raise e
     14     else: res._n_inp,res._types = self._n_inp,self._types
     15     return res

<ipython-input-31-c6b453e54b12> in new(self, dataset, cls, **kwargs)
      7     if not hasattr(self, '_n_inp') or not hasattr(self, '_types'):
      8         try:
----> 9             self._one_pass()
     10             res._n_inp,res._types = self._n_inp,self._types
     11         except Exception as e:

/usr/local/lib/python3.7/dist-packages/fastai/data/core.py in _one_pass(self)
     53         if self.device is not None and multiprocessing.get_start_method().lower() == "fork":
     54             b = to_device(b, self.device)
---> 55         its = self.after_batch(b)
     56         self._n_inp = 1 if not isinstance(its, (list,tuple)) or len(its)==1 else len(its)-1
     57         self._types = explode_types(its)

/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in __call__(self, o)
    196         self.fs.append(t)
    197 
--> 198     def __call__(self, o): return compose_tfms(o, tfms=self.fs, split_idx=self.split_idx)
    199     def __repr__(self): return f"Pipeline: {' -> '.join([f.name for f in self.fs if f.name != 'noop'])}"
    200     def __getitem__(self,i): return self.fs[i]

/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in compose_tfms(x, tfms, is_enc, reverse, **kwargs)
    148     for f in tfms:
    149         if not is_enc: f = f.decode
--> 150         x = f(x, **kwargs)
    151     return x
    152 

<ipython-input-29-9e3f8ea9ab42> in __call__(self, x, **kwargs)
      5         return self._call('encodes', x, **kwargs)
      6     except Exception as e:
----> 7         transform_error(e, _get_name(self), 'encodes')

<ipython-input-28-d548505727b6> in transform_error(e, nm, event)
     10     err += e.args[0]
     11     e.args = [err]
---> 12     raise e

<ipython-input-29-9e3f8ea9ab42> in __call__(self, x, **kwargs)
      3 def __call__(self:Transform, x, **kwargs):
      4     try:
----> 5         return self._call('encodes', x, **kwargs)
      6     except Exception as e:
      7         transform_error(e, _get_name(self), 'encodes')

/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in _call(self, fn, x, split_idx, **kwargs)
     81     def _call(self, fn, x, split_idx=None, **kwargs):
     82         if split_idx!=self.split_idx and self.split_idx is not None: return x
---> 83         return self._do_call(getattr(self, fn), x, **kwargs)
     84 
     85     def _do_call(self, f, x, **kwargs):

/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in _do_call(self, f, x, **kwargs)
     88             ret = f.returns(x) if hasattr(f,'returns') else None
     89             return retain_type(f(x, **kwargs), x, ret)
---> 90         res = tuple(self._do_call(f, x_, **kwargs) for x_ in x)
     91         return retain_type(res, x)
     92 

/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in <genexpr>(.0)
     88             ret = f.returns(x) if hasattr(f,'returns') else None
     89             return retain_type(f(x, **kwargs), x, ret)
---> 90         res = tuple(self._do_call(f, x_, **kwargs) for x_ in x)
     91         return retain_type(res, x)
     92 

/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in _do_call(self, f, x, **kwargs)
     87             if f is None: return x
     88             ret = f.returns(x) if hasattr(f,'returns') else None
---> 89             return retain_type(f(x, **kwargs), x, ret)
     90         res = tuple(self._do_call(f, x_, **kwargs) for x_ in x)
     91         return retain_type(res, x)

/usr/local/lib/python3.7/dist-packages/fastcore/dispatch.py in __call__(self, *args, **kwargs)
    116         elif self.inst is not None: f = MethodType(f, self.inst)
    117         elif self.owner is not None: f = MethodType(f, self.owner)
--> 118         return f(*args, **kwargs)
    119 
    120     def __get__(self, inst, owner):

<ipython-input-32-8976e30e4832> in encodes(self, x)
      4     "A purposefully broken transform"
      5     y = 'a'
----> 6     def encodes(self, x:TensorImage): return x*self.y

/usr/local/lib/python3.7/dist-packages/fastai/torch_core.py in __torch_function__(self, func, types, args, kwargs)
    327         convert=False
    328         if _torch_handled(args, self._opt, func): convert,types = type(self),(torch.Tensor,)
--> 329         res = super().__torch_function__(func, types, args=args, kwargs=kwargs)
    330         if convert: res = convert(res)
    331         if isinstance(res, TensorBase): res.set_meta(self, as_copy=True)

/usr/local/lib/python3.7/dist-packages/torch/tensor.py in __torch_function__(cls, func, types, args, kwargs)
    993 
    994         with _C.DisableTorchFunction():
--> 995             ret = func(*args, **kwargs)
    996             return _convert(ret, cls)
    997 

TypeError: There was an issue calling the encodes on transform BrokenTransform:

only integer tensors of a single element can be converted to an index

And as you can see, it tells us there was an issue calling the `encodes` on our `BrokenTransform`, which is exactly what we expected to happen!