Callback to apply MixUp data augmentation to your training loop
from nbdev.showdoc import *
from fastai2.test_utils import *

reduce_loss[source]

reduce_loss(loss, reduction='mean')

class MixUp[source]

MixUp(alpha=0.4) :: Callback

Basic class handling tweaks of the training loop by changing a Learner in various events

from fastai2.vision.core import *

# Demo: attach the MixUp callback to a Learner on a tiny MNIST sample,
# manually drive one batch through the callback, and display the mixed images.
source = untar_data(URLs.MNIST_TINY)
files = get_image_files(source)
datasets = Datasets(files, [PILImageBW.create, [parent_label, Categorize()]],
                    splits=GrandparentSplitter()(files))
loaders = datasets.dataloaders(after_item=[ToTensor(), IntToFloatTensor()])
mixup_cb = MixUp(0.5)
# The model here is a placeholder — we never run a forward pass, only the callback.
learner = Learner(loaders, nn.Linear(3,4), loss_func=CrossEntropyLossFlat(), cbs=mixup_cb)
# Simulate the start of training so the callback fires without a real fit() call.
learner._do_begin_fit(1)
learner.epoch,learner.training = 0,True
learner.dl = loaders.train
batch = loaders.one_batch()
learner._split(batch)
learner('begin_batch')
# After 'begin_batch' the callback holds the mixed inputs/targets on itself;
# show them in a 3x3 grid.
_,axes = plt.subplots(3,3, figsize=(9,9))
loaders.show_batch(b=(mixup_cb.x,mixup_cb.y), ctxs=axes.flatten())