Callback to apply MixUp data augmentation to your training
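MixUp (https://arxiv.org/abs/1710.09412) trains on convex blends of pairs of training examples: each input is interpolated with another input drawn from the same batch, and the loss is interpolated with the same coefficient lam sampled from a Beta(alpha, alpha) distribution. The sketch below shows the core idea in plain PyTorch; it is a simplified illustration rather than the callback's actual code, and the helper name mixup_batch is made up.

import torch

def mixup_batch(x, y, model, loss_func, alpha=0.4):
    # Draw the blending coefficient from Beta(alpha, alpha) and keep it >= 0.5
    # so the "original" example always dominates the blend.
    lam = torch.distributions.Beta(alpha, alpha).sample()
    lam = torch.max(lam, 1 - lam)
    # Blend each input with another (shuffled) input from the same batch.
    shuffle = torch.randperm(x.size(0))
    x_mix = torch.lerp(x[shuffle], x, lam)   # lam*x + (1-lam)*x[shuffle]
    # Blend the losses against both sets of targets with the same coefficient.
    pred = model(x_mix)
    return torch.lerp(loss_func(pred, y[shuffle]), loss_func(pred, y), lam)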


reduce_loss(loss, reduction='mean')
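No description is shown for reduce_loss; from its signature it is presumably a small helper that collapses an element-wise loss tensor according to the requested reduction. A plausible sketch (an assumption, not necessarily the library's exact source):

def reduce_loss(loss, reduction='mean'):
    # Collapse an element-wise loss according to `reduction` ('mean', 'sum', or no reduction)
    return loss.mean() if reduction == 'mean' else loss.sum() if reduction == 'sum' else loss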

class MixUp[source]

MixUp(alpha=0.4) :: Callback

Basic class handling tweaks of the training loop by changing a Learner in various events
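The line above is the inherited Callback docstring; MixUp itself hooks the batch-mixing and loss-mixing logic into those training-loop events. In normal training you attach it to a Learner like any other callback, along these lines (dls and model here are placeholders for your own data and model):

# Hypothetical end-to-end usage: attach MixUp like any other callback.
learn = Learner(dls, model, loss_func=CrossEntropyLossFlat(), cbs=MixUp(0.4))
learn.fit_one_cycle(1)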

from fastai.vision.all import *
path = untar_data(URLs.MNIST_TINY)
items = get_image_files(path)
tds = Datasets(items, [PILImageBW.create, [parent_label, Categorize()]], splits=GrandparentSplitter()(items))
dls = tds.dataloaders(after_item=[ToTensor(), IntToFloatTensor()])
mixup = MixUp(0.5)
with Learner(dls, nn.Linear(3,4), loss_func=CrossEntropyLossFlat(), cbs=mixup) as learn:
    learn.epoch,learn.training = 0,True
    learn.dl = dls.train
    b = dls.one_batch()
    # Fire the callback events by hand so MixUp blends the batch we just grabbed
    learn._split(b)
    learn('before_train')
    learn('before_batch')

_,axs = plt.subplots(3,3, figsize=(9,9))
dls.show_batch(b=(mixup.x,mixup.y), ctxs=axs.flatten())