--- title: MVP (aka TSBERT) - Self-Supervised Pretraining of Time Series Models keywords: fastai sidebar: home_sidebar summary: "Masked Value Predictor callback used to predict time series step values after a binary mask has been applied." description: "Masked Value Predictor callback used to predict time series step values after a binary mask has been applied." nb_path: "nbs/063_callback.MVP.ipynb" ---
# Sanity checks: every mask generator must return a mask shaped like its
# input, and natural_mask must never itself contain NaNs.
t = torch.rand(16, 3, 100)
for sync_flag in (False, True):
    mask = create_subsequence_mask(t, sync=sync_flag)
    test_eq(mask.shape, t.shape)
for make_mask in (create_variable_mask, create_future_mask):
    mask = make_mask(t)
    test_eq(mask.shape, t.shape)
# natural_mask is built from NaN positions in the data; the mask itself
# must be NaN-free even when the input contains NaNs.
o = torch.randn(2, 3, 4)
o[o > .5] = np.nan
test_eq(torch.isnan(natural_mask(o)).sum(), 0)
# Visualize the default subsequence mask (r=.15, sync=False) on two samples.
t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, r=.15) # default settings
test_eq(mask.dtype, torch.bool)
for idx in (0, 1):
    sample_mean = mask[idx].float().mean().item()
    plt.figure(figsize=(10, 3))
    plt.pcolormesh(mask[idx], cmap='cool')
    plt.title(f'sample {idx} subsequence mask (sync=False) - default mean: {sample_mean:.3f}')
    plt.show()
# Visualize a subsequence mask with a higher masking ratio (r=.3).
t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, r=.3) # 30% of values masked
test_eq(mask.dtype, torch.bool)
sample_mean = mask[0].float().mean().item()
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'sample 0 subsequence mask (r=.3) mean: {sample_mean:.3f}')
plt.show()
# Visualize a subsequence mask with shorter masked spans (lm=5).
t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, lm=5) # average length of mask = 5
test_eq(mask.dtype, torch.bool)
sample_mean = mask[0].float().mean().item()
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'sample 0 subsequence mask (lm=5) mean: {sample_mean:.3f}')
plt.show()
# Visualize a stateless mask: individual steps are masked independently
# instead of contiguous subsequences.
t = torch.rand(16, 30, 100)
mask = create_subsequence_mask(t, stateful=False) # individual time steps masked
test_eq(mask.dtype, torch.bool)
sample_mean = mask[0].float().mean().item()
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'per sample subsequence mask (stateful=False) mean: {sample_mean:.3f}')
plt.show()
# Visualize a synced subsequence mask: all variables share the same
# masked time steps.
t = torch.rand(1, 30, 100)
mask = create_subsequence_mask(t, sync=True) # all time steps masked simultaneously
test_eq(mask.dtype, torch.bool)
sample_mean = mask[0].float().mean().item()
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'per sample subsequence mask (sync=True) mean: {sample_mean:.3f}')
plt.show()
# Visualize a variable mask: entire variables (rows) are masked.
t = torch.rand(1, 30, 100)
mask = create_variable_mask(t) # masked variables
test_eq(mask.dtype, torch.bool)
sample_mean = mask[0].float().mean().item()
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'per sample variable mask mean: {sample_mean:.3f}')
plt.show()
# Visualize a future mask (sync=True): the final fraction of time steps
# is masked across all variables.
t = torch.rand(1, 30, 100)
mask = create_future_mask(t, r=.15, sync=True) # masked steps
test_eq(mask.dtype, torch.bool)
sample_mean = mask[0].float().mean().item()
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'future mask mean: {sample_mean:.3f}')
plt.show()
# Visualize a future mask with sync=False.
# BUG FIX: the original cell built the sync=False mask and then immediately
# overwrote it with a sync=True mask before plotting, so the sync=False
# variant was never shown (sync=True is already plotted in the cell above).
# The dead overwrite has been removed so the sync=False mask is displayed.
t = torch.rand(1, 30, 100)
mask = create_future_mask(t, r=.15, sync=False) # masked steps
test_eq(mask.dtype, torch.bool)
plt.figure(figsize=(10, 3))
plt.pcolormesh(mask[0], cmap='cool')
plt.title(f'future mask mean: {mask[0].float().mean().item():.3f}')
plt.show()
from fastai.data.transforms import *
from tsai.data.all import *
from tsai.models.utils import *
from tsai.models.layers import *
from tsai.learner import *
from tsai.models.TSTPlus import *
from tsai.models.InceptionTimePlus import *
# Self-supervised pretraining example on the UCR MoteStrain dataset.
dsid = 'MoteStrain'
X, y, splits = get_UCR_data(dsid, split_data=False)
check_data(X, y, splits, False)
X[X<-1] = np.nan # This is to test the model works well even if nan values are passed through the dataloaders.
# Build unlabeled dataloaders (no y) for the pretraining phase.
tfms = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True)]
unlabeled_dls = get_ts_dls(X, splits=splits, tfms=tfms, batch_tfms=batch_tfms)
# Pretrain from scratch with a variable window size, saving weights under fname.
pretrain_cb = MVP(fname=f'{dsid}', window_size=(.5, 1))
learn = ts_learner(unlabeled_dls, InceptionTimePlus, cbs=[pretrain_cb]) # trained on variable window size
learn.fit_one_cycle(1, 3e-3)
# Continue pretraining from the previously saved weights.
resume_cb = MVP(weights_path=f'data/MVP/{dsid}.pth')
learn = ts_learner(unlabeled_dls, InceptionTimePlus, cbs=[resume_cb])
learn.fit_one_cycle(1, 3e-3)
learn.MVP.show_preds(sharey=True) # these preds are highly inaccurate as the model's been trained for just 1 epoch for testing purposes
# Fine-tune a supervised classifier starting from the MVP-pretrained weights.
# Nan2Value replaces the NaNs injected above before they reach the model.
tfms = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True), Nan2Value()]
labeled_dls = get_ts_dls(X, y, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)
learn = ts_learner(labeled_dls, InceptionTimePlus, pretrained=True,
                   weights_path=f'data/MVP/{dsid}.pth', metrics=accuracy)
learn.fit_one_cycle(1)
# MVP configured with several mask types at once (subsequence, variable,
# future) and a random sync setting.
tfms = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True), Nan2Value()]
unlabeled_dls = get_ts_dls(X, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)
fname = f'{dsid}_test'
mask_cfg = dict(subsequence_mask=True, sync='random', variable_mask=True, future_mask=True)
mvp = MVP(fname=fname, **mask_cfg)
learn = ts_learner(unlabeled_dls, InceptionTimePlus, metrics=accuracy, cbs=mvp) # Metrics will not be used!
# MVP with a user-supplied custom mask function (here a future mask with
# r=.15 built via functools.partial) in addition to the built-in mask flags.
tfms = [None, [Categorize()]]
batch_tfms = [TSStandardize(by_var=True)]
unlabeled_dls = get_ts_dls(X, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)
fname = f'{dsid}_test'
custom_fn = partial(create_future_mask, r=.15)
mvp = MVP(subsequence_mask=True, sync='random', variable_mask=True, future_mask=True,
          custom_mask=custom_fn, fname=fname)
learn = ts_learner(unlabeled_dls, InceptionTimePlus, metrics=accuracy, cbs=mvp) # Metrics will not be used!