---
title: Mixed data
keywords: fastai
sidebar: home_sidebar
summary: "DataLoader that can take data from multiple dataloaders with different types of data"
description: "DataLoader that can take data from multiple dataloaders with different types of data"
nb_path: "nbs/022_data.mixed.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

class MixedDataLoader[source]

MixedDataLoader(*loaders, path='.', shuffle=False, device=None, bs=None)

{% endraw %} {% raw %}

class MixedDataLoaders[source]

MixedDataLoaders(*loaders, path='.', device=None) :: DataLoaders

Basic wrapper around several DataLoaders.

{% endraw %} {% raw %}
{% endraw %} {% raw %}

get_mixed_dls[source]

get_mixed_dls(*dls, device=None, shuffle_train=None, shuffle_valid=None, **kwargs)
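Since the signatures above carry no docstrings, the call pattern is easiest to see in code. The sketch below is purely illustrative (`dls_a` and `dls_b` are placeholder names; the full, runnable example follows): you build the individual `DataLoaders` as usual, on the same splits as in the examples on this page, and then combine them with `get_mixed_dls`.

```python
# dls_a, dls_b: any fastai DataLoaders built on the same splits (e.g. tabular + time series).
# get_mixed_dls wraps them so each batch contains one input per wrapped dataloader,
# with the samples kept aligned across dataloaders.
mixed_dls = get_mixed_dls(dls_a, dls_b, bs=64)
xb, yb = mixed_dls.train.one_batch()
len(xb)   # 2: one element per input dataloader
```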

{% endraw %} {% raw %}
{% endraw %} {% raw %}
from tsai.data.tabular import *

path = untar_data(URLs.ADULT_SAMPLE)
df = pd.read_csv(path/'adult.csv')
# df['salary'] = np.random.rand(len(df)) # uncomment to simulate a cont dependent variable
target = 'salary'
splits = RandomSplitter()(range_of(df))

# First tabular DataLoaders: 3 categorical and 2 continuous features
cat_names = ['workclass', 'education', 'marital-status']
cont_names = ['age', 'fnlwgt']
dls1 = get_tabular_dls(df, cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=512)
dls1.show_batch()

# Second tabular DataLoaders: same df and splits, but different features
cat_names = None #['occupation', 'relationship', 'race']
cont_names = ['education-num']
dls2 = get_tabular_dls(df, cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=128)
dls2.show_batch()
| | workclass | education | marital-status | age | fnlwgt | salary |
|---|---|---|---|---|---|---|
| 0 | Private | Some-college | Married-civ-spouse | 49.0 | 195611.999981 | >=50k |
| 1 | Private | Assoc-voc | Widowed | 32.0 | 257978.002662 | <50k |
| 2 | Self-emp-not-inc | HS-grad | Divorced | 51.0 | 311569.001420 | <50k |
| 3 | Private | HS-grad | Never-married | 34.0 | 258675.001077 | <50k |
| 4 | Local-gov | Bachelors | Married-civ-spouse | 78.0 | 136198.003049 | <50k |
| 5 | Self-emp-not-inc | HS-grad | Married-civ-spouse | 57.0 | 35561.005753 | >=50k |
| 6 | Private | 7th-8th | Never-married | 52.0 | 35305.006102 | <50k |
| 7 | State-gov | HS-grad | Divorced | 63.0 | 109735.000555 | <50k |
| 8 | Private | HS-grad | Never-married | 41.0 | 111482.999541 | <50k |
| 9 | Private | HS-grad | Never-married | 19.0 | 244115.000590 | <50k |

| | education-num_na | education-num | salary |
|---|---|---|---|
| 0 | False | 10.0 | <50k |
| 1 | False | 8.0 | <50k |
| 2 | False | 13.0 | <50k |
| 3 | False | 9.0 | <50k |
| 4 | False | 13.0 | <50k |
| 5 | False | 10.0 | <50k |
| 6 | False | 13.0 | >=50k |
| 7 | False | 10.0 | <50k |
| 8 | False | 4.0 | <50k |
| 9 | False | 12.0 | <50k |
{% endraw %} {% raw %}
# Combine both tabular DataLoaders into a single MixedDataLoaders object
dls = get_mixed_dls(dls1, dls2, bs=8)
first(dls.train)
first(dls.valid)
# Mixed dataloaders can be saved and reloaded with torch.save / torch.load
torch.save(dls, 'export/mixed_dls.pth')
del dls
dls = torch.load('export/mixed_dls.pth')
dls.train.show_batch()
| | workclass | education | marital-status | age | fnlwgt | salary |
|---|---|---|---|---|---|---|
| 0 | Private | HS-grad | Never-married | 18.0 | 41380.998801 | <50k |
| 1 | Private | HS-grad | Never-married | 48.0 | 245948.001366 | <50k |
| 2 | Private | HS-grad | Never-married | 41.0 | 107305.999223 | <50k |
| 3 | ? | Assoc-voc | Married-civ-spouse | 28.0 | 168523.999755 | <50k |
| 4 | Self-emp-inc | Some-college | Never-married | 48.0 | 85109.000981 | <50k |
| 5 | Private | Some-college | Married-civ-spouse | 57.0 | 194849.999944 | <50k |
| 6 | Private | HS-grad | Married-civ-spouse | 45.0 | 368560.997254 | >=50k |
| 7 | Private | Bachelors | Married-civ-spouse | 46.0 | 243743.001292 | >=50k |

| | education-num_na | education-num | salary |
|---|---|---|---|
| 0 | False | 9.0 | <50k |
| 1 | False | 9.0 | <50k |
| 2 | False | 9.0 | <50k |
| 3 | False | 11.0 | <50k |
| 4 | False | 10.0 | <50k |
| 5 | False | 10.0 | <50k |
| 6 | False | 9.0 | >=50k |
| 7 | False | 13.0 | >=50k |
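The reloaded object behaves like any other fastai `DataLoaders`, so the usual iteration patterns apply. A small, purely illustrative sketch of looping over the training set:

```python
# Each batch is an (inputs, target) pair; the inputs are a tuple
# with one element per dataloader passed to get_mixed_dls.
for xb, yb in dls.train:
    print(len(xb))  # 2
    break
```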
{% endraw %} {% raw %}
xb, yb = first(dls.train)
xb
((tensor([[ 5, 12,  5],
          [ 5, 12,  5],
          [ 5, 12,  5],
          [ 1,  9,  3],
          [ 6, 16,  5],
          [ 5, 16,  3],
          [ 5, 12,  3],
          [ 5, 10,  3]]),
  tensor([[-1.5111, -1.4119],
          [ 0.6884,  0.5379],
          [ 0.1752, -0.7836],
          [-0.7780, -0.2000],
          [ 0.6884, -0.9951],
          [ 1.3483,  0.0509],
          [ 0.4685,  1.7066],
          [ 0.5418,  0.5169]])),
 (tensor([[1],
          [1],
          [1],
          [1],
          [1],
          [1],
          [1],
          [1]]),
  tensor([[-0.4220],
          [-0.4220],
          [-0.4220],
          [ 0.3597],
          [-0.0311],
          [-0.0311],
          [-0.4220],
          [ 1.1413]])))
{% endraw %} {% raw %}
xs, ys = first(dls.train)
xs[0][0].shape, xs[0][1].shape, xs[1][0].shape, xs[1][1].shape
(torch.Size([8, 3]),
 torch.Size([8, 2]),
 torch.Size([8, 1]),
 torch.Size([8, 1]))
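The nesting mirrors the order of the loaders passed to `get_mixed_dls`: the first element of `xb` comes from `dls1` (3 categorical and 2 continuous columns) and the second from `dls2` (1 categorical column, the `education-num_na` flag added by `FillMissing`, plus 1 continuous column). A minimal unpacking sketch, with illustrative variable names, makes that explicit:

```python
xb, yb = first(dls.train)
(x_cat1, x_cont1), (x_cat2, x_cont2) = xb  # one (categorical, continuous) pair per tabular dataloader
x_cat1.shape, x_cont1.shape, x_cat2.shape, x_cont2.shape
# (torch.Size([8, 3]), torch.Size([8, 2]), torch.Size([8, 1]), torch.Size([8, 1]))
```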
{% endraw %} {% raw %}
import string

from tsai.data.validation import TimeSplitter
from tsai.data.core import TSRegression, get_ts_dls

# Toy time series dataset: each sample is filled with a single constant value (0-7)
# across its 2 variables and 5 time steps, and that value is also its regression target.
# This makes it easy to check later that the mixed dataloader keeps samples aligned.
X = np.repeat(np.repeat(np.arange(8)[:, None, None], 2, 1), 5, 2).astype(float)
X = np.concatenate([X, X])
y = np.concatenate([np.arange(len(X)//2)]*2)
alphabet = np.array(list(string.ascii_lowercase))
# y = alphabet[y]  # uncomment to simulate a categorical target
splits = TimeSplitter(.5, show_plot=False)(range_of(X))
tfms = [None, TSRegression()]
dls1 = get_ts_dls(X, y, splits=splits, tfms=tfms)
dls1.one_batch()
(TSTensor(samples:8, vars:2, len:5, device=cpu),
 tensor([0., 1., 2., 3., 4., 5., 6., 7.]))
{% endraw %} {% raw %}
# Matching toy tabular dataset: for sample i, cat1 = i, cat2 = 10*i and cont = 100*i,
# with the same targets and splits as the time series dataset above.
data = np.concatenate([np.repeat(np.arange(8)[:, None], 3, 1)*np.array([1, 10, 100])]*2)
df = pd.DataFrame(data, columns=['cat1', 'cat2', 'cont'])
df['cont'] = df['cont'].astype(float)
df['target'] = y
cat_names = ['cat1', 'cat2']
cont_names = ['cont']
target = 'target'
# Normalize is left out of procs so 'cont' keeps its raw values (0, 100, ..., 700)
dls2 = get_tabular_dls(df, procs=[Categorify, FillMissing],
                       cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=8)
dls2.one_batch()
(tensor([[2, 2],
         [1, 1],
         [3, 3],
         [4, 4],
         [7, 7],
         [6, 6],
         [8, 8],
         [5, 5]]),
 tensor([[100.],
         [  0.],
         [200.],
         [300.],
         [600.],
         [500.],
         [700.],
         [400.]]),
 tensor([[1],
         [0],
         [2],
         [3],
         [6],
         [5],
         [7],
         [4]], dtype=torch.int8))
{% endraw %} {% raw %}
# Internal check: iterate dls1.train the same way fastai's DataLoader.__iter__ does,
# using the private _loaders tuple from fastai.data.load to pick the single- or
# multi-process iterator for the fake PyTorch DataLoader.
from fastai.data.load import _loaders

z = zip(_loaders[dls1.train.fake_l.num_workers == 0](dls1.train.fake_l))
for b in z:
    print(b)
    break
((TSTensor(samples:8, vars:2, len:5, device=cpu), tensor([0., 1., 2., 3., 4., 5., 6., 7.])),)
{% endraw %} {% raw %}
# Mix the time series and tabular dataloaders and check that samples stay aligned
bs = 8
dls = get_mixed_dls(dls1, dls2, bs=bs)
dl = dls.train
xb, yb = dl.one_batch()
test_eq(len(xb), 2)          # one input per dataloader: (time series, (cats, conts))
test_eq(len(xb[0]), bs)
test_eq(len(xb[1]), 2)
test_eq(len(xb[1][0]), bs)
test_eq(len(xb[1][1]), bs)
test_eq(xb[0].data[:, 0, 0].long(), xb[1][0][:, 0] - 1)  # categorical data and ts are in sync (Categorify codes start at 1)
test_eq(xb[0].data[:, 0, 0], (xb[1][1]/100).flatten())   # continuous data and ts are in sync
test_eq(tensor(dl.input_idxs), yb.long().cpu())          # targets equal the sampled indices (by construction of the toy data)
dl = dls.valid
xb, yb = dl.one_batch()
test_eq(tensor(y[dl.input_idxs]), yb.long().cpu())
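These checks are what make mixed batches usable for multi-input models: the time series slice and the tabular rows in a batch always refer to the same samples. As a final illustration, here is a minimal sketch of a two-branch module that consumes one of these batches directly. It is not part of tsai; the class name, layer sizes and embedding sizes are assumptions chosen to match the toy data above (2 variables, 5 time steps, 2 categorical columns with 8 levels each plus a reserved index, 1 continuous column).

```python
import torch
import torch.nn as nn

# Hypothetical two-branch model (illustrative only): one branch flattens the time series,
# the other embeds the tabular categoricals and concatenates them with the continuous features.
class TSTabModel(nn.Module):
    def __init__(self, c_in, seq_len, emb_szs, n_cont, n_out):
        super().__init__()
        self.ts_branch = nn.Sequential(nn.Flatten(), nn.Linear(c_in * seq_len, 32), nn.ReLU())
        self.embeds = nn.ModuleList([nn.Embedding(ni, nf) for ni, nf in emb_szs])
        n_emb = sum(nf for _, nf in emb_szs)
        self.tab_branch = nn.Sequential(nn.Linear(n_emb + n_cont, 32), nn.ReLU())
        self.head = nn.Linear(32 + 32, n_out)

    def forward(self, xb):
        x_ts, (x_cat, x_cont) = xb                    # same nesting as dls.train.one_batch()
        ts_feats = self.ts_branch(x_ts.data.float())  # TSTensor -> plain tensor
        embs = [e(x_cat[:, i]) for i, e in enumerate(self.embeds)]
        tab_feats = self.tab_branch(torch.cat(embs + [x_cont.float()], dim=1))
        return self.head(torch.cat([ts_feats, tab_feats], dim=1))

xb, yb = dls.train.one_batch()
model = TSTabModel(c_in=2, seq_len=5, emb_szs=[(9, 4), (9, 4)], n_cont=1, n_out=1)
preds = model(xb)
preds.shape   # torch.Size([8, 1])
```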
{% endraw %}