PyTorch: Reading Large Datasets


When the dataset is too large to fit in memory, it has to be loaded from disk in chunks. Below is an approach for a single machine with a single GPU:

from torch.utils.data import Dataset, DataLoader
import torch


class PretrainData(Dataset):
    def __init__(self):
        '''
        Assume `data` comes from a very large file and only 3 records fit in
        memory at a time; later, get_odps_batch_data can be rewritten to read
        those records from a file or an ODPS table instead of this in-memory list.
        '''
        self.meta_list = []   # chunk of records currently held in memory
        self.shift = 0        # offset of the current chunk within the full data
        self.odps_batch = 3   # number of records loaded into memory per chunk
        self.data = [[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5], [6, 6, 6], [7, 7, 7], [8, 8, 8]]
        self.datalength = len(self.data)

    def __len__(self):
        # Before the first chunk is loaded, report the chunk size so the
        # DataLoader still has a defined length; afterwards, report the size
        # of the chunk currently in memory.
        if len(self.meta_list) == 0:
            return self.odps_batch
        else:
            return len(self.meta_list)

    def __getitem__(self, idx):
        return self.meta_list[idx]

    def get_odps_batch_data(self):
        '''
        Read one chunk from the table, using the offset `shift` and the
        chunk size `odps_batch`.
        '''
        if self.shift + self.odps_batch < self.datalength:
            self.meta_list = torch.tensor(self.data[self.shift:self.shift + self.odps_batch])
        else:
            # Last chunk: may contain fewer than odps_batch records.
            self.meta_list = torch.tensor(self.data[self.shift:])
        print("self.meta_list:", self.meta_list)


if __name__ == "__main__":
    dataset = PretrainData()
    dataloader = DataLoader(dataset, batch_size=2, shuffle=True, drop_last=False)

    for epoch in range(3):
        # Number of chunks needed to cover the whole dataset (ceil division,
        # so an exact multiple of odps_batch does not yield an empty extra chunk).
        num_chunks = (dataset.datalength + dataset.odps_batch - 1) // dataset.odps_batch
        for shift in range(num_chunks):
            dataset.shift = shift * dataset.odps_batch
            dataloader.dataset.get_odps_batch_data()  # refresh the in-memory chunk
            print(len(dataloader))
            for data in dataloader:
                print("epoch:", epoch)
                print("data:", data)
                # Each row is [user, item, click]; split out the columns.
                user = data[:, 0]
                item = data[:, 1]
                click = data[:, 2]
                print(user, item, click)
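
When this script runs, each epoch walks over the chunks in order (shifts 0, 3, 6 for the toy data), refreshes meta_list, and the DataLoader then shuffles and batches only the records currently in memory.

The docstring above suggests swapping the in-memory self.data list for reads from a file or an ODPS table. Below is a minimal sketch of that file-backed variant, assuming a hypothetical comma-separated text file pretrain_data.txt with one "user,item,click" record per line; the file name, format, and the FileBackedPretrainData class are illustrative assumptions, not part of the original post.

from itertools import islice

import torch
from torch.utils.data import Dataset


class FileBackedPretrainData(Dataset):
    def __init__(self, path="pretrain_data.txt", odps_batch=3):
        self.path = path              # hypothetical data file (assumption)
        self.odps_batch = odps_batch  # records pulled into memory per chunk
        self.shift = 0                # offset of the current chunk
        self.meta_list = torch.empty(0)
        # Count lines once so the driver loop knows how many chunks exist.
        with open(self.path) as f:
            self.datalength = sum(1 for _ in f)

    def __len__(self):
        return len(self.meta_list) if len(self.meta_list) > 0 else self.odps_batch

    def __getitem__(self, idx):
        return self.meta_list[idx]

    def get_odps_batch_data(self):
        # Skip `shift` lines, then parse at most `odps_batch` lines.
        with open(self.path) as f:
            rows = [
                [int(x) for x in line.strip().split(",")]
                for line in islice(f, self.shift, self.shift + self.odps_batch)
            ]
        self.meta_list = torch.tensor(rows)

The driver loop stays the same: set dataset.shift, call get_odps_batch_data() to refresh the in-memory chunk, then iterate the DataLoader as before.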



Copyright notice: This is an original article by weixin_37913042, licensed under CC 4.0 BY-SA. Please include a link to the original source and this notice when reposting.