Python torchvision.transforms.RandomHorizontalFlip() Examples

The following are 30 code examples showing how to use torchvision.transforms.RandomHorizontalFlip(). They are extracted from open source projects; the project, author, source file, and license are listed above each example.

You may also want to check out the other available functions and classes of the torchvision.transforms module.
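As a quick, self-contained sketch (not taken from any of the projects below; the randomly generated image is only a placeholder), RandomHorizontalFlip(p) flips its input with probability p, with p defaulting to 0.5:

import numpy as np
from PIL import Image
from torchvision import transforms

# RandomHorizontalFlip(p) flips the input with probability p (default 0.5).
# p=1.0 always flips, p=0.0 never flips.
augment = transforms.Compose([
    transforms.RandomHorizontalFlip(p=0.5),
    transforms.ToTensor(),   # PIL image -> CxHxW float tensor in [0, 1]
])

img = Image.fromarray(np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8))  # dummy RGB image
tensor = augment(img)
print(tensor.shape)          # torch.Size([3, 224, 224])

Setting p=1.0 makes the flip deterministic, which Examples 14 and 23 below use for test-time augmentation.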

Example 1
Project: transferlearning   Author: jindongwang   File: data_loader.py    License: MIT License
def load_data(root_path, dir, batch_size, phase):
    transform_dict = {
        'src': transforms.Compose(
        [transforms.RandomResizedCrop(224),
         transforms.RandomHorizontalFlip(),
         transforms.ToTensor(),
         transforms.Normalize(mean=[0.485, 0.456, 0.406],
                              std=[0.229, 0.224, 0.225]),
         ]),
        'tar': transforms.Compose(
        [transforms.Resize(224),
         transforms.ToTensor(),
         transforms.Normalize(mean=[0.485, 0.456, 0.406],
                              std=[0.229, 0.224, 0.225]),
         ])}
    data = datasets.ImageFolder(root=root_path + dir, transform=transform_dict[phase])
    data_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True, drop_last=False, num_workers=4)
    return data_loader 
Example 2
Project: transferlearning   Author: jindongwang   File: data_loader.py    License: MIT License
def load_training(root_path, dir, batch_size, kwargs):

    transform = transforms.Compose(
        [transforms.Resize([256, 256]),
         transforms.RandomCrop(224),
         transforms.RandomHorizontalFlip(),
         transforms.ToTensor()])
    data = datasets.ImageFolder(root=root_path + dir, transform=transform)
    train_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True, drop_last=True, **kwargs)
    return train_loader 
Example 3
Project: self-supervised-da   Author: Jiaolong   File: data_loader.py    License: MIT License
def get_rot_train_transformers(args):
    size = args.img_transform.random_resize_crop.size
    scale = args.img_transform.random_resize_crop.scale
    img_tr = [transforms.RandomResizedCrop((int(size[0]), int(size[1])), (scale[0], scale[1]))]
    if args.img_transform.random_horiz_flip > 0.0:
        img_tr.append(transforms.RandomHorizontalFlip(args.img_transform.random_horiz_flip))
    if args.img_transform.jitter > 0.0:
        img_tr.append(transforms.ColorJitter(
            brightness=args.img_transform.jitter, contrast=args.img_transform.jitter,
            saturation=args.img_transform.jitter, hue=min(0.5, args.img_transform.jitter)))

    mean = args.normalize.mean
    std = args.normalize.std
    img_tr += [transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)]

    return transforms.Compose(img_tr) 
Example 4
Project: transferlearning   Author: jindongwang   File: data_load.py    License: MIT License
def load_data(data_folder, batch_size, phase='train', train_val_split=True, train_ratio=.8):
    transform_dict = {
        'train': transforms.Compose(
            [transforms.Resize(256),
             transforms.RandomCrop(224),
             transforms.RandomHorizontalFlip(),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ]),
        'test': transforms.Compose(
            [transforms.Resize(224),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ])}

    data = datasets.ImageFolder(root=data_folder, transform=transform_dict[phase])
    if phase == 'train':
        if train_val_split:
            train_size = int(train_ratio * len(data))
            test_size = len(data) - train_size
            data_train, data_val = torch.utils.data.random_split(data, [train_size, test_size])
            train_loader = torch.utils.data.DataLoader(data_train, batch_size=batch_size, shuffle=True, drop_last=True,
                                                    num_workers=4)
            val_loader = torch.utils.data.DataLoader(data_val, batch_size=batch_size, shuffle=False, drop_last=False,
                                                num_workers=4)
            return [train_loader, val_loader]
        else:
            train_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True, drop_last=True,
                                                    num_workers=4)
            return train_loader
    else: 
        test_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=False, drop_last=False,
                                                    num_workers=4)
        return test_loader

## Below are for ImageCLEF datasets 
Example 5
Project: transferlearning   Author: jindongwang   File: data_load.py    License: MIT License
def load_imageclef_train(root_path, domain, batch_size, phase):
    transform_dict = {
        'src': transforms.Compose(
            [transforms.Resize((256, 256)),
             transforms.RandomCrop(224),
             transforms.RandomHorizontalFlip(),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ]),
        'tar': transforms.Compose(
            [transforms.Resize((224, 224)),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ])}
    data = ImageCLEF(root_dir=root_path, domain=domain, transform=transform_dict[phase])
    train_size = int(0.8 * len(data))
    test_size = len(data) - train_size
    data_train, data_val = torch.utils.data.random_split(data, [train_size, test_size])
    train_loader = torch.utils.data.DataLoader(data_train, batch_size=batch_size, shuffle=True, drop_last=False,
                                               num_workers=4)
    val_loader = torch.utils.data.DataLoader(data_val, batch_size=batch_size, shuffle=True, drop_last=False,
                                             num_workers=4)
    return train_loader, val_loader 
Example 6
Project: transferlearning   Author: jindongwang   File: data_load.py    License: MIT License
def load_imageclef_test(root_path, domain, batch_size, phase):
    transform_dict = {
        'src': transforms.Compose(
            [transforms.Resize((256,256)),
             transforms.RandomCrop(224),
             transforms.RandomHorizontalFlip(),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ]),
        'tar': transforms.Compose(
            [transforms.Resize((224, 224)),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ])}
    data = ImageCLEF(root_dir=root_path, domain=domain, transform=transform_dict[phase])
    data_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True, drop_last=False, num_workers=4)
    return data_loader 
Example 7
Project: transferlearning   Author: jindongwang   File: data_loader.py    License: MIT License
def load_data(data_folder, batch_size, train, kwargs):
    transform = {
        'train': transforms.Compose(
            [transforms.Resize([256, 256]),
                transforms.RandomCrop(224),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225])]),
        'test': transforms.Compose(
            [transforms.Resize([224, 224]),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225])])
        }
    data = datasets.ImageFolder(root = data_folder, transform=transform['train' if train else 'test'])
    data_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True, **kwargs, drop_last = True if train else False)
    return data_loader 
Example 8
Project: transferlearning   Author: jindongwang   File: data_loader.py    License: MIT License
def load_train(root_path, dir, batch_size, phase):
    transform_dict = {
        'src': transforms.Compose(
            [transforms.RandomResizedCrop(224),
             transforms.RandomHorizontalFlip(),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ]),
        'tar': transforms.Compose(
            [transforms.Resize(224),
             transforms.ToTensor(),
             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225]),
             ])}
    data = datasets.ImageFolder(root=root_path + dir, transform=transform_dict[phase])
    train_size = int(0.8 * len(data))
    test_size = len(data) - train_size
    data_train, data_val = torch.utils.data.random_split(data, [train_size, test_size])
    train_loader = torch.utils.data.DataLoader(data_train, batch_size=batch_size, shuffle=True, drop_last=False, num_workers=4)
    val_loader = torch.utils.data.DataLoader(data_val, batch_size=batch_size, shuffle=True, drop_last=False, num_workers=4)
    return train_loader, val_loader 
Example 9
Project: pytorch-multigpu   Author: dnddnjs   File: train.py    License: MIT License
def main():
    best_acc = 0

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    print('==> Preparing data..')
    transforms_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))])

    dataset_train = CIFAR10(root='../data', train=True, download=True, 
                            transform=transforms_train)

    train_loader = DataLoader(dataset_train, batch_size=args.batch_size, 
                              shuffle=True, num_workers=args.num_worker)

    # the 10 CIFAR-10 classes
    classes = ('plane', 'car', 'bird', 'cat', 'deer', 
               'dog', 'frog', 'horse', 'ship', 'truck')

    print('==> Making model..')

    net = pyramidnet()
    net = nn.DataParallel(net)
    net = net.to(device)
    num_params = sum(p.numel() for p in net.parameters() if p.requires_grad)
    print('The number of parameters of model is', num_params)

    criterion = nn.CrossEntropyLoss()
    optimizer = optim.Adam(net.parameters(), lr=args.lr)
    # optimizer = optim.SGD(net.parameters(), lr=args.lr, 
    #                       momentum=0.9, weight_decay=1e-4)
    
    train(net, criterion, optimizer, train_loader, device) 
Example 10
Project: sgd-influence   Author: sato9hara   File: outlier.py    License: MIT License
def cifar10():
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    transform_test = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    trainset = MyCIFAR10.CIFAR10(root='./data', train=True, download=True, transform=transform_train, seed=0)
    valset = MyCIFAR10.CIFAR10(root='./data', train=True, download=True, transform=transform_test, seed=0)
    testset = MyCIFAR10.CIFAR10(root='./data', train=False, download=True, transform=transform_test, seed=0)
    net_func = MyNet.CifarAE
    return net_func, trainset, valset, testset 
Example 11
Project: sgd-influence   Author: sato9hara   File: train.py    License: MIT License
def cifar10():
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    transform_test = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    trainset = MyCIFAR10.CIFAR10(root='./data', train=True, download=True, transform=transform_train, seed=0)
    valset = MyCIFAR10.CIFAR10(root='./data', train=True, download=True, transform=transform_test, seed=0)
    testset = MyCIFAR10.CIFAR10(root='./data', train=False, download=True, transform=transform_test, seed=0)
    net_func = MyNet.CifarNet
    return net_func, trainset, valset, testset 
Example 12
Project: Recycle-GAN   Author: aayushbansal   File: base_dataset.py    License: MIT License
def get_transform(opt):
    transform_list = []
    if opt.resize_or_crop == 'resize_and_crop':
        osize = [opt.loadSize, opt.loadSize]
        transform_list.append(transforms.Scale(osize, Image.BICUBIC))
        transform_list.append(transforms.RandomCrop(opt.fineSize))
    elif opt.resize_or_crop == 'crop':
        transform_list.append(transforms.RandomCrop(opt.fineSize))
    elif opt.resize_or_crop == 'scale_width':
        transform_list.append(transforms.Lambda(
            lambda img: __scale_width(img, opt.fineSize)))
    elif opt.resize_or_crop == 'scale_width_and_crop':
        transform_list.append(transforms.Lambda(
            lambda img: __scale_width(img, opt.loadSize)))
        transform_list.append(transforms.RandomCrop(opt.fineSize))

    if opt.isTrain and not opt.no_flip:
        transform_list.append(transforms.RandomHorizontalFlip())

    transform_list += [transforms.ToTensor(),
                       transforms.Normalize((0.5, 0.5, 0.5),
                                            (0.5, 0.5, 0.5))]
    return transforms.Compose(transform_list) 
Example 13
Project: verb-attributes   Author: uwnlp   File: imsitu_loader.py    License: MIT License
def transform(is_train=True, normalize=True):
    """
    Returns a transform object
    """
    filters = []
    filters.append(Scale(256))

    if is_train:
        filters.append(RandomCrop(224))
    else:
        filters.append(CenterCrop(224))

    if is_train:
        filters.append(RandomHorizontalFlip())

    filters.append(ToTensor())
    if normalize:
        filters.append(Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225]))
    return Compose(filters) 
Example 14
Project: Clothing-Detection   Author: simaiden   File: bbox_aug.py    License: GNU General Public License v3.0
def im_detect_bbox_hflip(model, images, target_scale, target_max_size, device):
    """
    Performs bbox detection on the horizontally flipped image.
    Function signature is the same as for im_detect_bbox.
    """
    transform = TT.Compose([
        T.Resize(target_scale, target_max_size),
        TT.RandomHorizontalFlip(1.0),
        TT.ToTensor(),
        T.Normalize(
            mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD, to_bgr255=cfg.INPUT.TO_BGR255
        )
    ])
    images = [transform(image) for image in images]
    images = to_image_list(images, cfg.DATALOADER.SIZE_DIVISIBILITY)
    boxlists = model(images.to(device))

    # Invert the detections computed on the flipped image
    boxlists_inv = [boxlist.transpose(0) for boxlist in boxlists]
    return boxlists_inv 
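Both this example and Example 23 below pass p=1.0 so that the flip is always applied, and then map the predicted boxes back to the original image. As an illustration of that inversion step only (hflip_boxes is a hypothetical helper, not part of the project), mirroring the x-coordinates of (x1, y1, x2, y2) boxes looks like this:

import torch

def hflip_boxes(boxes, image_width):
    # boxes: an Nx4 tensor in (x1, y1, x2, y2) format
    x1, y1, x2, y2 = boxes.unbind(dim=1)
    return torch.stack([image_width - x2, y1, image_width - x1, y2], dim=1)

boxes = torch.tensor([[10.0, 20.0, 50.0, 80.0]])
print(hflip_boxes(boxes, image_width=224))  # tensor([[174., 20., 214., 80.]])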
Example 15
Project: ClassyVision   Author: facebookresearch   File: util.py    License: MIT License
def __init__(
        self,
        crop_size: int = ImagenetConstants.CROP_SIZE,
        mean: List[float] = ImagenetConstants.MEAN,
        std: List[float] = ImagenetConstants.STD,
    ):
        """The constructor method of ImagenetAugmentTransform class.

        Args:
            crop_size: expected output size per dimension after random cropping
            mean: a 3-tuple denoting the pixel RGB mean
            std: a 3-tuple denoting the pixel RGB standard deviation

        """
        self.transform = transforms.Compose(
            [
                transforms.RandomResizedCrop(crop_size),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                transforms.Normalize(mean=mean, std=std),
            ]
        ) 
Example 16
def make(sz_resize = 256, sz_crop = 227, mean = [104, 117, 128],
        std = [1, 1, 1], rgb_to_bgr = True, is_train = True,
        intensity_scale = None):
    return transforms.Compose([
        RGBToBGR() if rgb_to_bgr else Identity(),
        transforms.RandomResizedCrop(sz_crop) if is_train else Identity(),
        transforms.Resize(sz_resize) if not is_train else Identity(),
        transforms.CenterCrop(sz_crop) if not is_train else Identity(),
        transforms.RandomHorizontalFlip() if is_train else Identity(),
        transforms.ToTensor(),
        ScaleIntensities(
            *intensity_scale) if intensity_scale is not None else Identity(),
        transforms.Normalize(
            mean=mean,
            std=std,
        )
    ]) 
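RGBToBGR, ScaleIntensities, and Identity in this example are helpers defined by the project itself; only the conditional-transform pattern matters here. A minimal Identity stand-in (an assumption, not the project's actual class) would be:

class Identity:
    # No-op transform, used so the Compose pipeline keeps the same structure
    # whether a given step is enabled or not.
    def __call__(self, img):
        return img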
Example 17
Project: imgclsmob   Author: osmr   File: cifar10_cls_dataset.py    License: MIT License
def cifar10_train_transform(ds_metainfo,
                            mean_rgb=(0.4914, 0.4822, 0.4465),
                            std_rgb=(0.2023, 0.1994, 0.2010),
                            jitter_param=0.4):
    assert (ds_metainfo is not None)
    assert (ds_metainfo.input_image_size[0] == 32)
    return transforms.Compose([
        transforms.RandomCrop(
            size=32,
            padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ColorJitter(
            brightness=jitter_param,
            contrast=jitter_param,
            saturation=jitter_param),
        transforms.ToTensor(),
        transforms.Normalize(
            mean=mean_rgb,
            std=std_rgb)
    ]) 
Example 18
Project: self-supervised-da   Author: Jiaolong   File: data_loader.py    License: MIT License
def get_jig_train_transformers(args):
    size = args.img_transform.random_resize_crop.size
    scale = args.img_transform.random_resize_crop.scale
    img_tr = [transforms.RandomResizedCrop((int(size[0]), int(size[1])), (scale[0], scale[1]))]
    if args.img_transform.random_horiz_flip > 0.0:
        img_tr.append(transforms.RandomHorizontalFlip(args.img_transform.random_horiz_flip))
    if args.img_transform.jitter > 0.0:
        img_tr.append(transforms.ColorJitter(
            brightness=args.img_transform.jitter, contrast=args.img_transform.jitter,
            saturation=args.img_transform.jitter, hue=min(0.5, args.img_transform.jitter)))

    tile_tr = []
    if args.jig_transform.tile_random_grayscale:
        tile_tr.append(transforms.RandomGrayscale(args.jig_transform.tile_random_grayscale))
    mean = args.normalize.mean
    std = args.normalize.std
    tile_tr = tile_tr + [transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)]

    return transforms.Compose(img_tr), transforms.Compose(tile_tr) 
Example 19
Project: pytorch_deephash   Author: flyingpot   File: train.py    License: MIT License
def init_dataset():
    transform_train = transforms.Compose(
        [transforms.Resize(256),
         transforms.RandomCrop(227),
         transforms.RandomHorizontalFlip(),
         transforms.ToTensor(),
         transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))])
    transform_test = transforms.Compose(
        [transforms.Resize(227),
         transforms.ToTensor(),
         transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))])
    trainset = datasets.CIFAR10(root='./data', train=True, download=True,
                                transform=transform_train)
    trainloader = torch.utils.data.DataLoader(trainset, batch_size=128,
                                              shuffle=True, num_workers=0)

    testset = datasets.CIFAR10(root='./data', train=False, download=True,
                               transform=transform_test)
    testloader = torch.utils.data.DataLoader(testset, batch_size=100,
                                             shuffle=True, num_workers=0)
    return trainloader, testloader 
Example 20
Project: sepconv   Author: martkartasev   File: dataset.py    License: MIT License
def __init__(self, patches, use_cache, augment_data):
        super(PatchDataset, self).__init__()
        self.patches = patches
        self.crop = CenterCrop(config.CROP_SIZE)

        if augment_data:
            self.random_transforms = [RandomRotation((90, 90)), RandomVerticalFlip(1.0), RandomHorizontalFlip(1.0),
                                      (lambda x: x)]
            self.get_aug_transform = (lambda: random.sample(self.random_transforms, 1)[0])
        else:
            # Transform does nothing. Not sure if horrible or very elegant...
            self.get_aug_transform = (lambda: (lambda x: x))

        if use_cache:
            self.load_patch = data_manager.load_cached_patch
        else:
            self.load_patch = data_manager.load_patch

        print('Dataset ready with {} tuples.'.format(len(patches))) 
Example 21
Project: nasnet-pytorch   Author: wandering007   File: imagenet.py    License: MIT License
def preprocess(self):
        if self.train:
            return transforms.Compose([
                transforms.RandomResizedCrop(self.image_size),
                transforms.RandomHorizontalFlip(),
                transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.2),
                transforms.ToTensor(),
                transforms.Normalize(self.mean, self.std),
            ])
        else:
            return transforms.Compose([
                transforms.Resize((int(self.image_size / 0.875), int(self.image_size / 0.875))),
                transforms.CenterCrop(self.image_size),
                transforms.ToTensor(),
                transforms.Normalize(self.mean, self.std),
            ]) 
Example 22
Project: NAO_pytorch   Author: renqianluo   File: utils.py    License: GNU General Public License v3.0
def _data_transforms_cifar10(cutout_size):
    CIFAR_MEAN = [0.49139968, 0.48215827, 0.44653124]
    CIFAR_STD = [0.24703233, 0.24348505, 0.26158768]

    train_transform = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
    ])
    if cutout_size is not None:
        train_transform.transforms.append(Cutout(cutout_size))

    valid_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
        ])
    return train_transform, valid_transform 
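Cutout above is the project's own class, appended to the train pipeline after ToTensor and Normalize, so it operates on a CxHxW tensor. A minimal sketch of that kind of transform (an illustrative stand-in under that assumption, not the project's implementation) could look like:

import numpy as np

class Cutout:
    def __init__(self, size):
        self.size = size

    def __call__(self, img):
        # img: a CxHxW tensor; zero out one randomly placed size x size square
        h, w = img.shape[1], img.shape[2]
        y, x = np.random.randint(h), np.random.randint(w)
        y1, y2 = np.clip([y - self.size // 2, y + self.size // 2], 0, h)
        x1, x2 = np.clip([x - self.size // 2, x + self.size // 2], 0, w)
        img[:, y1:y2, x1:x2] = 0.0
        return img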
Example 23
Project: DetNAS   Author: megvii-model   File: bbox_aug.py    License: MIT License
def im_detect_bbox_hflip(model, images, target_scale, target_max_size, device):
    """
    Performs bbox detection on the horizontally flipped image.
    Function signature is the same as for im_detect_bbox.
    """
    transform = TT.Compose([
        T.Resize(target_scale, target_max_size),
        TT.RandomHorizontalFlip(1.0),
        TT.ToTensor(),
        T.Normalize(
            mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD, to_bgr255=cfg.INPUT.TO_BGR255
        )
    ])
    images = [transform(image) for image in images]
    images = to_image_list(images, cfg.DATALOADER.SIZE_DIVISIBILITY)
    boxlists = model(images.to(device))

    # Invert the detections computed on the flipped image
    boxlists_inv = [boxlist.transpose(0) for boxlist in boxlists]
    return boxlists_inv 
Example 24
Project: real-world-sr   Author: ManuelFritsche   File: data_loader.py    License: MIT License
def __init__(self, noisy_dir, crop_size, upscale_factor=4, cropped=False, flips=False, rotations=False, **kwargs):
        super(TrainDataset, self).__init__()
        # get all directories used for training
        if isinstance(noisy_dir, str):
            noisy_dir = [noisy_dir]
        self.files = []
        for n_dir in noisy_dir:
            self.files += [join(n_dir, x) for x in listdir(n_dir) if utils.is_image_file(x)]
        # intitialize image transformations and variables
        self.input_transform = T.Compose([
            T.RandomVerticalFlip(0.5 if flips else 0.0),
            T.RandomHorizontalFlip(0.5 if flips else 0.0),
            T.RandomCrop(crop_size)
        ])
        self.crop_transform = T.RandomCrop(crop_size // upscale_factor)
        self.upscale_factor = upscale_factor
        self.cropped = cropped
        self.rotations = rotations 
Example 25
Project: DGP   Author: cyvius96   File: image_folder.py    License: MIT License
def __init__(self, path, classes, stage='train'):
        self.data = []
        for i, c in enumerate(classes):
            cls_path = osp.join(path, c)
            images = os.listdir(cls_path)
            for image in images:
                self.data.append((osp.join(cls_path, image), i))

        normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                         std=[0.229, 0.224, 0.225])
        
        if stage == 'train':
            self.transforms = transforms.Compose([transforms.RandomResizedCrop(224),
                                                  transforms.RandomHorizontalFlip(),
                                                  transforms.ToTensor(),
                                                  normalize])
        if stage == 'test':
            self.transforms = transforms.Compose([transforms.Resize(256),
                                                  transforms.CenterCrop(224),
                                                  transforms.ToTensor(),
                                                  normalize]) 
Example 26
Project: ResNet50-Pytorch-Face-Recognition   Author: KaihuaTang   File: data.py    License: MIT License
def __init__(self, root_path="CACD2000/", label_path="data/label.npy", name_path="data/name.npy", train_mode = "train"):
        """
        Initialize some variables
        Load labels & names
        define transform
        """
        self.root_path = root_path
        self.image_labels = np.load(label_path)
        self.image_names = np.load(name_path)
        self.train_mode = train_mode
        self.transform = {
            'train': transforms.Compose([                
                transforms.Resize(224),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
 #               transforms.Normalize([0.656,0.487,0.411], [1., 1., 1.])
            ]),
            'val': transforms.Compose([
                transforms.Resize(224),
                transforms.ToTensor(),
 #               transforms.Normalize([0.656,0.487,0.411], [1., 1., 1.])
            ]),
        } 
Example 27
Project: VSE-C   Author: ExplorerFreda   File: data.py    License: MIT License
def get_transform(data_name, split_name, opt):
    normalizer = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                      std=[0.229, 0.224, 0.225])
    t_list = []
    if split_name == 'train':
        t_list = [transforms.RandomSizedCrop(opt.crop_size),
                  transforms.RandomHorizontalFlip()]
    elif split_name == 'val':
        t_list = [transforms.Scale(256), transforms.CenterCrop(224)]
    elif split_name == 'test':
        t_list = [transforms.Scale(256), transforms.CenterCrop(224)]

    t_end = [transforms.ToTensor(), normalizer]
    transform = transforms.Compose(t_list + t_end)
    return transform 
Example 28
Project: dogTorch   Author: ehsanik   File: sun_dataset.py    License: MIT License
def __init__(self, args, train=True):
        self.root_dir = args.data
        root_dir = self.root_dir
        if train:
            self.data_set_list = os.path.join(root_dir,
                                              args.trainset_image_list)
        else:
            self.data_set_list = os.path.join(root_dir, args.testset_image_list)

        self.categ_dict = get_class_names(
            os.path.join(root_dir, 'ClassName.txt'))

        self.data_set_list = parse_file(self.data_set_list, self.categ_dict)

        self.args = args
        self.read_features = args.read_features

        self.features_dir = args.features_dir
        if train:
            self.transform = transforms.Compose([
                transforms.RandomSizedCrop(args.image_size),
                transforms.RandomHorizontalFlip(),
                transforms.Scale((args.image_size, args.image_size)),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225]),
            ])
        else:
            self.transform = transforms.Compose([
                transforms.Scale((args.image_size, args.image_size)),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225]),
            ]) 
Example 29
Project: transferlearning   Author: jindongwang   File: dataset.py    License: MIT License
def loader(path, batch_size=16, num_workers=1, pin_memory=True):
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    return data.DataLoader(
        datasets.ImageFolder(path,
                             transforms.Compose([
                                 transforms.Resize(256),
                                 transforms.RandomResizedCrop(224),
                                 transforms.RandomHorizontalFlip(),
                                 transforms.ToTensor(),
                                 normalize,
                             ])),
        batch_size=batch_size,
        shuffle=True,
        num_workers=num_workers,
        pin_memory=pin_memory) 
Example 30
Project: transferlearning   Author: jindongwang   File: data_loader.py    License: MIT License
def load_training(root_path, dir, batch_size, kwargs):
    transform = transforms.Compose(
        [transforms.Resize([256, 256]),
         transforms.RandomCrop(224),
         transforms.RandomHorizontalFlip(),
         transforms.ToTensor()])
    data = datasets.ImageFolder(root=root_path + dir, transform=transform)
    train_loader = torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True, drop_last=True, **kwargs)
    return train_loader