Python tensorflow.keras.utils.Sequence() Examples

The following are 5 code examples of tensorflow.keras.utils.Sequence(), taken from open-source projects. Each example is credited to the original project and source file it comes from. You may also want to check out the other functions and classes available in the tensorflow.keras.utils module.
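Before the project examples, here is a minimal sketch of the contract every example on this page builds on: a Sequence subclass needs __len__ (batches per epoch) and __getitem__ (return one batch), plus an optional on_epoch_end hook. The class and variable names below (ArrayBatchSequence and friends) are illustrative, not taken from any of the projects.

import math
import numpy as np
from tensorflow.keras.utils import Sequence

class ArrayBatchSequence(Sequence):
    """Serve (x, y) batches from in-memory NumPy arrays."""

    def __init__(self, x, y, batch_size=32, shuffle=True):
        self.x, self.y = x, y
        self.batch_size = batch_size
        self.shuffle = shuffle
        self.indexes = np.arange(len(self.x))

    def __len__(self):
        # Number of batches per epoch
        return math.ceil(len(self.x) / self.batch_size)

    def __getitem__(self, idx):
        # Return the idx-th batch as an (inputs, targets) pair
        batch = self.indexes[idx * self.batch_size:(idx + 1) * self.batch_size]
        return self.x[batch], self.y[batch]

    def on_epoch_end(self):
        # Optionally reshuffle the sample order between epochs
        if self.shuffle:
            np.random.shuffle(self.indexes)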
Example #1
Source File: data.py    From keras-YOLOv3-model-set with MIT License
def __init__(self, annotation_lines, batch_size, input_shape, anchors, num_classes, enhance_augment=None, rescale_interval=-1, shuffle=True):
        self.annotation_lines = annotation_lines
        self.batch_size = batch_size
        self.input_shape = input_shape
        self.anchors = anchors
        self.num_classes = num_classes
        self.enhance_augment = enhance_augment
        self.indexes = np.arange(len(self.annotation_lines))
        self.shuffle = shuffle
        # prepare multiscale config
        # TODO: error happens when using Sequence data generator with
        #       multiscale input shape, disable multiscale first
        if rescale_interval != -1:
            raise ValueError("tf.keras.Sequence generator doesn't support multiscale input, pls remove related config")
        #self.rescale_interval = rescale_interval
        self.rescale_interval = -1

        self.rescale_step = 0
        self.input_shape_list = get_multiscale_list() 
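A Sequence generator like this is handed straight to Model.fit, which then drives __len__ and __getitem__ itself; the check above rejects multiscale input because the batch shapes are treated as fixed once training starts. Below is a hedged sketch of that usage with a toy model and a toy Sequence, not code from keras-YOLOv3-model-set.

import numpy as np
import tensorflow as tf
from tensorflow.keras.utils import Sequence

class ToyBatches(Sequence):
    def __len__(self):
        return 8  # batches per epoch

    def __getitem__(self, idx):
        x = np.random.rand(4, 16).astype("float32")
        y = np.random.rand(4, 1).astype("float32")
        return x, y

inputs = tf.keras.Input(shape=(16,))
outputs = tf.keras.layers.Dense(1)(inputs)
model = tf.keras.Model(inputs, outputs)
model.compile(optimizer="adam", loss="mse")
# On older tf.keras versions, fit() also accepts workers=... and
# use_multiprocessing=True to read Sequence batches from worker processes.
model.fit(ToyBatches(), epochs=2)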
Example #2
Source File: base_sequence.py    From U-Time with MIT License
def seed(self):
        """
        When multiprocessing, worker processes inherit the RNG state of the
        main process - here we reseed each process once so that batches are
        generated independently across multi-process calls to the Sequence
        batch generator methods.

        When multi-threading, this method will simply re-seed the
        'MainProcess' process once.
        """
        pname = current_process().name
        if pname not in self.is_seeded or not self.is_seeded[pname]:
            # Re-seed this process
            np.random.seed()
            self.is_seeded[pname] = True 
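The same idea in a self-contained form - a hedged sketch, not the U-Time implementation: forked worker processes would otherwise replay the parent's NumPy RNG state, so the generator reseeds itself at most once per process.

from multiprocessing import current_process

import numpy as np
from tensorflow.keras.utils import Sequence

class ReseedingSequence(Sequence):
    def __init__(self):
        self.is_seeded = {}

    def seed(self):
        # Reseed NumPy once per process (worker or main) from OS entropy
        pname = current_process().name
        if not self.is_seeded.get(pname, False):
            np.random.seed()
            self.is_seeded[pname] = True

    def __len__(self):
        return 4

    def __getitem__(self, idx):
        self.seed()  # cheap no-op after the first call in each process
        return np.random.rand(2, 3), np.random.rand(2, 1)

seq = ReseedingSequence()
x_batch, y_batch = seq[0]  # reseeds the calling process on first access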
Example #3
Source File: base_sequence.py    From U-Time with MIT License
def _assert_scaled(self, warn_mean=5, warn_std=5, n_batches=5):
        """
        Samples n_batches random batches from the sub-class Sequencer object
        and computes the mean and STD of the values across the batches. If
        their absolute values are higher than 'warn_mean' and 'warn_std'
        respectively, a warning is printed.

        Note: Does not raise an Error or Warning

        Args:
            warn_mean: Maximum allowed abs(mean) before warning is invoked
            warn_std:  Maximum allowed std before warning is invoked
            n_batches: Number of batches to sample for mean/std computation
        """
        # Get a set of random batches
        batches = []
        for ind in np.random.randint(0, len(self), n_batches):
            X, _ = self[ind]  # Use __getitem__ of the given Sequence class
            batches.append(X)
        mean, std = np.abs(np.mean(batches)), np.std(batches)
        self.logger("Mean assertion ({} batches):  {:.3f}".format(n_batches,
                                                                  mean))
        self.logger("Scale assertion ({} batches): {:.3f}".format(n_batches,
                                                                  std))
        if mean > warn_mean or std > warn_std:
            self.logger.warn("OBS: Found large abs(mean) and std values over 5"
                             " sampled batches ({:.3f} and {:.3f})."
                             " Make sure scaling is active at either the "
                             "global level (attribute 'scaler' has been set on"
                             " individual SleepStudy objects, typically via the"
                             " SleepStudyDataset set_scaler method), or "
                             "batch-wise via the batch_scaler attribute of the"
                             " Sequence object.".format(mean, std)) 
Example #4
Source File: multi_task_sequence.py    From MultiPlanarUNet with MIT License
def __len__(self):
        """Number of batch in the Sequence.

        Returns:
            The number of batches in the Sequence.
        """
        return sum([len(seq) for seq in self.sequencers]) 
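For context, a __len__ like this belongs to a Sequence that chains several sub-sequences. A hedged sketch of the full pattern (the class name is illustrative, not MultiPlanarUNet's) also maps a global batch index onto the right sub-sequence in __getitem__.

from tensorflow.keras.utils import Sequence

class ChainedSequence(Sequence):
    def __init__(self, sequencers):
        self.sequencers = sequencers

    def __len__(self):
        # Total number of batches across all sub-sequences
        return sum(len(seq) for seq in self.sequencers)

    def __getitem__(self, index):
        # Walk the sub-sequences until the global index falls inside one
        for seq in self.sequencers:
            if index < len(seq):
                return seq[index]
            index -= len(seq)
        raise IndexError(index)

# chained = ChainedSequence([seq_a, seq_b])  # seq_a/seq_b: other Sequences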
Example #5
Source File: test_models.py    From megnet with BSD 3-Clause "New" or "Revised" License
def setUpClass(cls):
        cls.n_feature = 3
        cls.n_bond_features = 10
        cls.n_global_features = 2

        class Generator(Sequence):
            def __init__(self, x, y):
                self.x = x
                self.y = y
            def __len__(self):
                return 10
            def __getitem__(self, index):
                return self.x, self.y

        x_crystal = [np.array([1, 2, 3, 4]).reshape((1, -1)),
                     np.random.normal(size=(1, 6, cls.n_bond_features)),
                     np.random.normal(size=(1, 2, cls.n_global_features)),
                     np.array([[0, 0, 1, 1, 2, 3]]),
                     np.array([[1, 1, 0, 0, 3, 2]]),
                     np.array([[0, 0, 1, 1]]),
                     np.array([[0, 0, 0, 0, 1, 1]]),
                     ]

        y = np.random.normal(size=(1, 2, 1))
        cls.train_gen_crystal = Generator(x_crystal, y)
        x_mol = [np.random.normal(size=(1, 4, cls.n_feature)),
                 np.random.normal(size=(1, 6, cls.n_bond_features)),
                 np.random.normal(size=(1, 2, cls.n_global_features)),
                 np.array([[0, 0, 1, 1, 2, 3]]),
                 np.array([[1, 1, 0, 0, 3, 2]]),
                 np.array([[0, 0, 1, 1]]),
                 np.array([[0, 0, 0, 0, 1, 1]]),
                 ]
        y = np.random.normal(size=(1, 2, 1))
        cls.train_gen_mol = Generator(x_mol, y)

        cls.model = MEGNetModel(10, 2, nblocks=1, lr=1e-2,
                                n1=4, n2=4, n3=4, npass=1, ntarget=1,
                                graph_converter=CrystalGraph(bond_converter=GaussianDistance(np.linspace(0, 5, 10), 0.5)),
                                )
        cls.model2 = MEGNetModel(10, 2, nblocks=1, lr=1e-2,
                                 n1=4, n2=4, n3=4, npass=1, ntarget=2,
                                 graph_converter=CrystalGraph(bond_converter=GaussianDistance(np.linspace(0, 5, 10), 0.5)),
                                 )
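One detail worth noting from this example: each batch produced by Generator is a (list_of_inputs, targets) pair, which is how a Sequence feeds a multi-input graph network such as MEGNet. The toy two-input model below is a hedged illustration of that convention using plain tf.keras, not megnet code.

import numpy as np
import tensorflow as tf
from tensorflow.keras.utils import Sequence

class TwoInputBatches(Sequence):
    def __len__(self):
        return 5

    def __getitem__(self, idx):
        xa = np.random.rand(8, 4).astype("float32")
        xb = np.random.rand(8, 6).astype("float32")
        y = np.random.rand(8, 1).astype("float32")
        return [xa, xb], y  # list of inputs, one target array

in_a = tf.keras.Input(shape=(4,))
in_b = tf.keras.Input(shape=(6,))
out = tf.keras.layers.Dense(1)(tf.keras.layers.concatenate([in_a, in_b]))
model = tf.keras.Model([in_a, in_b], out)
model.compile(optimizer="adam", loss="mse")
model.fit(TwoInputBatches(), epochs=1)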