Module delta.ml.ml_config

Configuration options specific to machine learning.

Source code
# Copyright © 2020, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The DELTA (Deep Earth Learning, Tools, and Analysis) platform is
# licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Configuration options specific to machine learning.
"""
# Please do not put any tensorflow imports in this file as it will greatly slow loading
# when tensorflow isn't needed
import os.path

from typing import Optional

import appdirs
import pkg_resources
import yaml

from delta.imagery.imagery_config import ImageSet, ImageSetConfig, load_images_labels
import delta.config as config

class ValidationSet:#pylint:disable=too-few-public-methods
    """
    Specifies the images and labels in a validation set.
    """
    def __init__(self, images: Optional[ImageSet]=None, labels: Optional[ImageSet]=None,
                 from_training: bool=False, steps: int=1000):
        """
        Parameters
        ----------
        images: ImageSet
            Validation images.
        labels: ImageSet
            Optional, validation labels.
        from_training: bool
            If true, ignore images and labels arguments and take data from the training imagery.
            The validation data will not be used for training.
        steps: int
            If from_training is true, take this many batches for validation.
        """
        self.images = images
        self.labels = labels
        self.from_training = from_training
        self.steps = steps

class TrainingSpec:#pylint:disable=too-few-public-methods,too-many-arguments
    """
    Options used in training by `delta.ml.train.train`.
    """
    def __init__(self, batch_size, epochs, loss, metrics, validation=None, steps=None,
                 stride=None, optimizer='Adam', max_tile_offset=None):
        self.batch_size = batch_size
        self.epochs = epochs
        self.loss = loss
        self.validation = validation
        self.steps = steps
        self.metrics = metrics
        self.stride = stride
        self.optimizer = optimizer
        self.max_tile_offset = max_tile_offset

class NetworkConfig(config.DeltaConfigComponent):
    """
    Configuration for a neural network.
    """
    def __init__(self):
        super().__init__()
        self.register_field('yaml_file', str, 'yaml_file', config.validate_path,
                            'A YAML file describing the network to train.')
        self.register_field('params', dict, None, None, None)
        self.register_field('layers', list, None, None, None)

    # overwrite model entirely if updated (don't want combined layers from multiple files)
    def _load_dict(self, d : dict, base_dir):
        super()._load_dict(d, base_dir)
        if 'layers' in d and d['layers'] is not None:
            self._config_dict['yaml_file'] = None
        elif 'yaml_file' in d and d['yaml_file'] is not None:
            self._config_dict['layers'] = None
            if 'layers' in d and d['layers'] is not None:
                raise ValueError('Specified both yaml file and layers in model.')
            yaml_file = d['yaml_file']
            resource = os.path.join('config', yaml_file)
            if not os.path.exists(yaml_file) and pkg_resources.resource_exists('delta', resource):
                yaml_file = pkg_resources.resource_filename('delta', resource)
            if not os.path.exists(yaml_file):
                raise ValueError('Model yaml_file does not exist: ' + yaml_file)
            with open(yaml_file, 'r') as f:
                self._config_dict.update(yaml.safe_load(f))

def validate_size(size, _):
    """
    Validate an image region size.
    """
    if size is None:
        return size
    assert len(size) == 2, 'Size must be tuple.'
    assert isinstance(size[0], int) and isinstance(size[1], int), 'Size must be integer.'
    assert size[0] > 0 and size[1] > 0, 'Size must be positive.'
    return size

class ValidationConfig(config.DeltaConfigComponent):
    """
    Configuration for training validation.
    """
    def __init__(self):
        super().__init__()
        self.register_field('steps', int, 'steps', config.validate_positive,
                            'If from training, validate for this many steps.')
        self.register_field('from_training', bool, 'from_training', None,
                            'Take validation data from training data.')
        self.register_component(ImageSetConfig(), 'images', '__image_comp')
        self.register_component(ImageSetConfig(), 'labels', '__label_comp')
        self.__images = None
        self.__labels = None

    def reset(self):
        super().reset()
        self.__images = None
        self.__labels = None

    def images(self) -> ImageSet:
        """
        Returns the training images.
        """
        if self.__images is None:
            (self.__images, self.__labels) = load_images_labels(self._components['images'],
                                                                self._components['labels'],
                                                                config.config.dataset.classes)
        return self.__images

    def labels(self) -> ImageSet:
        """
        Returns the label images.
        """
        if self.__labels is None:
            (self.__images, self.__labels) = load_images_labels(self._components['images'],
                                                                self._components['labels'],
                                                                config.config.dataset.classes)
        return self.__labels

def _validate_stride(stride, _):
    if stride is None:
        return None
    if isinstance(stride, int):
        stride = (stride, stride)
    assert len(stride) == 2, 'Stride must have two components.'
    assert isinstance(stride[0], int) and isinstance(stride[1], int), 'Stride must be integer.'
    assert stride[0] > 0 and stride[1] > 0, 'Stride must be positive.'
    return stride

class TrainingConfig(config.DeltaConfigComponent):
    """
    Configuration for training.
    """
    def __init__(self):
        super().__init__(section_header='Training')
        self.register_field('stride', (list, int, None), None, _validate_stride,
                            'Pixels to skip when iterating over chunks. A value of 1 means to take every chunk.')
        self.register_field('epochs', int, None, config.validate_positive,
                            'Number of times to repeat training on the dataset.')
        self.register_field('batch_size', int, None, config.validate_positive,
                            'Features to group into each training batch.')
        self.register_field('max_tile_offset', int, None, None,
                            'Choose random tile offset each epoch within this range.')
        self.register_field('loss', (str, dict), None, None, 'Keras loss function.')
        self.register_field('metrics', list, None, None, 'List of metrics to apply.')
        self.register_field('steps', int, None, config.validate_non_negative, 'Batches to train per epoch.')
        self.register_field('optimizer', (str, dict), None, None, 'Keras optimizer to use.')
        self.register_field('callbacks', list, 'callbacks', None, 'Callbacks used to modify training')
        self.register_field('disable_mixed_precision', bool, 'disable_mixed_precision', None,
                            'Disables mixed precision tensorflow policy. By default DELTA will use mixed '
                            'precision if the hardware supports it. Details on ways to improve mixed '
                            'precision performance: '
                            'https://www.tensorflow.org/guide/mixed_precision#summary')
        self.register_arg('epochs', '--epochs')
        self.register_arg('batch_size', '--batch-size')
        self.register_arg('steps', '--steps')
        self.register_arg('disable_mixed_precision', '--disable-mixed-precision', action="store_true", type=None)
        self.register_field('augmentations', list, None, None, None)
        self.register_component(ValidationConfig(), 'validation')
        self.register_component(NetworkConfig(), 'network')
        self.__training = None

    def spec(self) -> TrainingSpec:
        """
        Returns the options configuring training.
        """
        if not self.__training:
            from_training = self._components['validation'].from_training()
            vsteps = self._components['validation'].steps()
            (vimg, vlabels) = (None, None)
            if not from_training:
                (vimg, vlabels) = (self._components['validation'].images(), self._components['validation'].labels())
            validation = ValidationSet(vimg, vlabels, from_training, vsteps)
            self.__training = TrainingSpec(batch_size=self._config_dict['batch_size'],
                                           epochs=self._config_dict['epochs'],
                                           loss=self._config_dict['loss'],
                                           metrics=self._config_dict['metrics'],
                                           validation=validation,
                                           steps=self._config_dict['steps'],
                                           stride=self._config_dict['stride'],
                                           optimizer=self._config_dict['optimizer'],
                                           max_tile_offset=self._config_dict['max_tile_offset'])
        return self.__training

    def augmentations(self):
        return self._config_dict['augmentations']

    def _load_dict(self, d : dict, base_dir):
        self.__training = None
        super()._load_dict(d, base_dir)


class MLFlowCheckpointsConfig(config.DeltaConfigComponent):
    """
    Configure MLFlow checkpoints.
    """
    def __init__(self):
        super().__init__()
        self.register_field('frequency', int, 'frequency', None,
                            'Frequency in batches to store neural network checkpoints.')
        self.register_field('only_save_latest', bool, 'only_save_latest', None,
                            'If true, only keep the most recent checkpoint.')

class MLFlowConfig(config.DeltaConfigComponent):
    """
    Configure MLFlow.
    """
    def __init__(self):
        super().__init__()
        self.register_field('enabled', bool, 'enabled', None, 'Enable MLFlow.')
        self.register_field('uri', str, None, None, 'URI to store MLFlow data.')
        self.register_field('frequency', int, 'frequency', config.validate_positive,
                            'Frequency to store metrics.')
        self.register_field('experiment_name', str, 'experiment', None, 'Experiment name in MLFlow.')

        self.register_arg('enabled', '--disable-mlflow', action='store_const', const=False, type=None)
        self.register_arg('enabled', '--enable-mlflow', action='store_const', const=True, type=None)
        self.register_component(MLFlowCheckpointsConfig(), 'checkpoints')

    def uri(self) -> str:
        """
        Returns the URI for MLFlow to store data.
        """
        uri = self._config_dict['uri']
        if uri == 'default':
            uri = 'file://' + os.path.join(appdirs.AppDirs('delta', 'nasa').user_data_dir, 'mlflow')
        return uri

class TensorboardConfig(config.DeltaConfigComponent):
    """
    Tensorboard configuration.
    """
    def __init__(self):
        super().__init__()
        self.register_field('enabled', bool, 'enabled', None, 'Enable Tensorboard.')
        self.register_field('dir', str, None, None, 'Directory to store Tensorboard data.')

    def dir(self) -> str:
        """
        Returns the directory for tensorboard to store to.
        """
        tbd = self._config_dict['dir']
        if tbd == 'default':
            tbd = os.path.join(appdirs.AppDirs('delta', 'nasa').user_data_dir, 'tensorboard')
        return tbd

class ClassifyConfig(config.DeltaConfigComponent):
    """
    Configure classification options.
    """
    def __init__(self):
        super().__init__()
        self.register_field('prob_image', bool, 'prob_image', None, 'Set true to save a probability image.')
        self.register_field('overlap', int, 'overlap', None, 'Amount to overlap processed tiles.')
        self.register_field('regions', list, None, None,
                            'List of region tags to compute statistics over, default is all tags.')
        self.register_field('metrics', list, None, None, 'List of metrics to apply.')
        self.register_field('wkt_dir', str, None, None,
                            'Look for WKT files in this folder, default is look in the input image folder.')

        self.register_arg('prob_image', '--prob', action='store_const', const=True, type=None)
        self.register_arg('prob_image', '--noprob', action='store_const', const=False, type=None)
        self.register_arg('overlap', '--overlap')

    def regions(self):
        if 'regions' in self._config_dict:
            return self._config_dict['regions']
        return None

    def wkt_dir(self):
        if 'wkt_dir' in self._config_dict:
            return self._config_dict['wkt_dir']
        return None

    def metrics(self):
        if 'metrics' in self._config_dict:
            return self._config_dict['metrics']
        return []

def register():
    """
    Registers machine learning config options with the global config manager.

    The arguments enable command line arguments for different components.
    """
    config.config.general.register_field('gpus', int, 'gpus', None, 'Number of gpus to use.')
    config.config.general.register_arg('gpus', '--gpus')

    config.config.register_component(TrainingConfig(), 'train')
    config.config.register_component(MLFlowConfig(), 'mlflow')
    config.config.register_component(TensorboardConfig(), 'tensorboard')
    config.config.register_component(ClassifyConfig(), 'classify')

Functions

def register()

Registers machine learning config options with the global config manager.

The arguments enable command line arguments for different components.
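
A minimal usage sketch: register the options, then read them back once a YAML configuration has been loaded. The loading call itself belongs to delta.config and is only indicated by a comment here; the accessor methods shown (spec, uri, dir) are the ones defined in this module.

import delta.config as config
import delta.ml.ml_config as ml_config

ml_config.register()   # adds the train, mlflow, tensorboard and classify sections

# ... load YAML configuration files through delta.config's usual mechanism ...

spec = config.config.train.spec()        # TrainingSpec assembled from the loaded options
print(spec.batch_size, spec.epochs, spec.optimizer)
print(config.config.mlflow.uri())        # 'default' resolves to an appdirs location
print(config.config.tensorboard.dir())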

def validate_size(size, _)

Validate an image region size.
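
Its behaviour is easiest to see with a couple of direct calls (the second argument is ignored by this validator):

assert validate_size(None, None) is None
assert validate_size((256, 256), None) == (256, 256)

try:
    validate_size((0, 256), None)
except AssertionError as error:
    print(error)   # Size must be positive.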


Classes

class ClassifyConfig

Configure classification options.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.
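
The registered field names map directly onto keys of a classify section in the YAML configuration; the fragment below is illustrative only, since the surrounding file layout is defined by delta.config. prob_image and overlap can also be set from the command line with --prob/--noprob and --overlap.

# Hypothetical YAML fragment:
#
#   classify:
#     prob_image: true
#     overlap: 32
#     metrics:
#       - Accuracy
#
import delta.config as config

print(config.config.classify.metrics())   # ['Accuracy'], or [] when unset
print(config.config.classify.regions())   # None unless 'regions' was given
print(config.config.classify.wkt_dir())   # None -> look beside the input images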

Ancestors

delta.config.DeltaConfigComponent

Methods

def metrics(self)

Returns the list of metrics to apply, or an empty list if none are configured.

def regions(self)

Returns the list of region tags to compute statistics over, or None to use all tags.

def wkt_dir(self)

Returns the directory to search for WKT files, or None to look in the input image folder.

Inherited members

class MLFlowCheckpointsConfig

Configure MLFlow checkpoints.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.

Ancestors

delta.config.DeltaConfigComponent

Inherited members

class MLFlowConfig

Configure MLFlow.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.
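
The corresponding YAML keys follow the field names registered above (the checkpoints sub-section comes from MLFlowCheckpointsConfig); the layout shown is an assumption. MLFlow can also be toggled from the command line with --enable-mlflow / --disable-mlflow.

# Hypothetical YAML fragment:
#
#   mlflow:
#     enabled: true
#     uri: default
#     frequency: 100
#     experiment_name: my_experiment
#     checkpoints:
#       frequency: 1000
#       only_save_latest: true
#
import delta.config as config

if config.config.mlflow.enabled():
    print(config.config.mlflow.uri())   # 'default' expands to a file:// path under appdirs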

Ancestors

delta.config.DeltaConfigComponent

Methods

def uri(self) ‑> str

Returns the URI for MLFlow to store data.


Inherited members

class NetworkConfig

Configuration for a neural network.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.
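
A network is described either by a separate yaml_file or by an inline layers list, never both (_load_dict raises ValueError otherwise). The helper below is not part of DELTA; it merely restates the yaml_file lookup order from _load_dict: an existing filesystem path wins, otherwise the name is looked up among the files packaged under delta/config.

import os.path
import pkg_resources

def resolve_network_yaml(yaml_file):
    """Hypothetical helper mirroring NetworkConfig._load_dict's file lookup."""
    resource = os.path.join('config', yaml_file)
    if not os.path.exists(yaml_file) and pkg_resources.resource_exists('delta', resource):
        yaml_file = pkg_resources.resource_filename('delta', resource)
    if not os.path.exists(yaml_file):
        raise ValueError('Model yaml_file does not exist: ' + yaml_file)
    return yaml_file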

Ancestors

delta.config.DeltaConfigComponent

Inherited members

class TensorboardConfig

Tensorboard configuration.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.
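
A short sketch of reading these options back after registration and configuration loading (the YAML fragment is an assumption; the field names match the registrations above):

# Hypothetical YAML fragment:
#
#   tensorboard:
#     enabled: true
#     dir: default
#
import delta.config as config

if config.config.tensorboard.enabled():
    print(config.config.tensorboard.dir())   # 'default' maps to <appdirs user data>/tensorboard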

Ancestors

delta.config.DeltaConfigComponent

Methods

def dir(self) ‑> str

Returns the directory where Tensorboard stores its data.


Inherited members

class TrainingConfig

Configuration for training.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.
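
The field names above become keys of the train section of the YAML configuration; the values in the fragment below are placeholders, and the overall file layout is defined by delta.config. epochs, batch_size and steps can also be set with --epochs, --batch-size and --steps.

# Hypothetical YAML fragment:
#
#   train:
#     epochs: 10
#     batch_size: 32
#     steps: 1000
#     loss: sparse_categorical_crossentropy
#     optimizer: Adam
#     metrics:
#       - sparse_categorical_accuracy
#     validation:
#       from_training: true
#       steps: 100
#     network:
#       yaml_file: my_network.yaml
#
import delta.config as config

spec = config.config.train.spec()   # the loaded options as a TrainingSpec
print(spec.epochs, spec.batch_size, spec.loss)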

Ancestors

delta.config.DeltaConfigComponent

Methods

def augmentations(self)

Returns the configured list of augmentations.

def spec(self) ‑> TrainingSpec

Returns the options configuring training.


Inherited members

class TrainingSpec (batch_size, epochs, loss, metrics, validation=None, steps=None, stride=None, optimizer='Adam', max_tile_offset=None)

Options used in training by delta.ml.train.train().
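
A TrainingSpec is normally built by TrainingConfig.spec(), but it can also be constructed directly for programmatic use; the loss and metric names below are ordinary Keras identifiers chosen for illustration.

from delta.ml.ml_config import TrainingSpec, ValidationSet

spec = TrainingSpec(batch_size=32,
                    epochs=10,
                    loss='sparse_categorical_crossentropy',
                    metrics=['sparse_categorical_accuracy'],
                    validation=ValidationSet(from_training=True, steps=500),
                    steps=1000)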

class ValidationConfig

Configuration for training validation.

Parameters

section_header : Optional[str]
The title of the section for command line arguments in the help.
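
Validation data is configured either by taking batches from the training imagery (from_training plus steps) or by pointing at dedicated images and labels sections, which follow the ImageSetConfig layout from delta.imagery.imagery_config. The resulting ValidationSet can be read off the training spec; the sketch below assumes a configuration has already been loaded.

import delta.config as config

val = config.config.train.spec().validation   # a ValidationSet
print(val.from_training, val.steps)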

Ancestors

delta.config.DeltaConfigComponent

Methods

def images(self) ‑> ImageSet

Returns the validation images.

def labels(self) ‑> ImageSet

Returns the label images.


Inherited members

class ValidationSet (images: Optional[ImageSet] = None, labels: Optional[ImageSet] = None, from_training: bool = False, steps: int = 1000)

Specifies the images and labels in a validation set.

Parameters

images : ImageSet
Validation images.
labels : ImageSet
Optional, validation labels.
from_training : bool
If true, ignore images and labels arguments and take data from the training imagery. The validation data will not be used for training.
steps : int
If from_training is true, take this many batches for validation.
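
For example, to hold out 500 batches of the training imagery for validation, or to supply dedicated imagery instead (my_images and my_labels are placeholder ImageSet objects, e.g. from delta.imagery.imagery_config.load_images_labels):

from delta.ml.ml_config import ValidationSet

validation = ValidationSet(from_training=True, steps=500)

# With dedicated validation imagery instead:
# validation = ValidationSet(images=my_images, labels=my_labels)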