Python google.protobuf.text_format.Merge() Examples

The following are 30 code examples of google.protobuf.text_format.Merge(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the google.protobuf.text_format module.
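Before the project-specific examples, here is a minimal, self-contained sketch of the call itself. It uses the stock google.protobuf Duration message only so the snippet runs without the DOTA_models protos; any generated message type works the same way. Merge() parses a text-format string into an existing message instance and returns that instance, merging the parsed fields into whatever the message already contains, and it raises text_format.ParseError on malformed input (text_format.Parse() is the stricter variant, which additionally rejects repeated assignments to a singular field).

from google.protobuf import text_format
from google.protobuf import duration_pb2

# Duration is used purely for illustration because it ships with the
# protobuf package; the DOTA_models examples below use their own *_pb2 types.
message = duration_pb2.Duration()
text_format.Merge("seconds: 120 nanos: 500", message)
print(message.seconds)  # 120
print(message.nanos)    # 500

# Malformed text raises text_format.ParseError; Example #16 below catches it
# to fall back to binary parsing of the same file.
try:
    text_format.Merge("no_such_field: 1", message)
except text_format.ParseError as e:
    print("parse failed:", e)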
Example #1
Source File: optimizer_builder_test.py    From DOTA_models with Apache License 2.0
def testBuildMomentumOptimizer(self):
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    global_summaries = set([])
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    self.assertTrue(isinstance(optimizer, tf.train.MomentumOptimizer)) 
Example #2
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_use_relu_6_activation(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      activation: RELU_6
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu6) 
Example #3
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_variance_in_range_with_variance_scaling_initializer_fan_in(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        variance_scaling_initializer {
          factor: 2.0
          mode: FAN_IN
          uniform: false
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    initializer = conv_scope_arguments['weights_initializer']
    self._assert_variance_in_range(initializer, shape=[100, 40],
                                   variance=2. / 100.) 
Example #4
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_variance_in_range_with_variance_scaling_initializer_fan_out(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        variance_scaling_initializer {
          factor: 2.0
          mode: FAN_OUT
          uniform: false
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    initializer = conv_scope_arguments['weights_initializer']
    self._assert_variance_in_range(initializer, shape=[100, 40],
                                   variance=2. / 40.) 
Example #5
Source File: post_processing_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_non_max_suppressor_with_correct_parameters(self):
    post_processing_text_proto = """
      batch_non_max_suppression {
        score_threshold: 0.7
        iou_threshold: 0.6
        max_detections_per_class: 100
        max_total_detections: 300
      }
    """
    post_processing_config = post_processing_pb2.PostProcessing()
    text_format.Merge(post_processing_text_proto, post_processing_config)
    non_max_suppressor, _ = post_processing_builder.build(
        post_processing_config)
    self.assertEqual(non_max_suppressor.keywords['max_size_per_class'], 100)
    self.assertEqual(non_max_suppressor.keywords['max_total_size'], 300)
    self.assertAlmostEqual(non_max_suppressor.keywords['score_thresh'], 0.7)
    self.assertAlmostEqual(non_max_suppressor.keywords['iou_thresh'], 0.6) 
Example #6
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_variance_in_range_with_variance_scaling_initializer_fan_avg(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        variance_scaling_initializer {
          factor: 2.0
          mode: FAN_AVG
          uniform: false
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    initializer = conv_scope_arguments['weights_initializer']
    self._assert_variance_in_range(initializer, shape=[100, 40],
                                   variance=4. / (100. + 40.)) 
Example #7
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_variance_in_range_with_truncated_normal_initializer(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
          mean: 0.0
          stddev: 0.8
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    initializer = conv_scope_arguments['weights_initializer']
    self._assert_variance_in_range(initializer, shape=[100, 40],
                                   variance=0.49, tol=1e-1) 
Example #8
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_use_relu_activation(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      activation: RELU
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu) 
Example #9
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_do_not_use_batch_norm_if_default(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]
    self.assertEqual(conv_scope_arguments['normalizer_fn'], None)
    self.assertEqual(conv_scope_arguments['normalizer_params'], None) 
Example #10
Source File: optimizer_builder_test.py    From DOTA_models with Apache License 2.0
def testBuildManualStepLearningRate(self):
    learning_rate_text_proto = """
      manual_step_learning_rate {
        schedule {
          step: 0
          learning_rate: 0.006
        }
        schedule {
          step: 90000
          learning_rate: 0.00006
        }
      }
    """
    global_summaries = set([])
    learning_rate_proto = optimizer_pb2.LearningRate()
    text_format.Merge(learning_rate_text_proto, learning_rate_proto)
    learning_rate = optimizer_builder._create_learning_rate(
        learning_rate_proto, global_summaries)
    self.assertTrue(isinstance(learning_rate, tf.Tensor)) 
Example #11
Source File: optimizer_builder_test.py    From DOTA_models with Apache License 2.0
def testBuildAdamOptimizer(self):
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: false
    """
    global_summaries = set([])
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    self.assertTrue(isinstance(optimizer, tf.train.AdamOptimizer)) 
Example #12
Source File: eval.py    From DOTA_models with Apache License 2.0
def get_configs_from_pipeline_file():
  """Reads evaluation configuration from a pipeline_pb2.TrainEvalPipelineConfig.

  Reads evaluation config from file specified by pipeline_config_path flag.

  Returns:
    model_config: a model_pb2.DetectionModel
    eval_config: an eval_pb2.EvalConfig
    input_config: an input_reader_pb2.InputReader
  """
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)

  model_config = pipeline_config.model
  if FLAGS.eval_training_data:
    eval_config = pipeline_config.train_config
  else:
    eval_config = pipeline_config.eval_config
  input_config = pipeline_config.eval_input_reader

  return model_config, eval_config, input_config 
Example #13
Source File: optimizer_builder_test.py    From DOTA_models with Apache License 2.0
def testBuildMovingAverageOptimizer(self):
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    global_summaries = set([])
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    self.assertTrue(
        isinstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)) 
Example #14
Source File: optimizer_builder_test.py    From DOTA_models with Apache License 2.0
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    global_summaries = set([])
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    self.assertTrue(
        isinstance(optimizer, tf.contrib.opt.MovingAverageOptimizer))
    # TODO: Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2) 
Example #15
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_return_l2_regularizer_weights(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
          weight: 0.42
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    conv_scope_arguments = scope.values()[0]

    regularizer = conv_scope_arguments['weights_regularizer']
    weights = np.array([1., -1, 4., 2.])
    with self.test_session() as sess:
      result = sess.run(regularizer(tf.constant(weights)))
    self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result) 
Example #16
Source File: label_map_util.py    From DOTA_models with Apache License 2.0
def load_labelmap(path):
  """Loads label map proto.

  Args:
    path: path to StringIntLabelMap proto text file.
  Returns:
    a StringIntLabelMapProto
  """
  with tf.gfile.GFile(path, 'r') as fid:
    label_map_string = fid.read()
    label_map = string_int_label_map_pb2.StringIntLabelMap()
    try:
      text_format.Merge(label_map_string, label_map)
    except text_format.ParseError:
      label_map.ParseFromString(label_map_string)
  _validate_label_map(label_map)
  return label_map 
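A hypothetical usage sketch follows (the path and the id/name fields on the repeated item entries are assumptions based on the upstream object_detection string_int_label_map.proto, which is not shown here). Because of the try/except above, the same loader accepts either a text-format .pbtxt label map or a binary-serialized one.

# Hypothetical call; 'data/label_map.pbtxt' is an illustrative path only.
label_map = load_labelmap('data/label_map.pbtxt')
for item in label_map.item:
    print(item.id, item.name)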
Example #17
Source File: anchor_generator_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_grid_anchor_generator_with_defaults(self):
    anchor_generator_text_proto = """
      grid_anchor_generator {
      }
     """
    anchor_generator_proto = anchor_generator_pb2.AnchorGenerator()
    text_format.Merge(anchor_generator_text_proto, anchor_generator_proto)
    anchor_generator_object = anchor_generator_builder.build(
        anchor_generator_proto)
    self.assertTrue(isinstance(anchor_generator_object,
                               grid_anchor_generator.GridAnchorGenerator))
    self.assertListEqual(anchor_generator_object._scales, [])
    self.assertListEqual(anchor_generator_object._aspect_ratios, [])
    with self.test_session() as sess:
      base_anchor_size, anchor_offset, anchor_stride = sess.run(
          [anchor_generator_object._base_anchor_size,
           anchor_generator_object._anchor_offset,
           anchor_generator_object._anchor_stride])
    self.assertAllEqual(anchor_offset, [0, 0])
    self.assertAllEqual(anchor_stride, [16, 16])
    self.assertAllEqual(base_anchor_size, [256, 256]) 
Example #18
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_separable_conv2d_and_conv2d_and_transpose_have_same_parameters(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l1_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    kwargs_1, kwargs_2, kwargs_3 = scope.values()
    self.assertDictEqual(kwargs_1, kwargs_2)
    self.assertDictEqual(kwargs_1, kwargs_3) 
Example #19
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_explicit_fc_op_arg_scope_has_fully_connected_op(self):
    conv_hyperparams_text_proto = """
      op: FC
      regularizer {
        l1_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    self.assertTrue(self._get_scope_key(slim.fully_connected) in scope) 
Example #20
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_normalize_image(self):
    preprocessor_text_proto = """
    normalize_image {
      original_minval: 0.0
      original_maxval: 255.0
      target_minval: -1.0
      target_maxval: 1.0
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.normalize_image)
    self.assertEqual(args, {
        'original_minval': 0.0,
        'original_maxval': 255.0,
        'target_minval': -1.0,
        'target_maxval': 1.0,
    }) 
Example #21
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_random_crop_to_aspect_ratio(self):
    preprocessor_text_proto = """
    random_crop_to_aspect_ratio {
      aspect_ratio: 0.85
      overlap_thresh: 0.35
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.random_crop_to_aspect_ratio)
    self.assert_dictionary_close(args, {'aspect_ratio': 0.85,
                                        'overlap_thresh': 0.35}) 
Example #22
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_resize_image(self):
    preprocessor_text_proto = """
    resize_image {
      new_height: 75
      new_width: 100
      method: BICUBIC
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.resize_image)
    self.assertEqual(args, {'new_height': 75,
                            'new_width': 100,
                            'method': tf.image.ResizeMethod.BICUBIC}) 
Example #23
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_random_black_patches(self):
    preprocessor_text_proto = """
    random_black_patches {
      max_black_patches: 20
      probability: 0.95
      size_to_image_ratio: 0.12
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.random_black_patches)
    self.assert_dictionary_close(args, {'max_black_patches': 20,
                                        'probability': 0.95,
                                        'size_to_image_ratio': 0.12}) 
Example #24
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_random_crop_pad_image(self):
    preprocessor_text_proto = """
    random_crop_pad_image {
      min_object_covered: 0.75
      min_aspect_ratio: 0.75
      max_aspect_ratio: 1.5
      min_area: 0.25
      max_area: 0.875
      overlap_thresh: 0.5
      random_coef: 0.125
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.random_crop_pad_image)
    self.assertEqual(args, {
        'min_object_covered': 0.75,
        'aspect_ratio_range': (0.75, 1.5),
        'area_range': (0.25, 0.875),
        'overlap_thresh': 0.5,
        'random_coef': 0.125,
        'min_padded_size_ratio': None,
        'max_padded_size_ratio': None,
        'pad_color': None,
    }) 
Example #25
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_ssd_random_crop(self):
    preprocessor_text_proto = """
    ssd_random_crop {
      operations {
        min_object_covered: 0.0
        min_aspect_ratio: 0.875
        max_aspect_ratio: 1.125
        min_area: 0.5
        max_area: 1.0
        overlap_thresh: 0.0
        random_coef: 0.375
      }
      operations {
        min_object_covered: 0.25
        min_aspect_ratio: 0.75
        max_aspect_ratio: 1.5
        min_area: 0.5
        max_area: 1.0
        overlap_thresh: 0.25
        random_coef: 0.375
      }
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.ssd_random_crop)
    self.assertEqual(args, {'min_object_covered': [0.0, 0.25],
                            'aspect_ratio_range': [(0.875, 1.125), (0.75, 1.5)],
                            'area_range': [(0.5, 1.0), (0.5, 1.0)],
                            'overlap_thresh': [0.0, 0.25],
                            'random_coef': [0.375, 0.375]}) 
Example #26
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_random_jitter_boxes(self):
    preprocessor_text_proto = """
    random_jitter_boxes {
      ratio: 0.1
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.random_jitter_boxes)
    self.assert_dictionary_close(args, {'ratio': 0.1}) 
Example #27
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_random_resize_method(self):
    preprocessor_text_proto = """
    random_resize_method {
      target_height: 75
      target_width: 100
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.random_resize_method)
    self.assert_dictionary_close(args, {'target_size': [75, 100]}) 
Example #28
Source File: preprocessor_builder_test.py    From DOTA_models with Apache License 2.0
def test_build_ssd_random_crop_empty_operations(self):
    preprocessor_text_proto = """
    ssd_random_crop {
    }
    """
    preprocessor_proto = preprocessor_pb2.PreprocessingStep()
    text_format.Merge(preprocessor_text_proto, preprocessor_proto)
    function, args = preprocessor_builder.build(preprocessor_proto)
    self.assertEqual(function, preprocessor.ssd_random_crop)
    self.assertEqual(args, {}) 
Example #29
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_default_arg_scope_has_conv2d_op(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l1_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    self.assertTrue(self._get_scope_key(slim.conv2d) in scope) 
Example #30
Source File: hyperparams_builder_test.py    From DOTA_models with Apache License 2.0
def test_default_arg_scope_has_separable_conv2d_op(self):
    conv_hyperparams_text_proto = """
      regularizer {
        l1_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
    conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
    text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
    scope = hyperparams_builder.build(conv_hyperparams_proto, is_training=True)
    self.assertTrue(self._get_scope_key(slim.separable_conv2d) in scope)