Python google.protobuf.json_format.MessageToDict() Examples

The following are 30 code examples of google.protobuf.json_format.MessageToDict(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module google.protobuf.json_format, or try the search function.
Example #1
Source File: test_api.py    From recipes-py with Apache License 2.0 6 votes vote down vote up
def environ(self, *proto_msgs, **kwargs):
    """Sets environment data for this test case."""
    # NOTE(review): the positional-arg error below mentions api.properties —
    # confirm whether it should refer to api.environ instead.
    ret = self.test(None)

    dicts = []
    for pb in proto_msgs:
        if not isinstance(pb, PBMessage):
            raise ValueError(
                'Positional arguments for api.properties must be protobuf messages.'
                ' Got: %r (type %r)' % (pb, type(pb)))
        dicts.append(jsonpb.MessageToDict(
            pb, preserving_proto_field_name=True))
    dicts.append(kwargs)

    # Flatten every mapping into the test's environ, coercing values to str.
    # (Python 2 code: iteritems/basestring are intentional.)
    for mapping in dicts:
        for name, val in mapping.iteritems():
            if not isinstance(val, (int, float, basestring)):
                raise ValueError(
                    'Environment values must be int, float or string. '
                    'Got: %r=%r (type %r)' % (name, val, type(val)))
            ret.environ[name] = str(val)

    return ret
Example #2
Source File: cmd.py    From recipes-py with Apache License 2.0 6 votes vote down vote up
def test_rolls(recipe_deps, verbose_json):
    """Evaluate roll candidates for the recipe deps and return a summary dict.

    The summary always carries success/trivial flags, the per-roll details and
    the rejected-candidate count; with verbose_json it also carries the repo
    specs of the rejected candidates.
    """
    candidates, rejected, repos = get_roll_candidates(recipe_deps)

    details = []
    picked = None
    is_trivial = True
    if candidates:
        is_trivial, picked, details = process_candidates(
            recipe_deps, candidates, repos, verbose_json)

    summary = {
        # it counts as success if there are no candidates at all :)
        'success': bool(not candidates or picked),
        'trivial': is_trivial,
        'roll_details': details,
        'picked_roll_details': picked,
        'rejected_candidates_count': len(rejected),
    }
    if verbose_json:
        specs = []
        for cand in rejected:
            specs.append(jsonpb.MessageToDict(
                cand.repo_spec, preserving_proto_field_name=True))
        summary['rejected_candidate_specs'] = specs
    return summary
Example #3
Source File: dlp.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Create a DLP de-identify template, or fetch the existing one on
    AlreadyExists; returns the template serialized as a dict."""
    hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    shared = dict(
        organization_id=self.organization_id,
        project_id=self.project_id,
        template_id=self.template_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    try:
        template = hook.create_deidentify_template(
            deidentify_template=self.deidentify_template, **shared)
    except AlreadyExists:
        # A template with this id already exists — fall back to fetching it.
        template = hook.get_deidentify_template(**shared)

    return MessageToDict(template)
Example #4
Source File: json_format_test.py    From lambda-packs with MIT License 6 votes vote down vote up
def testExtensionSerializationDictMatchesProto3Spec(self):
    """See go/proto3-json-spec for spec.
    """
    container = unittest_mset_pb2.TestMessageSetContainer()
    first_ext = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    second_ext = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    container.message_set.Extensions[first_ext].i = 23
    container.message_set.Extensions[second_ext].str = 'foo'
    actual = json_format.MessageToDict(container)
    # Extensions serialize under bracketed full-type keys per the spec.
    expected = {
        'messageSet': {
            ('[protobuf_unittest.'
             'TestMessageSetExtension1.messageSetExtension]'): {
                'i': 23,
            },
            ('[protobuf_unittest.'
             'TestMessageSetExtension2.messageSetExtension]'): {
                'str': 'foo',
            },
        },
    }
    self.assertEqual(expected, actual)
Example #5
Source File: dlp.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Create a DLP inspect template, or fetch the existing one on
    AlreadyExists; returns the template serialized as a dict."""
    hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    shared = dict(
        organization_id=self.organization_id,
        project_id=self.project_id,
        template_id=self.template_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    try:
        template = hook.create_inspect_template(
            inspect_template=self.inspect_template, **shared)
    except AlreadyExists:
        # A template with this id already exists — fall back to fetching it.
        template = hook.get_inspect_template(**shared)
    return MessageToDict(template)
Example #6
Source File: dlp.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Create a DLP job trigger, or fetch the existing one on AlreadyExists;
    returns the trigger serialized as a dict."""
    hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    call_opts = dict(retry=self.retry, timeout=self.timeout,
                     metadata=self.metadata)
    try:
        trigger = hook.create_job_trigger(
            project_id=self.project_id,
            job_trigger=self.job_trigger,
            trigger_id=self.trigger_id,
            **call_opts)
    except AlreadyExists:
        # Note the parameter-name difference: create takes trigger_id,
        # get takes job_trigger_id.
        trigger = hook.get_job_trigger(
            project_id=self.project_id,
            job_trigger_id=self.trigger_id,
            **call_opts)
    return MessageToDict(trigger)
Example #7
Source File: dlp.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Create a DLP stored info type, or fetch the existing one on
    AlreadyExists; returns it serialized as a dict."""
    hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    shared = dict(
        organization_id=self.organization_id,
        project_id=self.project_id,
        stored_info_type_id=self.stored_info_type_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    try:
        info = hook.create_stored_info_type(config=self.config, **shared)
    except AlreadyExists:
        # The stored info type already exists — fall back to fetching it.
        info = hook.get_stored_info_type(**shared)
    return MessageToDict(info)
Example #8
Source File: test_vision.py    From airflow with Apache License 2.0 6 votes vote down vote up
def test_update_product_no_explicit_name(self, get_conn):
    """update_product should fill in the canonical product path when the
    Product has no explicit name."""
    # Given
    product = Product()
    update_method = get_conn.return_value.update_product
    update_method.return_value = product
    expected_name = ProductSearchClient.product_path(
        PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
    # When
    result = self.hook.update_product(
        location=LOC_ID_TEST,
        product_id=PRODUCT_ID_TEST,
        product=product,
        update_mask=None,
        project_id=PROJECT_ID_TEST,
        retry=None,
        timeout=None,
        metadata=None,
    )
    # Then
    self.assertEqual(result, MessageToDict(product))
    update_method.assert_called_once_with(
        product=Product(name=expected_name),
        metadata=None,
        retry=None,
        timeout=None,
        update_mask=None,
    )
Example #9
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """List AutoML datasets, push their ids to XCom under "dataset_id_list",
    and return the datasets serialized as dicts."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Requesting datasets")
    datasets = [
        MessageToDict(ds)
        for ds in hook.list_datasets(
            location=self.location,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    ]
    self.log.info("Datasets obtained.")

    self.xcom_push(
        context,
        key="dataset_id_list",
        value=[hook.extract_object_id(item) for item in datasets],
    )
    return datasets
Example #10
Source File: test_api.py    From recipes-py with Apache License 2.0 6 votes vote down vote up
def __call__(self, hostname=None, triggers=None):
    """Emulates scheduler module state.

    triggers must be None or a list of triggers_pb2.Trigger objects.
    """
    # Python 2 code: basestring is intentional.
    assert hostname is None or isinstance(hostname, basestring)
    trigger_list = triggers or []
    assert all(isinstance(t, triggers_pb2.Trigger) for t in trigger_list)
    ret = self.test(None)
    ret.properties.update(**{
        '$recipe_engine/scheduler': {
            'hostname': hostname,
            'triggers': [json_format.MessageToDict(t) for t in trigger_list],
        },
    })
    return ret
Example #11
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """List the table specs of an AutoML dataset and return them as dicts."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Requesting table specs for %s.", self.dataset_id)
    specs = [
        MessageToDict(item)
        for item in hook.list_table_specs(
            dataset_id=self.dataset_id,
            filter_=self.filter_,
            page_size=self.page_size,
            location=self.location,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    ]
    self.log.info(specs)
    self.log.info("Table specs obtained.")
    return specs
Example #12
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Deploy an AutoML model, block until the operation finishes, and return
    the operation result as a dict."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Deploying model_id %s", self.model_id)

    operation = hook.deploy_model(
        model_id=self.model_id,
        location=self.location,
        project_id=self.project_id,
        image_detection_metadata=self.image_detection_metadata,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    # operation.result() waits for the long-running operation to complete.
    deployed = MessageToDict(operation.result())
    self.log.info("Model deployed.")
    return deployed
Example #13
Source File: protobuf.py    From hiku with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def _transform(pb_node):
    """Recursively convert a protobuf query node into a Node of Fields/Links.

    Raises TypeError for items with an empty name or no value set.
    """
    children = []
    for item in pb_node.items:
        kind = item.WhichOneof('value')
        if kind == 'field':
            field = item.field
            if not field.name:
                raise TypeError('Field name is empty: {!r}'.format(item))
            opts = (MessageToDict(field.options)
                    if field.HasField('options') else None)
            children.append(Field(field.name, opts))
        elif kind == 'link':
            link = item.link
            if not link.name:
                raise TypeError('Link name is empty: {!r}'.format(item))
            opts = (MessageToDict(link.options)
                    if link.HasField('options') else None)
            children.append(Link(link.name, _transform(link.node), opts))
        else:
            raise TypeError('Node item is empty: {!r}'.format(item))
    return Node(children)
Example #14
Source File: get.py    From recipes-py with Apache License 2.0 6 votes vote down vote up
def GenTests(api):
    """Yields simulation test cases for the buildbucket get recipe."""
    yield (
        api.test('basic') +
        api.buildbucket.simulated_get(build_pb2.Build(
            id=9016911228971028736, status=common_pb2.SUCCESS)) +
        api.buildbucket.simulated_get_multi([
            build_pb2.Build(id=9016911228971028736, status=common_pb2.SUCCESS),
            build_pb2.Build(id=9016911228971028737, status=common_pb2.SUCCESS),
        ]) +
        api.buildbucket.simulated_buildbucket_output(
            None, step_name='legacy_get'))

    error_batch = builds_service_pb2.BatchResponse(
        responses=[dict(error=dict(message='there was a problem'))])
    yield (
        api.test('failed request') +
        api.step_data(
            'buildbucket.get',
            api.json.output_stream(json_format.MessageToDict(error_batch))))
Example #15
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Create an AutoML model, wait for the operation, push the model id to
    XCom under "model_id", and return the result as a dict."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Creating model.")
    operation = hook.create_model(
        model=self.model,
        location=self.location,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    # operation.result() waits for the long-running operation to complete.
    created = MessageToDict(operation.result())
    model_id = hook.extract_object_id(created)
    self.log.info("Model created: %s", model_id)

    self.xcom_push(context, key="model_id", value=model_id)
    return created
Example #16
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Run an AutoML batch prediction, wait for the operation, and return the
    result as a dict."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Fetch batch prediction.")
    operation = hook.batch_predict(
        model_id=self.model_id,
        input_config=self.input_config,
        output_config=self.output_config,
        project_id=self.project_id,
        location=self.location,
        params=self.params,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    # operation.result() waits for the long-running operation to complete.
    prediction = MessageToDict(operation.result())
    self.log.info("Batch prediction ready.")
    return prediction
Example #17
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """Create an AutoML dataset, push its id to XCom under "dataset_id", and
    return the dataset serialized as a dict."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Creating dataset")
    created = MessageToDict(hook.create_dataset(
        dataset=self.dataset,
        location=self.location,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    ))
    dataset_id = hook.extract_object_id(created)
    self.log.info("Creating completed. Dataset id: %s", dataset_id)

    self.xcom_push(context, key="dataset_id", value=dataset_id)
    return created
Example #18
Source File: automl.py    From airflow with Apache License 2.0 6 votes vote down vote up
def execute(self, context):
    """List the column specs of an AutoML table spec and return them as
    dicts."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Requesting column specs.")
    specs = [
        MessageToDict(item)
        for item in hook.list_column_specs(
            dataset_id=self.dataset_id,
            table_spec_id=self.table_spec_id,
            field_mask=self.field_mask,
            filter_=self.filter_,
            page_size=self.page_size,
            location=self.location,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    ]
    self.log.info("Columns specs obtained.")

    return specs
Example #19
Source File: dlp.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Update a DLP stored info type and return the updated resource as a
    dict."""
    dlp_hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    request = {
        "stored_info_type_id": self.stored_info_type_id,
        "organization_id": self.organization_id,
        "project_id": self.project_id,
        "config": self.config,
        "update_mask": self.update_mask,
        "retry": self.retry,
        "timeout": self.timeout,
        "metadata": self.metadata,
    }
    return MessageToDict(dlp_hook.update_stored_info_type(**request))
Example #20
Source File: dlp.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Update a DLP inspect template and return the updated template as a
    dict."""
    dlp_hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    request = {
        "template_id": self.template_id,
        "organization_id": self.organization_id,
        "project_id": self.project_id,
        "inspect_template": self.inspect_template,
        "update_mask": self.update_mask,
        "retry": self.retry,
        "timeout": self.timeout,
        "metadata": self.metadata,
    }
    return MessageToDict(dlp_hook.update_inspect_template(**request))
Example #21
Source File: dlp.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Update a DLP job trigger and return the updated trigger as a dict."""
    dlp_hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    request = {
        "job_trigger_id": self.job_trigger_id,
        "project_id": self.project_id,
        "job_trigger": self.job_trigger,
        "update_mask": self.update_mask,
        "retry": self.retry,
        "timeout": self.timeout,
        "metadata": self.metadata,
    }
    return MessageToDict(dlp_hook.update_job_trigger(**request))
Example #22
Source File: dlp.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Update a DLP de-identify template and return the updated template as a
    dict."""
    dlp_hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    request = {
        "template_id": self.template_id,
        "organization_id": self.organization_id,
        "project_id": self.project_id,
        "deidentify_template": self.deidentify_template,
        "update_mask": self.update_mask,
        "retry": self.retry,
        "timeout": self.timeout,
        "metadata": self.metadata,
    }
    return MessageToDict(dlp_hook.update_deidentify_template(**request))
Example #23
Source File: dlp.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Fetch a DLP de-identify template and return it serialized as a dict."""
    dlp_hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
    request = {
        "template_id": self.template_id,
        "organization_id": self.organization_id,
        "project_id": self.project_id,
        "retry": self.retry,
        "timeout": self.timeout,
        "metadata": self.metadata,
    }
    return MessageToDict(dlp_hook.get_deidentify_template(**request))
Example #24
Source File: natural_language.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Classify the configured document with Cloud Natural Language and return
    the response as a dict."""
    nl_hook = CloudNaturalLanguageHook(gcp_conn_id=self.gcp_conn_id)

    self.log.info("Start text classify")
    response = nl_hook.classify_text(
        document=self.document,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    self.log.info("Finished text classify")

    return MessageToDict(response)
Example #25
Source File: natural_language.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Run sentiment analysis on the configured document and return the
    response as a dict."""
    nl_hook = CloudNaturalLanguageHook(gcp_conn_id=self.gcp_conn_id)

    self.log.info("Start sentiment analyze")
    response = nl_hook.analyze_sentiment(
        document=self.document,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    self.log.info("Finished sentiment analyze")

    return MessageToDict(response)
Example #26
Source File: natural_language.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Run entity-sentiment analysis on the configured document and return the
    response as a dict."""
    nl_hook = CloudNaturalLanguageHook(gcp_conn_id=self.gcp_conn_id)

    self.log.info("Start entity sentiment analyze")
    response = nl_hook.analyze_entity_sentiment(
        document=self.document,
        encoding_type=self.encoding_type,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    self.log.info("Finished entity sentiment analyze")

    return MessageToDict(response)
Example #27
Source File: natural_language.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Run entity analysis on the configured document and return the response
    as a dict."""
    nl_hook = CloudNaturalLanguageHook(gcp_conn_id=self.gcp_conn_id)

    self.log.info("Start analyzing entities")
    response = nl_hook.analyze_entities(
        document=self.document,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    self.log.info("Finished analyzing entities")

    return MessageToDict(response)
Example #28
Source File: bigquery_dts.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Create a BigQuery Data Transfer Service config, push its id to XCom
    under "transfer_config_id", and return the config as a dict."""
    hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Creating DTS transfer config")
    response = hook.create_transfer_config(
        project_id=self.project_id,
        transfer_config=self.transfer_config,
        authorization_code=self.authorization_code,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    config = MessageToDict(response)
    config_id = get_object_id(config)
    self.log.info("Created DTS transfer config %s", config_id)
    self.xcom_push(context, key="transfer_config_id", value=config_id)
    return config
Example #29
Source File: automl.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Delete an AutoML model, wait for the operation, and return the
    operation result as a dict."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    operation = hook.delete_model(
        model_id=self.model_id,
        location=self.location,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    # operation.result() waits for the long-running operation to complete.
    return MessageToDict(operation.result())
Example #30
Source File: automl.py    From airflow with Apache License 2.0 5 votes vote down vote up
def execute(self, context):
    """Update an AutoML dataset and return the updated dataset as a dict."""
    hook = CloudAutoMLHook(gcp_conn_id=self.gcp_conn_id)
    self.log.info("Updating AutoML dataset %s.", self.dataset["name"])
    updated = hook.update_dataset(
        dataset=self.dataset,
        update_mask=self.update_mask,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    self.log.info("Dataset updated.")
    return MessageToDict(updated)