Python google.protobuf.json_format.ParseDict() Examples

The following are 30 code examples of google.protobuf.json_format.ParseDict(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module google.protobuf.json_format, or try the search function.
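Before the project-specific examples, here is a minimal, self-contained sketch of the basic ParseDict()/MessageToDict() round trip. It uses the well-known Struct type so that only the protobuf package itself is needed; the dictionary contents are made up for illustration.

from google.protobuf import json_format
from google.protobuf import struct_pb2

# Parse a plain Python dict into a protobuf message in place.
# ParseDict mutates `message` and also returns it for convenience.
message = struct_pb2.Struct()
json_format.ParseDict({"name": "example", "count": 3}, message)

# Convert back to a dict to check the round trip.
# Struct stores all numbers as doubles, so 3 comes back as 3.0.
print(json_format.MessageToDict(message))  # e.g. {'name': 'example', 'count': 3.0}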
Example #1
Source File: engine.py    From recipes-py with Apache License 2.0
def _get_engine_properties(properties):
  """Retrieve and resurrect JSON serialized engine properties from all
  properties passed to recipe.

  The serialized value is associated with key '$recipe_engine'.

  Args:

    * properties (dict): All input properties passed to the recipe

  Returns an engine_properties_pb2.EngineProperties object.
  """
  return jsonpb.ParseDict(
    properties.get('$recipe_engine', {}),
    engine_properties_pb2.EngineProperties(),
    ignore_unknown_fields=True) 
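The ignore_unknown_fields=True argument used above is what keeps parsing from failing when the input dict carries keys the target message does not define. A small illustrative sketch of the difference, using the descriptor_pb2 messages bundled with protobuf (the payload is made up):

from google.protobuf import descriptor_pb2
from google.protobuf import json_format

payload = {"name": "demo.proto", "not_a_real_field": 1}

# Strict parsing rejects keys the message type does not define.
try:
    json_format.ParseDict(payload, descriptor_pb2.FileDescriptorProto())
except json_format.ParseError as err:
    print(err)

# With ignore_unknown_fields=True the unknown key is silently dropped.
msg = json_format.ParseDict(
    payload, descriptor_pb2.FileDescriptorProto(), ignore_unknown_fields=True)
print(msg.name)  # demo.proto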
Example #2
Source File: model.py    From python-bigquery with Apache License 2.0
def from_api_repr(cls, resource):
        """Factory:  construct a model reference given its API representation

        Args:
            resource (Dict[str, object]):
                Model reference representation returned from the API

        Returns:
            google.cloud.bigquery.model.ModelReference:
                Model reference parsed from ``resource``.
        """
        ref = cls()
        # Keep a reference to the resource as a workaround to find unknown
        # field values.
        ref._properties = resource
        ref._proto = json_format.ParseDict(
            resource, types.ModelReference(), ignore_unknown_fields=True
        )
        return ref 
Example #3
Source File: bigquery_dts.py    From airflow with Apache License 2.0
def _disable_auto_scheduling(config: Union[dict, TransferConfig]) -> TransferConfig:
        """
        In the case of Airflow, the customer needs to create a transfer config
        with automatic scheduling disabled (via the UI, CLI or an Airflow operator)
        and then trigger a transfer run using a specialized Airflow operator that
        will call start_manual_transfer_runs.

        :param config: Data transfer configuration to create.
        :type config: Union[dict, google.cloud.bigquery_datatransfer_v1.types.TransferConfig]
        """
        config = MessageToDict(config) if isinstance(config, TransferConfig) else config
        new_config = copy(config)
        schedule_options = new_config.get("schedule_options")
        if schedule_options:
            disable_auto_scheduling = schedule_options.get(
                "disable_auto_scheduling", None
            )
            if disable_auto_scheduling is None:
                schedule_options["disable_auto_scheduling"] = True
        else:
            new_config["schedule_options"] = {"disable_auto_scheduling": True}
        return ParseDict(new_config, TransferConfig()) 
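The helper above follows a common pattern: convert a message (or accept a dict), fill in a default on the dict, and parse the result back into a fresh message. A generic sketch of that pattern, using the descriptor_pb2 types bundled with protobuf rather than the BigQuery Data Transfer API:

from google.protobuf import descriptor_pb2
from google.protobuf.json_format import MessageToDict, ParseDict

original = descriptor_pb2.FileDescriptorProto(name="demo.proto")

# Convert to a dict, keeping original proto field names.
config = MessageToDict(original, preserving_proto_field_name=True)

# Fill in a nested default only if the caller did not already set it,
# mirroring the schedule_options handling above.
config.setdefault("options", {}).setdefault("deprecated", True)

rebuilt = ParseDict(config, descriptor_pb2.FileDescriptorProto())
print(rebuilt.options.deprecated)  # True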
Example #4
Source File: kubernetes_runner_test.py    From tfx with Apache License 2.0
def _CreateKubernetesRunner(self, k8s_config_dict=None):
    self._serving_spec = infra_validator_pb2.ServingSpec()
    json_format.ParseDict({
        'tensorflow_serving': {
            'tags': ['1.15.0']},
        'kubernetes': k8s_config_dict or {},
        'model_name': self._model_name,
    }, self._serving_spec)
    serving_binary = serving_bins.parse_serving_binaries(self._serving_spec)[0]

    return kubernetes_runner.KubernetesRunner(
        model_path=path_utils.serving_model_path(self._model.uri),
        serving_binary=serving_binary,
        serving_spec=self._serving_spec) 
Example #5
Source File: common_serializers_test.py    From Cirq with Apache License 2.0
def op_proto(json_dict: Dict) -> v2.program_pb2.Operation:
    op = v2.program_pb2.Operation()
    json_format.ParseDict(json_dict, op)
    return op 
Example #6
Source File: api.py    From tfjs-to-tf with MIT License
def _convert_graph_def(message_dict: Dict[str, Any]) -> GraphDef:
    """
    Convert JSON to TF GraphDef message

    Args:
        message_dict: deserialised JSON message

    Returns:
        TF GraphDef message
    """
    message_dict = quirks.fix_node_attributes(message_dict)
    return ParseDict(message_dict, tf.compat.v1.GraphDef()) 
Example #7
Source File: testutils.py    From tfjs-to-tf with MIT License
def graph_to_model(graph: Union[tf.Graph, GraphDef, str],
                   weight_dict: Dict[str, Tensor] = {}) -> Callable:
    """Convert a TF v1 frozen graph to a TF v2 function for easy inference"""
    graph_def = graph
    if isinstance(graph, tf.Graph):
        graph_def = graph.as_graph_def()
    elif isinstance(graph, str):
        # graph is a file name: load graph from disk
        if graph.endswith('.json'):
            with open(graph, 'r') as json_file:
                message_dict = json.loads(json_file.read())
            graph_def = ParseDict(message_dict, GraphDef())
        elif graph.endswith('.h5'):
            # Keras model - just load and return as-is
            return tf.keras.models.load_model(graph)
        else:
            with open(graph, 'rb') as proto_file:
                string = proto_file.read()
            graph_def = GraphDef()
            graph_def.ParseFromString(string)

    tensor_dict = dict()

    def _imports_graph_def():
        for name, data in weight_dict.items():
            tensor_dict[name] = tf.convert_to_tensor(data)
        tf.graph_util.import_graph_def(graph_def, tensor_dict, name='')

    wrapped_import = tf.compat.v1.wrap_function(_imports_graph_def, [])
    import_graph = wrapped_import.graph
    inputs = [(node.name+':0') for node in get_inputs(graph_def)]
    outputs = [(node.name+':0') for node in get_outputs(graph_def)]
    return wrapped_import.prune(
        tf.nest.map_structure(import_graph.as_graph_element, inputs),
        tf.nest.map_structure(import_graph.as_graph_element, outputs)) 
Example #8
Source File: testutils.py    From tfjs-to-tf with MIT License
def node_proto_from_json(node_json: str) -> NodeDef:
    """Return a nodedef protobuf message from a raw JSON string"""
    node_dict = json.loads(node_json)
    node_def = ParseDict(node_dict, NodeDef())
    return node_def 
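Assuming TensorFlow is installed and NodeDef here refers to tensorflow.core.framework.node_def_pb2.NodeDef, a call to the helper above might look like this (the node contents are made up for illustration):

node = node_proto_from_json('{"name": "input", "op": "Placeholder"}')
print(node.name, node.op)  # input Placeholder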
Example #9
Source File: routine.py    From python-bigquery with Apache License 2.0
def return_type(self):
        """google.cloud.bigquery_v2.types.StandardSqlDataType: Return type of
        the routine.

        If absent, the return type is inferred from
        :attr:`~google.cloud.bigquery.routine.Routine.body` at query time in
        each query that references this routine. If present, then the
        evaluated result will be cast to the specified returned type at query
        time.

        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine.FIELDS.return_type
        """
        resource = self._properties.get(self._PROPERTY_TO_API_FIELD["return_type"])
        if not resource:
            return resource
        output = google.cloud.bigquery_v2.types.StandardSqlDataType()
        output = json_format.ParseDict(resource, output, ignore_unknown_fields=True)
        return output 
Example #10
Source File: routine.py    From python-bigquery with Apache License 2.0
def data_type(self):
        """Optional[google.cloud.bigquery_v2.types.StandardSqlDataType]: Type
        of a variable, e.g., a function argument.

        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.data_type
        """
        resource = self._properties.get(self._PROPERTY_TO_API_FIELD["data_type"])
        if not resource:
            return resource
        output = google.cloud.bigquery_v2.types.StandardSqlDataType()
        output = json_format.ParseDict(resource, output, ignore_unknown_fields=True)
        return output 
Example #11
Source File: model.py    From python-bigquery with Apache License 2.0
def from_api_repr(cls, resource):
        """Factory: construct a model resource given its API representation

        Args:
            resource (Dict[str, object]):
                Model resource representation from the API

        Returns:
            google.cloud.bigquery.model.Model: Model parsed from ``resource``.
        """
        this = cls(None)
        # Keep a reference to the resource as a workaround to find unknown
        # field values.
        this._properties = resource

        # Convert from millis-from-epoch to timestamp well-known type.
        # TODO: Remove this hack once CL 238585470 hits prod.
        resource = copy.deepcopy(resource)
        for training_run in resource.get("trainingRuns", ()):
            start_time = training_run.get("startTime")
            if not start_time or "-" in start_time:  # Already right format?
                continue
            start_time = datetime_helpers.from_microseconds(1e3 * float(start_time))
            training_run["startTime"] = datetime_helpers.to_rfc3339(start_time)

        this._proto = json_format.ParseDict(
            resource, types.Model(), ignore_unknown_fields=True
        )
        return this 
Example #12
Source File: tensorflow_serving_client_test.py    From tfx with Apache License 2.0
def _make_response(
    payload: Dict[Text, Any]) -> get_model_status_pb2.GetModelStatusResponse:
  result = get_model_status_pb2.GetModelStatusResponse()
  json_format.ParseDict(payload, result)
  return result 
Example #13
Source File: executor_test.py    From tfx with Apache License 2.0
def _make_serving_spec(
    payload: Dict[Text, Any]) -> infra_validator_pb2.ServingSpec:
  result = infra_validator_pb2.ServingSpec()
  json_format.ParseDict(payload, result)
  return result 
Example #14
Source File: executor_test.py    From tfx with Apache License 2.0
def _make_validation_spec(
    payload: Dict[Text, Any]) -> infra_validator_pb2.ValidationSpec:
  result = infra_validator_pb2.ValidationSpec()
  json_format.ParseDict(payload, result)
  return result 
Example #15
Source File: executor_test.py    From tfx with Apache License 2.0
def _make_request_spec(
    payload: Dict[Text, Any]) -> infra_validator_pb2.RequestSpec:
  result = infra_validator_pb2.RequestSpec()
  json_format.ParseDict(payload, result)
  return result 
Example #16
Source File: arg_func_langs_test.py    From Cirq with Apache License 2.0
def test_serialize_conversion(value: ARG_LIKE, proto: v2.program_pb2.Arg):
    msg = v2.program_pb2.Arg()
    json_format.ParseDict(proto, msg)
    packed = json_format.MessageToDict(_arg_to_proto(value,
                                                     arg_function_language=''),
                                       including_default_value_fields=True,
                                       preserving_proto_field_name=True,
                                       use_integers_for_enums=True)
    assert packed == proto 
Example #17
Source File: local_docker_runner_test.py    From tfx with Apache License 2.0
def _create_serving_spec(payload: Dict[Text, Any]):
  result = infra_validator_pb2.ServingSpec()
  json_format.ParseDict(payload, result)
  return result 
Example #18
Source File: request_builder_test.py    From tfx with Apache License 2.0
def _make_saved_model(payload: Dict[Text, Any]):
  result = saved_model_pb2.SavedModel()
  json_format.ParseDict(payload, result)
  return result 
Example #19
Source File: request_builder_test.py    From tfx with Apache License 2.0
def _make_signature_def(payload: Dict[Text, Any]):
  result = meta_graph_pb2.SignatureDef()
  json_format.ParseDict(payload, result)
  return result 
Example #20
Source File: request_builder_test.py    From tfx with Apache License 2.0
def _make_request_spec(payload: Dict[Text, Any]):
  result = infra_validator_pb2.RequestSpec()
  json_format.ParseDict(payload, result)
  return result 
Example #21
Source File: taranis_service.py    From taranis with BSD 3-Clause "New" or "Revised" License
def create_database(self, database: NewDatabaseModel):

        t = int((datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() * 1000)
        new_db = dict(name=database.name, created_at=t, updated_at=t, size=0)
        res = self.repo.create_one_database(new_db)
        # TODO Check result
        return ParseDict(new_db, DatabaseModel(), ignore_unknown_fields=True) 
Example #22
Source File: taranis_service.py    From taranis with BSD 3-Clause "New" or "Revised" License
def get_database(self, db_name):
        database = self.repo.find_one_database_by_name(db_name)
        if database is None:
            raise TaranisNotFoundError("Database {} not found".format(db_name))
        return ParseDict(database, DatabaseModel(), ignore_unknown_fields=True) 
Example #23
Source File: taranis_service.py    From taranis with BSD 3-Clause "New" or "Revised" License
def create_index(self, index: NewIndexModel):
        try:
            t = int((datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() * 1000)

            new_index = IndexModel()
            new_index.created_at = t
            new_index.updated_at = t
            new_index.state = IndexModel.State.CREATED

            new_dict_index = MessageToDict(ParseDict(MessageToDict(index, preserving_proto_field_name=True), new_index),
                                           preserving_proto_field_name=True)

            res = self.repo.create_one_index(new_dict_index)

            config = json.loads(index.config)

            if config["index_type"] == "IVFPQ":
                dimension = config["dimension"]
                n_list = config["n_list"]
                n_probes = config["n_probes"]
                index_type = "IVF{},PQ{}np".format(n_list, n_probes)

                metric_type = cpp_taranis.Faiss.MetricType.METRIC_L2
                if config["metric"] == "METRIC_L1":
                    metric_type = cpp_taranis.Faiss.MetricType.METRIC_L1
                elif config["metric"] == "METRIC_L2":
                    metric_type = cpp_taranis.Faiss.MetricType.METRIC_L2

                self.faiss_wrapper.create_index(index.db_name, index.index_name, dimension, index_type, metric_type,
                                                n_probes)
            else:
                raise TaranisNotImplementedError(
                    "Can't create index because of unknown index type {}".format(index.config["index_type"]))
        except DuplicateKeyError as e:
            raise TaranisAlreadyExistsError("Index name {} already exists".format(index.index_name))
        return index 
Example #24
Source File: taranis_service.py    From taranis with BSD 3-Clause "New" or "Revised" License
def get_index(self, db_name, index_name):
        index = self.faiss_wrapper.get_index(db_name, index_name)
        if index is None:
            raise TaranisNotFoundError("Can't find index {} for database {}".format(index_name, db_name))
        res = self.repo.find_one_index_by_index_name_and_db_name(index_name, db_name)
        if res is None:
            raise TaranisNotFoundError("Can't find index {} for database {}".format(index_name, db_name))
        return ParseDict(res, IndexModel(), ignore_unknown_fields=True) 
Example #25
Source File: json_format_test.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def testExtensionToDictAndBack(self):
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    message_dict = json_format.MessageToDict(
        message
    )
    parsed_message = unittest_mset_pb2.TestMessageSetContainer()
    json_format.ParseDict(message_dict, parsed_message)
    self.assertEqual(message, parsed_message) 
Example #26
Source File: json_format_test.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def testParseDict(self):
    expected = 12345
    js_dict = {'int32Value': expected}
    message = json_format_proto3_pb2.TestMessage()
    json_format.ParseDict(js_dict, message)
    self.assertEqual(expected, message.int32_value) 
Example #27
Source File: json_format_test.py    From keras-lambda with MIT License
def testParseDict(self):
    expected = 12345
    js_dict = {'int32Value': expected}
    message = json_format_proto3_pb2.TestMessage()
    json_format.ParseDict(js_dict, message)
    self.assertEqual(expected, message.int32_value) 
Example #28
Source File: test_stackdriver.py    From airflow with Apache License 2.0
def test_stackdriver_disable_notification_channel(self, mock_channel_client,
                                                      mock_get_creds_and_project_id):
        hook = stackdriver.StackdriverHook()
        notification_channel_enabled = ParseDict(TEST_NOTIFICATION_CHANNEL_1,
                                                 monitoring_v3.types.notification_pb2.NotificationChannel())
        notification_channel_disabled = ParseDict(TEST_NOTIFICATION_CHANNEL_2,
                                                  monitoring_v3.types.notification_pb2.NotificationChannel())
        mock_channel_client.return_value.list_notification_channels.return_value = [
            notification_channel_enabled,
            notification_channel_disabled
        ]

        hook.disable_notification_channels(
            filter_=TEST_FILTER,
            project_id=PROJECT_ID,
        )

        notification_channel_enabled.enabled.value = False  # pylint: disable=no-member
        mask = monitoring_v3.types.field_mask_pb2.FieldMask()
        mask.paths.append('enabled')  # pylint: disable=no-member
        mock_channel_client.return_value.update_notification_channel.assert_called_once_with(
            notification_channel=notification_channel_enabled,
            update_mask=mask,
            retry=DEFAULT,
            timeout=DEFAULT,
            metadata=None,
        ) 
Example #29
Source File: api.py    From recipes-py with Apache License 2.0
def initialize(self):
    # Add other LUCI_CONTEXT sections in the following dict to support
    # modification through this module.
    init_sections = {
      'luciexe': sections_pb2.LUCIExe,
    }
    ctx = self._lucictx_client.context
    for section_key, section_msg_class in init_sections.iteritems():
      if section_key in ctx:
        self._state.luci_context[section_key] = (
            jsonpb.ParseDict(ctx[section_key],
                             section_msg_class(),
                             ignore_unknown_fields=True)) 
Example #30
Source File: common.py    From recipes-py with Apache License 2.0
def deserialize(data):
  """Deserializes an invocation bundle. Opposite of serialize()."""
  ret = {}

  def parse_msg(msg, body):
    return json_format.ParseDict(
        body, msg,
        # Do not fail the build because recipe's proto copy is stale.
        ignore_unknown_fields=True
    )

  for line in data.splitlines():
    entry = json.loads(line)
    assert isinstance(entry, dict), line

    inv_id = entry['invocationId']
    inv = ret.get(inv_id)
    if not inv:
      inv = Invocation()
      ret[inv_id] = inv

    inv_dict = entry.get('invocation')
    if inv_dict is not None:
      # Invocation is special because there can be only one invocation
      # per invocation id.
      parse_msg(inv.proto, inv_dict)
      continue

    found = False
    for attr_name, type, key in Invocation._COLLECTIONS:
      if key in entry:
        found = True
        collection = getattr(inv, attr_name)
        collection.append(parse_msg(type(), entry[key]))
        break
    assert found, entry

  return ret