Python google.protobuf.message.DecodeError() Examples

The following are 27 code examples of google.protobuf.message.DecodeError(), drawn from open-source projects. The source file, project, and license for each example are noted above its code. You may also want to check out all available functions/classes of the module google.protobuf.message.
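Every example below follows the same basic pattern: bytes are fed to a generated message's ParseFromString() or FromString(), and google.protobuf.message.DecodeError is caught when the payload is not valid protobuf wire format. A minimal, self-contained sketch of that pattern (the stock Timestamp message is used here purely for illustration):

from google.protobuf import message
from google.protobuf.timestamp_pb2 import Timestamp

ts = Timestamp()
try:
    # Four continuation bytes form a truncated varint, not valid wire format.
    ts.ParseFromString(b'\xff\xff\xff\xff')
except message.DecodeError as exc:
    print('DecodeError:', exc)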
Example #1
Source File: model_analyzer.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def profile_name_scope(self, options):
    """Profile the statistics of graph nodes, organized by name scope.

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a GraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.GraphNodeProto()
    try:
      tfprof_node.ParseFromString(
          print_mdl.Profile('scope'.encode('utf-8'), opts.SerializeToString()))
    except message.DecodeError:
      pass
    return tfprof_node 
Example #2
Source File: utils.py    From mars with Apache License 2.0
def deserialize_graph(ser_graph, graph_cls=None):
    from google.protobuf.message import DecodeError
    from .serialize.protos.graph_pb2 import GraphDef
    from .graph import DirectedGraph
    graph_cls = graph_cls or DirectedGraph
    ser_graph_bin = to_binary(ser_graph)
    g = GraphDef()
    try:
        g.ParseFromString(ser_graph_bin)
        return graph_cls.from_pb(g)
    except DecodeError:
        pass

    try:
        ser_graph_bin = zlib.decompress(ser_graph_bin)
        g.ParseFromString(ser_graph_bin)
        return graph_cls.from_pb(g)
    except (zlib.error, DecodeError):
        pass

    json_obj = json.loads(to_str(ser_graph))
    return graph_cls.from_json(json_obj) 
Example #3
Source File: handlers.py    From sawtooth-core with Apache License 2.0
def handle(self, connection_id, message_content):
        # If this is the configured consensus engine, make it active. This is
        # necessary for setting the active engine when the configured engine is
        # changed to an engine that is not registered yet
        request = consensus_pb2.ConsensusRegisterRequest()

        try:
            request.ParseFromString(message_content)
        except DecodeError:
            LOGGER.exception("Unable to decode ConsensusRegisterRequest")
            return HandlerResult(status=HandlerStatus.DROP)

        if request.additional_protocols is not None:
            additional_protocols = \
                [(p.name, p.version) for p in request.additional_protocols]
        else:
            additional_protocols = []

        self._proxy.activate_if_configured(
            request.name, request.version, additional_protocols)

        return HandlerResult(status=HandlerStatus.PASS) 
Example #4
Source File: message_test.py    From lambda-packs with MIT License
def testParseErrors(self, message_module):
    msg = message_module.TestAllTypes()
    self.assertRaises(TypeError, msg.FromString, 0)
    self.assertRaises(Exception, msg.FromString, '0')
    # TODO(jieluo): Fix cpp extension to raise error instead of warning.
    # b/27494216
    end_tag = encoder.TagBytes(1, 4)
    if api_implementation.Type() == 'python':
      with self.assertRaises(message.DecodeError) as context:
        msg.FromString(end_tag)
      self.assertEqual('Unexpected end-group tag.', str(context.exception))
    else:
      with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter('always')
        msg.FromString(end_tag)
        assert len(w) == 1
        assert issubclass(w[-1].category, RuntimeWarning)
        self.assertEqual('Unexpected end-group tag: Not all data was converted',
                         str(w[-1].message)) 
Example #5
Source File: messaging.py    From sawtooth-core with Apache License 2.0
async def start(self):
        """Starts receiving messages on the underlying socket and passes them
        to the message router.
        """
        self._is_running = True

        while self._is_running:
            try:
                zmq_msg = await self._socket.recv_multipart()

                message = Message()
                message.ParseFromString(zmq_msg[-1])

                await self._msg_router.route_msg(message)
            except DecodeError as e:
                LOGGER.warning('Unable to decode: %s', e)
            except zmq.ZMQError as e:
                LOGGER.warning('Unable to receive: %s', e)
                return
            except asyncio.CancelledError:
                self._is_running = False 
Example #6
Source File: route_handlers.py    From sawtooth-core with Apache License 2.0
def _parse_header(cls, header_proto, resource):
        """Deserializes a resource's base64 encoded Protobuf header.
        """
        header = header_proto()
        try:
            header_bytes = base64.b64decode(resource['header'])
            header.ParseFromString(header_bytes)
        except (KeyError, TypeError, ValueError, DecodeError):
            header = resource.get('header', None)
            LOGGER.error(
                'The validator sent a resource with %s %s',
                'a missing header' if header is None else 'an invalid header:',
                header or '')
            raise errors.ResourceHeaderInvalid()

        resource['header'] = cls._message_to_dict(header)
        return resource 
Example #7
Source File: client_handlers.py    From sawtooth-core with Apache License 2.0
def handle(self, connection_id, message_content):
        """Handles parsing incoming requests, and wrapping the final response.

        Args:
            connection_id (str): ZMQ identity sent over ZMQ socket
            message_content (bytes): Byte encoded request protobuf to be parsed

        Returns:
            HandlerResult: result to be sent in response back to client
        """
        try:
            request = self._request_proto()
            request.ParseFromString(message_content)
        except DecodeError:
            LOGGER.info('Protobuf %s failed to deserialize', request)
            return self._wrap_result(self._status.INTERNAL_ERROR)

        try:
            response = self._respond(request)
        except _ResponseFailed as e:
            response = e.status

        return self._wrap_result(response) 
Example #8
Source File: test_message_parser.py    From python_moztelemetry with Mozilla Public License 2.0
def test_unpack(data_dir, heka_format, try_snappy, strict, expected_count,
                expected_exception):
    count = 0
    threw_exception = False
    filename = "{}/test_{}.heka".format(data_dir, heka_format)
    with open(filename, "rb") as o:
        if "gzip" in heka_format:
            o = streaming_gzip_wrapper(o)
        try:
            for r, b in message_parser.unpack(o, try_snappy=try_snappy, strict=strict):
                j = json.loads(r.message.payload)
                assert count == j["seq"]
                count += 1
        except DecodeError:
            threw_exception = True

    assert count == expected_count
    assert threw_exception == expected_exception 
Example #9
Source File: predictor.py    From sagemaker-python-sdk with Apache License 2.0
def __call__(self, stream, content_type):
        """
        Args:
            stream:
            content_type:
        """
        try:
            data = stream.read()
        finally:
            stream.close()

        for possible_response in _possible_responses():
            try:
                response = possible_response()
                response.ParseFromString(data)
                return response
            except (UnicodeDecodeError, DecodeError):
                # given that the payload does not carry the response type, there is no way
                # to infer the response without keeping state, so I'm iterating over all
                # the options.
                pass
        raise ValueError("data is not in the expected format") 
Example #10
Source File: predictor.py    From sagemaker-python-sdk with Apache License 2.0
def __call__(self, stream, content_type):
        """
        Args:
            stream:
            content_type:
        """
        try:
            data = stream.read()
        finally:
            stream.close()

        for possible_response in _possible_responses():
            try:
                return protobuf_to_dict(json_format.Parse(data, possible_response()))
            except (UnicodeDecodeError, DecodeError, json_format.ParseError):
                # given that the payload does not carry the response type, there is no way
                # to infer the response without keeping state, so I'm iterating over all
                # the options.
                pass
        return json.loads(data.decode()) 
Example #11
Source File: delegation.py    From luci-py with Apache License 2.0
def deserialize_token(blob):
  """Coverts urlsafe base64 text to delegation_pb2.DelegationToken.

  Raises:
    BadTokenError if blob doesn't look like a valid DelegationToken.
  """
  if isinstance(blob, unicode):
    blob = blob.encode('ascii', 'ignore')
  try:
    as_bytes = b64.decode(blob)
  except (TypeError, ValueError) as exc:
    raise exceptions.BadTokenError('Not base64: %s' % exc)
  if len(as_bytes) > MAX_TOKEN_SIZE:
    raise exceptions.BadTokenError(
        'Unexpectedly huge token (%d bytes)' % len(as_bytes))
  try:
    return delegation_pb2.DelegationToken.FromString(as_bytes)
  except message.DecodeError as exc:
    raise exceptions.BadTokenError('Bad proto: %s' % exc) 
Example #12
Source File: uploader.py    From tensorboard with Apache License 2.0
def _filtered_graph_bytes(graph_bytes):
    try:
        graph_def = graph_pb2.GraphDef().FromString(graph_bytes)
    # The reason for the RuntimeWarning catch here is b/27494216, whereby
    # some proto parsers incorrectly raise that instead of DecodeError
    # on certain kinds of malformed input. Triggering this seems to require
    # a combination of mysterious circumstances.
    except (message.DecodeError, RuntimeWarning):
        logger.warning(
            "Could not parse GraphDef of size %d. Skipping.", len(graph_bytes),
        )
        return None
    # Use the default filter parameters:
    # limit_attr_size=1024, large_attrs_key="_too_large_attrs"
    process_graph.prepare_graph_for_ui(graph_def)
    return graph_def.SerializeToString() 
Example #13
Source File: beholder_plugin.py    From tensorboard with Apache License 2.0
def _fetch_current_frame(self):
        path = "{}/{}".format(
            self.PLUGIN_LOGDIR, shared_config.SUMMARY_FILENAME
        )
        with self._lock:
            try:
                frame = file_system_tools.read_tensor_summary(path).astype(
                    np.uint8
                )
                self.most_recent_frame = frame
                return frame
            except (message.DecodeError, IOError, tf.errors.NotFoundError):
                if self.most_recent_frame is None:
                    self.most_recent_frame = im_util.get_image_relative_to_script(
                        "no-data.png"
                    )
                return self.most_recent_frame 
Example #14
Source File: serialization.py    From dragon with BSD 2-Clause "Simplified" License
def _deserialize(s, proto):
    if not isinstance(s, bytes):
        raise ValueError(
            'Parameter s must be bytes, '
            'but got type: {}'
            .format(type(s))
        )

    if not (hasattr(proto, 'ParseFromString') and
            callable(proto.ParseFromString)):
        raise ValueError(
            'No ParseFromString method is detected. '
            '\ntype is {}'.format(type(proto))
        )

    decoded = cast(Optional[int], proto.ParseFromString(s))
    if decoded is not None and decoded != len(s):
        raise message.DecodeError(
            "Protobuf decoding consumed too few bytes: {} out of {}"
            .format(decoded, len(s))
        )
    return proto 
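A hedged usage sketch of the helper above, assuming _deserialize and its typing/protobuf imports are in scope (the stock Timestamp message stands in for any generated message class):

from google.protobuf.timestamp_pb2 import Timestamp

data = Timestamp(seconds=1).SerializeToString()
ts = _deserialize(data, Timestamp())  # parses in place and returns the same instance
assert ts.seconds == 1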
Example #15
Source File: message_test.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def testParseErrors(self, message_module):
    msg = message_module.TestAllTypes()
    self.assertRaises(TypeError, msg.FromString, 0)
    self.assertRaises(Exception, msg.FromString, '0')
    # TODO(jieluo): Fix cpp extension to raise error instead of warning.
    # b/27494216
    end_tag = encoder.TagBytes(1, 4)
    if api_implementation.Type() == 'python':
      with self.assertRaises(message.DecodeError) as context:
        msg.FromString(end_tag)
      self.assertEqual('Unexpected end-group tag.', str(context.exception))
    else:
      with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter('always')
        msg.FromString(end_tag)
        assert len(w) == 1
        assert issubclass(w[-1].category, RuntimeWarning)
        self.assertEqual('Unexpected end-group tag: Not all data was converted',
                         str(w[-1].message)) 
Example #16
Source File: model_analyzer.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def profile_python(self, options):
    """Profile the statistics of the Python codes.

      By default, it shows the call stack from root. To avoid
      redundant output, you may use options to filter as below
        options['show_name_regexes'] = ['.*my_code.py.*']

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a MultiGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.MultiGraphNodeProto()
    try:
      tfprof_node.ParseFromString(
          print_mdl.Profile('code'.encode('utf-8'), opts.SerializeToString()))
    except message.DecodeError:
      pass
    return tfprof_node 
Example #17
Source File: model_analyzer.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def profile_operations(self, options):
    """Profile the statistics of the Operation types (e.g. MatMul, Conv2D).

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.
    Returns:
      a MultiGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.MultiGraphNodeProto()
    try:
      tfprof_node.ParseFromString(
          print_mdl.Profile('op'.encode('utf-8'), opts.SerializeToString()))
    except message.DecodeError:
      pass
    return tfprof_node 
Example #18
Source File: maps.py    From asciimatics with Apache License 2.0
def _get_vector_tile(self, x_tile, y_tile, z_tile):
        """Load up a single vector tile."""
        cache_file = "mapscache/{}.{}.{}.json".format(z_tile, x_tile, y_tile)
        if cache_file not in self._tiles:
            if os.path.isfile(cache_file):
                with open(cache_file, 'rb') as f:
                    tile = json.loads(f.read().decode('utf-8'))
            else:
                url = _VECTOR_URL.format(z_tile, x_tile, y_tile, _KEY)
                data = requests.get(url).content
                try:
                    tile = mapbox_vector_tile.decode(data)
                    with open(cache_file, mode='w') as f:
                        json.dump(literal_eval(repr(tile)), f)
                except DecodeError:
                    tile = None
            if tile:
                self._tiles[cache_file] = [x_tile, y_tile, z_tile, tile, False]
                if len(self._tiles) > _CACHE_SIZE:
                    self._tiles.popitem(False)
                self._screen.force_update() 
Example #19
Source File: vgsl.py    From kraken with Apache License 2.0
def load_model(cls, path: str):
        """
        Deserializes a VGSL model from a CoreML file.

        Args:
            path (str): CoreML file

        Returns:
            A TorchVGSLModel instance.

        Raises:
            KrakenInvalidModelException if the model data is invalid (not a
            string, protobuf file, or without appropriate metadata).
            FileNotFoundError if the path doesn't point to a file.
        """
        try:
            mlmodel = MLModel(path)
        except TypeError as e:
            raise KrakenInvalidModelException(str(e))
        except DecodeError as e:
            raise KrakenInvalidModelException('Failure parsing model protobuf: {}'.format(str(e)))
        if 'vgsl' not in mlmodel.user_defined_metadata:
            raise KrakenInvalidModelException('No VGSL spec in model metadata')
        vgsl_spec = mlmodel.user_defined_metadata['vgsl']
        nn = cls(vgsl_spec)
        for name, layer in nn.nn.named_children():
            layer.deserialize(name, mlmodel.get_spec())

        if 'codec' in mlmodel.user_defined_metadata:
            nn.add_codec(PytorchCodec(json.loads(mlmodel.user_defined_metadata['codec'])))

        nn.user_metadata = {'accuracy': [], 'seg_type': 'bbox', 'one_channel_mode': '1', 'model_type': None, 'hyper_params': {}}  # type: dict[str, str]
        if 'kraken_meta' in mlmodel.user_defined_metadata:
            nn.user_metadata.update(json.loads(mlmodel.user_defined_metadata['kraken_meta']))
        return nn 
Example #20
Source File: compat.py    From lbry-sdk with MIT License
def from_old_json_schema(claim, payload: bytes):
    try:
        value = json.loads(payload)
    except:
        raise DecodeError('Could not parse JSON.')
    stream = claim.stream
    stream.source.sd_hash = value['sources']['lbry_sd_hash']
    stream.source.media_type = (
            value.get('content_type', value.get('content-type')) or
            'application/octet-stream'
    )
    stream.title = value.get('title', '')
    stream.description = value.get('description', '')
    if value.get('thumbnail', ''):
        stream.thumbnail.url = value.get('thumbnail', '')
    stream.author = value.get('author', '')
    stream.license = value.get('license', '')
    stream.license_url = value.get('license_url', '')
    language = value.get('language', '')
    if language:
        if language.lower() == 'english':
            language = 'en'
        try:
            stream.languages.append(language)
        except:
            pass
    if value.get('nsfw', False):
        stream.tags.append('mature')
    if "fee" in value and isinstance(value['fee'], dict):
        fee = value["fee"]
        currency = list(fee.keys())[0]
        if currency == 'LBC':
            stream.fee.lbc = Decimal(fee[currency]['amount'])
        elif currency == 'USD':
            stream.fee.usd = Decimal(fee[currency]['amount'])
        elif currency == 'BTC':
            stream.fee.btc = Decimal(fee[currency]['amount'])
        else:
            raise DecodeError(f'Unknown currency: {currency}')
        stream.fee.address = fee[currency]['address']
    return claim 
Example #21
Source File: claim.py    From lbry-sdk with MIT License
def from_bytes(cls, data: bytes) -> 'Claim':
        try:
            return super().from_bytes(data)
        except DecodeError:
            claim = cls()
            if data[0] == ord('{'):
                claim.version = 0
                compat.from_old_json_schema(claim, data)
            elif data[0] not in (0, 1):
                claim.version = 1
                compat.from_types_v1(claim, data)
            else:
                raise
            return claim 
Example #22
Source File: purchase.py    From lbry-sdk with MIT License
def from_bytes(cls, data: bytes):
        purchase = cls()
        if purchase.has_start_byte(data):
            purchase.message.ParseFromString(data[1:])
        else:
            raise DecodeError('Message does not start with correct byte.')
        return purchase 
Example #23
Source File: dsrf_report_processor.py    From dsrf with Apache License 2.0
def read_blocks_from_queue(self):
    """Returns a generator of the blocks in the queue.

    Override this method if you wish to change the queue format (how blocks are
    transformed).

    Yields:
      Each yield is a single block object (block_pb2.Block).
    """
    message_lines = []
    for line in sys.stdin:
      if constants.QUEUE_DELIMITER in line:
        block = block_pb2.Block()
        try:
          block.ParseFromString('\n'.join(message_lines))
        except message_mod.DecodeError:
          sys.stderr.write(
              'ERROR: Can not read protocol buffer from queue. Is '
              'human_readable perhaps set to true? I am not a human. '
              'Aborting...\n')
          sys.exit(-1)

        yield block
        message_lines = []
      else:
        message_lines.append(line.rstrip('\n')) 
Example #24
Source File: importer.py    From ngraph-onnx with Apache License 2.0
def import_onnx_file(filename):  # type: (str) -> List[Function]
    """
    Import ONNX model from a Protocol Buffers file and convert to ngraph functions.

    :param filename: path to an ONNX file
    :return: List of imported ngraph Functions (see docs for import_onnx_model).
    """
    try:
        onnx_protobuf = onnx.load(filename)
    except DecodeError:
        raise UserInputError("The provided file doesn't contain a properly formatted ONNX model.")

    return onnx_import.import_onnx_model(onnx_protobuf.SerializeToString()) 
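A hedged usage sketch (the path is hypothetical; UserInputError is ngraph-onnx's own exception type, as raised above):

try:
    functions = import_onnx_file('/tmp/model.onnx')
except UserInputError as exc:
    print('Not a valid ONNX model:', exc)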
Example #25
Source File: reflection_test.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def testParseTruncated(self):
    # This test is only applicable for the Python implementation of the API.
    if api_implementation.Type() != 'python':
      return

    first_proto = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(first_proto)
    serialized = first_proto.SerializeToString()

    for truncation_point in range(len(serialized) + 1):
      try:
        second_proto = unittest_pb2.TestAllTypes()
        unknown_fields = unittest_pb2.TestEmptyMessage()
        pos = second_proto._InternalParse(serialized, 0, truncation_point)
        # If we didn't raise an error then we read exactly the amount expected.
        self.assertEqual(truncation_point, pos)

        # Parsing to unknown fields should not throw if parsing to known fields
        # did not.
        try:
          pos2 = unknown_fields._InternalParse(serialized, 0, truncation_point)
          self.assertEqual(truncation_point, pos2)
        except message.DecodeError:
          self.fail('Parsing unknown fields failed when parsing known fields '
                    'did not.')
      except message.DecodeError:
        # Parsing unknown fields should also fail.
        self.assertRaises(message.DecodeError, unknown_fields._InternalParse,
                          serialized, 0, truncation_point) 
Example #26
Source File: route_handlers.py    From sawtooth-core with Apache License 2.0
def _parse_response(proto, response):
        """Parses the content from a validator response Message.
        """
        try:
            content = proto()
            content.ParseFromString(response.content)
            return content
        except (DecodeError, AttributeError):
            LOGGER.error('Validator response was not parsable: %s', response)
            raise errors.ValidatorResponseInvalid() 
Example #27
Source File: compat.py    From lbry-sdk with MIT License
def from_types_v1(claim, payload: bytes):
    old = OldClaimMessage()
    old.ParseFromString(payload)
    if old.claimType == 2:
        channel = claim.channel
        channel.public_key_bytes = old.certificate.publicKey
    else:
        stream = claim.stream
        stream.title = old.stream.metadata.title
        stream.description = old.stream.metadata.description
        stream.author = old.stream.metadata.author
        stream.license = old.stream.metadata.license
        stream.license_url = old.stream.metadata.licenseUrl
        stream.thumbnail.url = old.stream.metadata.thumbnail
        if old.stream.metadata.HasField('language'):
            stream.languages.add().message.language = old.stream.metadata.language
        stream.source.media_type = old.stream.source.contentType
        stream.source.sd_hash_bytes = old.stream.source.source
        if old.stream.metadata.nsfw:
            stream.tags.append('mature')
        if old.stream.metadata.HasField('fee'):
            fee = old.stream.metadata.fee
            stream.fee.address_bytes = fee.address
            currency = FeeMessage.Currency.Name(fee.currency)
            if currency == 'LBC':
                stream.fee.lbc = Decimal(fee.amount)
            elif currency == 'USD':
                stream.fee.usd = Decimal(fee.amount)
            elif currency == 'BTC':
                stream.fee.btc = Decimal(fee.amount)
            else:
                raise DecodeError(f'Unsupported currency: {currency}')
        if old.HasField('publisherSignature'):
            sig = old.publisherSignature
            claim.signature = sig.signature
            claim.signature_type = KeyType.Name(sig.signatureType)
            claim.signing_channel_hash = sig.certificateId[::-1]
            old.ClearField("publisherSignature")
            claim.unsigned_payload = old.SerializeToString()
    return claim