Java Code Examples for android.media.MediaFormat#getInteger()

The following examples show how to use android.media.MediaFormat#getInteger(). Each example is taken from the open-source project and source file named above it.
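Before the examples, note that getInteger(String) throws a NullPointerException when the requested key is absent, so callers typically guard optional keys with containsKey() or fall back to a default. The sketch below illustrates that pattern; the track index 0 and the 2 Mbps fallback bitrate are illustrative assumptions, not taken from any of the projects listed here.

import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;

public final class FormatProbe {
    // Reads basic video properties from the first track of a media file.
    // The fallback bitrate below is an assumed example value.
    public static void probe(String path) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(path);
            MediaFormat format = extractor.getTrackFormat(0);

            // Mandatory keys of a video track can be read directly.
            int width = format.getInteger(MediaFormat.KEY_WIDTH);
            int height = format.getInteger(MediaFormat.KEY_HEIGHT);

            // Optional keys should be guarded, otherwise getInteger() throws.
            int bitrate = format.containsKey(MediaFormat.KEY_BIT_RATE)
                    ? format.getInteger(MediaFormat.KEY_BIT_RATE)
                    : 2_000_000;

            Log.d("FormatProbe", width + "x" + height + " @ " + bitrate + " bps");
        } finally {
            extractor.release();
        }
    }
}
 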
Example 1
Source File: MediaCodecRenderer.java    From Telegram with GNU General Public License v2.0
/**
 * Processes a new output format.
 */
private void processOutputFormat() throws ExoPlaybackException {
  MediaFormat format = codec.getOutputFormat();
  if (codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER
      && format.getInteger(MediaFormat.KEY_WIDTH) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT
      && format.getInteger(MediaFormat.KEY_HEIGHT) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT) {
    // We assume this format changed event was caused by the adaptation workaround.
    shouldSkipAdaptationWorkaroundOutputBuffer = true;
    return;
  }
  if (codecNeedsMonoChannelCountWorkaround) {
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
  }
  onOutputFormatChanged(codec, format);
}
 
Example 2
Source File: Android720pFormatStrategy.java    From phoenix with Apache License 2.0
@Override
public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
    int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
    int longer, shorter, outWidth, outHeight;
    if (width >= height) {
        longer = width;
        shorter = height;
        outWidth = LONGER_LENGTH;
        outHeight = SHORTER_LENGTH;
    } else {
        shorter = width;
        longer = height;
        outWidth = SHORTER_LENGTH;
        outHeight = LONGER_LENGTH;
    }
    if (longer * 9 != shorter * 16) {
        throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
    }
    if (shorter <= SHORTER_LENGTH) {
        Log.d(TAG, "This video is less or equal to 720p, pass-through. (" + width + "x" + height + ")");
        return null;
    }
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
    // From Nexus 4 Camera in 720p
    format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    return format;
}
 
Example 3
Source File: MediaCodecAudioRenderer.java    From TelePlus-Android with GNU General Public License v2.0
@Override
protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat)
    throws ExoPlaybackException {
  @C.Encoding int encoding;
  MediaFormat format;
  if (passthroughMediaFormat != null) {
    encoding = MimeTypes.getEncoding(passthroughMediaFormat.getString(MediaFormat.KEY_MIME));
    format = passthroughMediaFormat;
  } else {
    encoding = pcmEncoding;
    format = outputFormat;
  }
  int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
  int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
  int[] channelMap;
  if (codecNeedsDiscardChannelsWorkaround && channelCount == 6 && this.channelCount < 6) {
    channelMap = new int[this.channelCount];
    for (int i = 0; i < this.channelCount; i++) {
      channelMap[i] = i;
    }
  } else {
    channelMap = null;
  }

  try {
    audioSink.configure(encoding, channelCount, sampleRate, 0, channelMap, encoderDelay,
        encoderPadding);
  } catch (AudioSink.ConfigurationException e) {
    throw ExoPlaybackException.createForRenderer(e, getIndex());
  }
}
 
Example 4
Source File: AudioUtils.java    From AlexaAndroid with GNU General Public License v2.0
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public static void showMetrics(MediaFormat format, int numBytesSubmitted, int numBytesDequeued) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        Log.i("queued a total of " + numBytesSubmitted + " bytes, " + "dequeued " + numBytesDequeued + " bytes.");
        int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
        int inBitrate = sampleRate * channelCount * 16;  // bit/sec
        int outBitrate = format.getInteger(MediaFormat.KEY_BIT_RATE);
        float desiredRatio = (float) outBitrate / (float) inBitrate;
        float actualRatio = (float) numBytesDequeued / (float) numBytesSubmitted;
        Log.i("desiredRatio = " + desiredRatio + ", actualRatio = " + actualRatio);
    }
}
 
Example 5
Source File: Android720pFormatStrategy.java    From Conversations with GNU General Public License v3.0
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
@Override
public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
    int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
    int longer, shorter, outWidth, outHeight;
    if (width >= height) {
        longer = width;
        shorter = height;
        outWidth = LONGER_LENGTH;
        outHeight = SHORTER_LENGTH;
    } else {
        shorter = width;
        longer = height;
        outWidth = SHORTER_LENGTH;
        outHeight = LONGER_LENGTH;
    }
    if (longer * 9 != shorter * 16) {
        throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
    }
    if (shorter <= SHORTER_LENGTH) {
        Log.d(Config.LOGTAG, "This video is less or equal to 720p, pass-through. (" + width + "x" + height + ")");
        return null;
    }
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        format.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
        format.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel13);
    }
    return format;
}
 
Example 6
Source File: ExportPreset960x540Strategy.java    From Pix-Art-Messenger with GNU General Public License v3.0
@Override
public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
    // TODO: detect non-baseline profile and throw exception
    int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
    MediaFormat outputFormat = MediaFormatPresets.getExportPreset960x540(width, height);
    int outWidth = outputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int outHeight = outputFormat.getInteger(MediaFormat.KEY_HEIGHT);
    Log.d(TAG, String.format("inputFormat: %dx%d => outputFormat: %dx%d", width, height, outWidth, outHeight));
    return outputFormat;
}
 
Example 7
Source File: Android480pFormatStrategy.java    From phoenix with Apache License 2.0
@Override
public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
    int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
    int longer, shorter, outWidth, outHeight;
    if (width >= height) {
        longer = width;
        shorter = height;
        outWidth = LONGER_LENGTH;
        outHeight = SHORTER_LENGTH;
    } else {
        shorter = width;
        longer = height;
        outWidth = SHORTER_LENGTH;
        outHeight = LONGER_LENGTH;
    }
    if (longer * 9 != shorter * 16) {
        throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
    }
    if (shorter <= SHORTER_LENGTH) {
        Log.d(TAG, "This video is less or equal to 480p, pass-through. (" + width + "x" + height + ")");
        return null;
    }
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
    // From Nexus 4 Camera in 480p
    format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    return format;
}
 
Example 8
Source File: AudioUtil.java    From VideoProcessor with Apache License 2.0
public static int getAudioBitrate(MediaFormat format) {
    if (format.containsKey(MediaFormat.KEY_BIT_RATE)) {
        return format.getInteger(MediaFormat.KEY_BIT_RATE);
    } else {
        return VideoProcessor.DEFAULT_AAC_BITRATE;
    }
}
 
Example 9
Source File: GlVideoRenderer.java    From LiTr with BSD 2-Clause "Simplified" License
@Override
public void init(@Nullable Surface outputSurface, @Nullable MediaFormat sourceMediaFormat, @Nullable MediaFormat targetMediaFormat) {
    if (outputSurface == null) {
        throw new IllegalArgumentException("GlVideoRenderer requires an output surface");
    }
    if (targetMediaFormat == null) {
        throw new IllegalArgumentException("GlVideoRenderer requires target media format");
    }

    triangleVertices = ByteBuffer.allocateDirect(
            triangleVerticesData.length * FLOAT_SIZE_BYTES)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
    triangleVertices.put(triangleVerticesData).position(0);

    // prioritize target video rotation value, fall back to source video rotation value
    int rotation = 0;
    if (targetMediaFormat.containsKey(KEY_ROTATION)) {
        rotation = targetMediaFormat.getInteger(KEY_ROTATION);
    } else if (sourceMediaFormat != null && sourceMediaFormat.containsKey(KEY_ROTATION)) {
        rotation = sourceMediaFormat.getInteger(KEY_ROTATION);
    }
    float aspectRatio = 1;
    if (targetMediaFormat.containsKey(MediaFormat.KEY_WIDTH) && targetMediaFormat.containsKey(MediaFormat.KEY_HEIGHT)) {
        aspectRatio = (float) targetMediaFormat.getInteger(MediaFormat.KEY_WIDTH) / targetMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
    }

    this.outputSurface = new VideoRenderOutputSurface(outputSurface);

    inputSurface = new VideoRenderInputSurface();
    initMvpMatrix(rotation, aspectRatio);
    initGl();

    for (GlFilter filter : filters) {
        filter.init(Arrays.copyOf(mvpMatrix, mvpMatrix.length), 0);
    }
}
 
Example 10
Source File: MediaTransformer.java    From LiTr with BSD 2-Clause "Simplified" License
@Nullable
private MediaFormat createTargetMediaFormat(@NonNull MediaSource mediaSource,
                                            int sourceTrackIndex) {
    MediaFormat sourceMediaFormat = mediaSource.getTrackFormat(sourceTrackIndex);
    MediaFormat targetMediaFormat = null;

    String mimeType = null;
    if (sourceMediaFormat.containsKey(MediaFormat.KEY_MIME)) {
        mimeType = sourceMediaFormat.getString(MediaFormat.KEY_MIME);
    }

    if (mimeType != null) {
        if (mimeType.startsWith("video")) {
            targetMediaFormat = MediaFormat.createVideoFormat(mimeType,
                                                              sourceMediaFormat.getInteger(MediaFormat.KEY_WIDTH),
                                                              sourceMediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            int targetBitrate = TranscoderUtils.estimateVideoTrackBitrate(mediaSource, sourceTrackIndex);
            targetMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, targetBitrate);

            int targetKeyFrameInterval = DEFAULT_KEY_FRAME_INTERVAL;
            if (sourceMediaFormat.containsKey(MediaFormat.KEY_I_FRAME_INTERVAL)) {
                targetKeyFrameInterval = sourceMediaFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL);
            }
            targetMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, targetKeyFrameInterval);
        } else if (mimeType.startsWith("audio")) {
            targetMediaFormat = MediaFormat.createAudioFormat(mimeType,
                                                              sourceMediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
                                                              sourceMediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
            targetMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, sourceMediaFormat.getInteger(MediaFormat.KEY_BIT_RATE));
        }
    }

    return targetMediaFormat;
}
 
Example 11
Source File: MediaCodecBridge.java    From 365browser with Apache License 2.0
private void maybeSetMaxInputSize(MediaFormat format) {
    if (format.containsKey(android.media.MediaFormat.KEY_MAX_INPUT_SIZE)) {
        // Already set. The source of the format may know better, so do nothing.
        return;
    }
    int maxHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
    if (mAdaptivePlaybackSupported && format.containsKey(MediaFormat.KEY_MAX_HEIGHT)) {
        maxHeight = Math.max(maxHeight, format.getInteger(MediaFormat.KEY_MAX_HEIGHT));
    }
    int maxWidth = format.getInteger(MediaFormat.KEY_WIDTH);
    if (mAdaptivePlaybackSupported && format.containsKey(MediaFormat.KEY_MAX_WIDTH)) {
        maxWidth = Math.max(maxWidth, format.getInteger(MediaFormat.KEY_MAX_WIDTH));
    }
    int maxPixels;
    int minCompressionRatio;
    switch (format.getString(MediaFormat.KEY_MIME)) {
        case MimeTypes.VIDEO_H264:
            if ("BRAVIA 4K 2015".equals(Build.MODEL)) {
                // The Sony BRAVIA 4k TV has input buffers that are too small for the calculated
                // 4k video maximum input size, so use the default value.
                return;
            }
            // Round up width/height to an integer number of macroblocks.
            maxPixels = ((maxWidth + 15) / 16) * ((maxHeight + 15) / 16) * 16 * 16;
            minCompressionRatio = 2;
            break;
        case MimeTypes.VIDEO_VP8:
            // VPX does not specify a ratio so use the values from the platform's SoftVPX.cpp.
            maxPixels = maxWidth * maxHeight;
            minCompressionRatio = 2;
            break;
        case MimeTypes.VIDEO_H265:
        case MimeTypes.VIDEO_VP9:
            maxPixels = maxWidth * maxHeight;
            minCompressionRatio = 4;
            break;
        default:
            // Leave the default max input size.
            return;
    }
    // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
    int maxInputSize = (maxPixels * 3) / (2 * minCompressionRatio);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
}
 
Example 12
Source File: MediaCodecVideoDecoder.java    From MediaPlayer-Extended with Apache License 2.0
public int getVideoHeight() {
    MediaFormat format = getFormat();
    return format != null ? format.getInteger(MediaFormat.KEY_HEIGHT) : 0;
}
 
Example 13
Source File: MediaController.java    From SiliCompressor with Apache License 2.0
@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info, long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        while (!inputDone) {

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
 
Example 14
Source File: AudioDecoder.java    From ssj with GNU General Public License v3.0
/**
 * Decodes audio file into a raw file. This method accepts audio file formats with valid
 * headers (like .mp3, .mp4, and .wav).
 * @param filepath Path of the file to decode.
 * @return Decoded raw audio file.
 * @throws IOException when file cannot be read.
 */
private File decode(String filepath) throws IOException
{
	// Set selected audio file as a source.
	MediaExtractor extractor = new MediaExtractor();
	extractor.setDataSource(filepath);

	// Get audio format.
	MediaFormat format = extractor.getTrackFormat(0);
	String mime = format.getString(MediaFormat.KEY_MIME);

	// Cache necessary audio attributes.
	sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
	channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

	// Create and configure decoder based on audio format.
	MediaCodec decoder = MediaCodec.createDecoderByType(mime);
	decoder.configure(format, null, null, 0);
	decoder.start();

	// Create input/output buffers.
	ByteBuffer[] inputBuffers = decoder.getInputBuffers();
	ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
	MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
	extractor.selectTrack(0);

	File dst = new File(FileCons.SSJ_EXTERNAL_STORAGE + File.separator + "output.raw");
	FileOutputStream f = new FileOutputStream(dst);

	boolean endOfStreamReached = false;

	while (true)
	{
		if (!endOfStreamReached)
		{
			int inputBufferIndex = decoder.dequeueInputBuffer(10 * 1000);
			if (inputBufferIndex >= 0)
			{
				ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
				int sampleSize = extractor.readSampleData(inputBuffer, 0);
				if (sampleSize < 0)
				{
					// Pass empty buffer and the end of stream flag to the codec.
					decoder.queueInputBuffer(inputBufferIndex, 0, 0,
											 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
					endOfStreamReached = true;
				}
				else
				{
					// Pass data-filled buffer to the decoder.
					decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize,
											 extractor.getSampleTime(), 0);
					extractor.advance();
				}
			}
		}

		int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10 * 1000);
		if (outputBufferIndex >= 0)
		{
			ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
			byte[] data = new byte[bufferInfo.size];
			outputBuffer.get(data);
			outputBuffer.clear();

			if (data.length > 0)
			{
				f.write(data, 0, data.length);
			}
			decoder.releaseOutputBuffer(outputBufferIndex, false);

			if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
			{
				endOfStreamReached = true;
			}
		}
		else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
		{
			outputBuffers = decoder.getOutputBuffers();
		}

		if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
		{
			// Release the codec, extractor and output stream before returning.
			decoder.stop();
			decoder.release();
			extractor.release();
			f.close();
			return dst;
		}
	}
}
 
Example 15
Source File: DetailsBottomSheet.java    From YTPlayer with GNU General Public License v3.0
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
    View v = inflater.inflate(R.layout.bottom_sheet_details, container, false);
    initViews(v);

    Bundle args = getArguments();
    String file = args.getString("filepath");
    if (file!=null) {
        File f = new File(file);
        if (f.exists()) {
            mTxtfilepath.setText(file);
            mTxtfilename.setText(f.getName());
            mTxtsize.setText(YTutils.getSize(f.length()));
            MediaExtractor mex = new MediaExtractor();
            try {
                MediaMetadataRetriever mmr = new MediaMetadataRetriever();
                mmr.setDataSource(getActivity(), Uri.fromFile(f));
                mex.setDataSource(file);

                String duration = mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
                // METADATA_KEY_DURATION is reported in milliseconds.
                long durationMs = Long.parseLong(duration);

                MediaFormat mf = mex.getTrackFormat(0);
                int bitRate = mf.getInteger(MediaFormat.KEY_BIT_RATE);
                int sampleRate = mf.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                mTxtbitrate.setText(YTutils.getSizeNoDecimal(bitRate)+"/s");
                mTxtsamplingrate.setText(sampleRate+" Hz");
                mTxtlength.setText(YTutils.milliSecondsToTimer(durationMs));
                String format = YTutils.getAudioFormat(f);
                if (format!=null)
                    mTxtformat.setText(format);
            }catch (Exception e) {
                Log.e(TAG, "onCreateView: "+e.getMessage());
            }
        }
    }
    mButton.setOnClickListener(view -> dismiss());
    return v;
}
 
Example 16
Source File: EncoderDebugger.java    From libstreaming with Apache License 2.0
/**
 * Converts the image obtained from the decoder to NV21.
 */
private void convertToNV21(int k) {		
	byte[] buffer = new byte[3*mSize/2];

	int stride = mWidth, sliceHeight = mHeight;
	int colorFormat = mDecoderColorFormat;
	boolean planar = false;

	if (mDecOutputFormat != null) {
		MediaFormat format = mDecOutputFormat;
		if (format != null) {
			if (format.containsKey("slice-height")) {
				sliceHeight = format.getInteger("slice-height");
				if (sliceHeight<mHeight) sliceHeight = mHeight;
			}
			if (format.containsKey("stride")) {
				stride = format.getInteger("stride");
				if (stride<mWidth) stride = mWidth;
			}
			if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT) && format.getInteger(MediaFormat.KEY_COLOR_FORMAT)>0) {
				colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
			}
		}
	}

	switch (colorFormat) {
	case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
	case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
	case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
		planar = false;
		break;	
	case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
	case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
		planar = true;
		break;
	}

	for (int i=0;i<mSize;i++) {
		if (i%mWidth==0) i+=stride-mWidth;
		buffer[i] = mDecodedVideo[k][i];
	}

	if (!planar) {
		for (int i=0,j=0;j<mSize/4;i+=1,j+=1) {
			if (i%mWidth/2==0) i+=(stride-mWidth)/2;
			buffer[mSize+2*j+1] = mDecodedVideo[k][stride*sliceHeight+2*i];
			buffer[mSize+2*j] = mDecodedVideo[k][stride*sliceHeight+2*i+1];
		}
	} else {
		for (int i=0,j=0;j<mSize/4;i+=1,j+=1) {
			if (i%mWidth/2==0) i+=(stride-mWidth)/2;
			buffer[mSize+2*j+1] = mDecodedVideo[k][stride*sliceHeight+i];
			buffer[mSize+2*j] = mDecodedVideo[k][stride*sliceHeight*5/4+i];
		}
	}

	mDecodedVideo[k] = buffer;

}
 
Example 17
Source File: AudioPlayback.java    From MediaPlayer-Extended with Apache License 2.0
/**
 * Initializes or reinitializes the audio track with the supplied format for playback
 * while keeping the playstate. Keeps the current configuration and skips reinitialization
 * if the new format is the same as the current format.
 */
public void init(MediaFormat format) {
    Log.d(TAG, "init");

    boolean playing = false;

    if(isInitialized()) {
        if(!checkIfReinitializationRequired(format)) {
            // Set new format that equals the old one (in case we compare references somewhere)
            mAudioFormat = format;
            return;
        }

        playing = isPlaying();
        pause();
        stopAndRelease(false);
    } else {
        // deferred creation of the audio thread until its first use
        mAudioThread = new AudioThread();
        mAudioThread.setPaused(true);
        mAudioThread.start();
    }

    mAudioFormat = format;

    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int bytesPerSample = 2;
    mFrameSize = bytesPerSample * channelCount;
    mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

    int channelConfig = AudioFormat.CHANNEL_OUT_DEFAULT;
    switch(channelCount) {
        case 1:
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
            break;
        case 2:
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
            break;
        case 4:
            channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
            break;
        case 6:
            channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
            break;
        case 8:
            channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
    }

    mPlaybackBufferSize = mFrameChunkSize * channelCount;

    mAudioTrack = new AudioTrack(
            mAudioStreamType,
            mSampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            mPlaybackBufferSize, // at least twice the size to enable double buffering (according to docs)
            AudioTrack.MODE_STREAM, mAudioSessionId);

    if(mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        stopAndRelease();
        throw new IllegalStateException("audio track init failed");
    }

    mAudioSessionId = mAudioTrack.getAudioSessionId();
    mAudioStreamType = mAudioTrack.getStreamType();
    setStereoVolume(mVolumeLeft, mVolumeRight);
    mPresentationTimeOffsetUs = PTS_NOT_SET;

    if(playing) {
        play();
    }
}
 
Example 18
Source File: MediaCodecVideoDecoder.java    From webrtc_android with MIT License
@CalledByNativeUnchecked
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
  checkOnMediaCodecThread();
  if (decodeStartTimeMs.isEmpty()) {
    return null;
  }
  // Drain the decoder until receiving a decoded buffer or hitting
  // MediaCodec.INFO_TRY_AGAIN_LATER.
  final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  while (true) {
    final int result =
        mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
    switch (result) {
      case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
        outputBuffers = mediaCodec.getOutputBuffers();
        Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
        if (hasDecodedFirstFrame) {
          throw new RuntimeException("Unexpected output buffer change event.");
        }
        break;
      case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        MediaFormat format = mediaCodec.getOutputFormat();
        Logging.d(TAG, "Decoder format changed: " + format.toString());
        final int newWidth;
        final int newHeight;
        if (format.containsKey(FORMAT_KEY_CROP_LEFT) && format.containsKey(FORMAT_KEY_CROP_RIGHT)
            && format.containsKey(FORMAT_KEY_CROP_BOTTOM)
            && format.containsKey(FORMAT_KEY_CROP_TOP)) {
          newWidth = 1 + format.getInteger(FORMAT_KEY_CROP_RIGHT)
              - format.getInteger(FORMAT_KEY_CROP_LEFT);
          newHeight = 1 + format.getInteger(FORMAT_KEY_CROP_BOTTOM)
              - format.getInteger(FORMAT_KEY_CROP_TOP);
        } else {
          newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
          newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
        }
        if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
          throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
              + ". New " + newWidth + "*" + newHeight);
        }
        width = newWidth;
        height = newHeight;
        if (textureListener != null) {
          textureListener.setSize(width, height);
        }

        if (!useSurface() && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
          colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
          Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
          if (!supportedColorList.contains(colorFormat)) {
            throw new IllegalStateException("Non supported color format: " + colorFormat);
          }
        }
        if (format.containsKey(FORMAT_KEY_STRIDE)) {
          stride = format.getInteger(FORMAT_KEY_STRIDE);
        }
        if (format.containsKey(FORMAT_KEY_SLICE_HEIGHT)) {
          sliceHeight = format.getInteger(FORMAT_KEY_SLICE_HEIGHT);
        }
        Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
        stride = Math.max(width, stride);
        sliceHeight = Math.max(height, sliceHeight);
        break;
      case MediaCodec.INFO_TRY_AGAIN_LATER:
        return null;
      default:
        hasDecodedFirstFrame = true;
        TimeStamps timeStamps = decodeStartTimeMs.remove();
        long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
        if (decodeTimeMs > MAX_DECODE_TIME_MS) {
          Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
                  + ". Q size: " + decodeStartTimeMs.size()
                  + ". Might be caused by resuming H264 decoding after a pause.");
          decodeTimeMs = MAX_DECODE_TIME_MS;
        }
        return new DecodedOutputBuffer(result, info.offset, info.size,
            TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
            timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
    }
  }
}
 
Example 19
Source File: AudioPlayback.java    From MediaPlayer-Extended with Apache License 2.0
private boolean checkIfReinitializationRequired(MediaFormat newFormat) {
    return mAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT) != newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
            || mAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE) != newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)
            || !mAudioFormat.getString(MediaFormat.KEY_MIME).equals(newFormat.getString(MediaFormat.KEY_MIME));
}