org.jcodec.scale.AWTUtil Java Examples

The following examples show how to use org.jcodec.scale.AWTUtil. You can vote up the examples you find helpful or vote down the ones you don't, and follow the links above each example to the original project or source file. Related API usage examples are listed in the sidebar.
Example #1
Source File: H264FrameEncoder.java — from amazon-kinesis-video-streams-parser-library (Apache License 2.0)
/**
 * Encodes one {@link BufferedImage} as a single H.264 frame.
 *
 * <p>The frame with number 0 is emitted as an IDR (key) frame; every
 * subsequent frame is encoded as a P frame. The encoder writes its output
 * into the reusable {@code out} buffer, which is cleared on each call.
 *
 * @param bi the image to encode
 * @return the encoded frame, carrying the H.264 payload, the key-frame flag
 *         and the codec private data (CPD)
 */
public EncodedFrame encodeFrame(final BufferedImage bi) {

    // Clear the reusable output buffer, then convert the AWT image into a
    // JCodec Picture in the YUV420J color space expected by the encoder.
    out.clear();
    toEncode = AWTUtil.fromBufferedImage(bi, ColorSpace.YUV420J);

    // Only the very first frame is an IDR frame.
    final boolean idr = this.frameNumber == 0;
    final SliceType sliceType = idr ? SliceType.I : SliceType.P;
    log.debug("Encoding frame no: {}, frame type : {}", frameNumber, sliceType);

    // Encode into 'out'; the returned buffer is a view over the encoded bytes.
    // Note: frameNumber is post-incremented here for the next invocation.
    final ByteBuffer encoded = encoder.doEncodeFrame(toEncode, out, idr, this.frameNumber++, sliceType);

    return EncodedFrame.builder()
            .byteBuffer(encoded)
            .isKeyFrame(idr)
            .cpd(ByteBuffer.wrap(cpd))
            .build();
}
 
Example #2
Source File: H264FrameDecoder.java — from amazon-kinesis-video-streams-parser-library (Apache License 2.0)
/**
 * Decodes one H.264 frame from an MKV SimpleBlock into a {@link BufferedImage}.
 *
 * <p>The codec private data (AvcC box) from the track metadata supplies the
 * SPS/PPS parameter sets for the decoder. If the decoder cannot produce a
 * picture for this frame ({@code pic == null}), the returned image is left
 * blank (all-black {@code TYPE_3BYTE_BGR}).
 *
 * @param frame         the MKV frame whose payload holds length-prefixed NALs
 * @param trackMetadata track metadata providing dimensions and codec private data
 * @return the decoded image, or a blank image if decoding produced no picture
 */
public BufferedImage decodeH264Frame(final Frame frame, final MkvTrackMetadata trackMetadata) {
    final ByteBuffer frameBuffer = frame.getFrameData();
    final int pixelWidth = trackMetadata.getPixelWidth().get().intValue();
    final int pixelHeight = trackMetadata.getPixelHeight().get().intValue();
    codecPrivateData = trackMetadata.getCodecPrivateData().array();
    log.debug("Decoding frames ... ");
    // Read the bytes that appear to comprise the header
    // See: https://www.matroska.org/technical/specs/index.html#simpleblock_structure

    final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB);
    final BufferedImage bufferedImage = new BufferedImage(pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR);
    final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData));

    decoder.addSps(avcC.getSpsList());
    decoder.addPps(avcC.getPpsList());

    // H.264 codes 16x16 macroblocks, so the decode buffer must be padded up
    // to the next multiple of 16 in each dimension.
    final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16),
            pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J);
    final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC);
    final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());

    if (pic != null) {
        // Work around for color issues in JCodec: swap the U and V planes.
        // https://github.com/jcodec/jcodec/issues/59
        // https://github.com/jcodec/jcodec/issues/192
        // Build the plane array directly from the decoded planes; the previous
        // `new byte[3][pic.getData().length]` allocated three throwaway arrays
        // (sized by the plane COUNT, not the plane length) that were
        // immediately overwritten.
        final byte[][] dataTemp = {
                pic.getPlaneData(0),  // Y
                pic.getPlaneData(2),  // V in place of U (workaround)
                pic.getPlaneData(1)   // U in place of V (workaround)
        };

        final Picture tmpBuf = Picture.createPicture(pixelWidth, pixelHeight, dataTemp, ColorSpace.YUV420J);
        transform.transform(tmpBuf, rgb);
        AWTUtil.toBufferedImage(rgb, bufferedImage);
        frameCount++;
    }
    return bufferedImage;
}
 
Example #3
Source File: OpenViduTestAppE2eTest.java — from openvidu (Apache License 2.0)
/**
 * Verifies a recorded media file: checks its container/codec properties and
 * then samples a frame at 75% of the recording's duration, asserting that the
 * real video resolution matches the recording entity and that the frame's
 * average color is the expected green.
 *
 * @param file      the recorded media file on disk
 * @param recording the recording entity describing the expected properties
 * @return {@code true} if the sampled frame has the expected average color;
 *         {@code false} if it does not or the frame could not be extracted
 * @throws IOException if the multimedia file check fails with an I/O error
 */
private boolean recordedFileFine(File file, Recording recording) throws IOException {
	this.checkMultimediaFile(file, recording.hasAudio(), recording.hasVideo(), recording.getDuration(),
			recording.getResolution(), "aac", "h264", true);

	try {
		// Sample a frame at 75% of the duration and inspect its average color.
		Picture grabbed = FrameGrab.getFrameAtSec(file, (double) (recording.getDuration() * 0.75));
		BufferedImage image = AWTUtil.toBufferedImage(grabbed);
		Map<String, Long> colorMap = this.averageColor(image);

		// The real resolution of the decoded frame must match the entity's.
		String realResolution = image.getWidth() + "x" + image.getHeight();
		Assert.assertEquals(
				"Resolution (" + recording.getResolution()
						+ ") of recording entity is not equal to real video resolution (" + realResolution + ")",
				recording.getResolution(), realResolution);

		log.info("Recording map color: {}", colorMap.toString());
		log.info("Recording frame below");
		System.out.println(bufferedImageToBase64PngString(image));
		return this.checkVideoAverageRgbGreen(colorMap);
	} catch (IOException | JCodecException e) {
		// Best-effort: a frame-grab failure means the file is not fine.
		log.warn("Error getting frame from video recording: {}", e.getMessage());
		return false;
	}
}
 
Example #4
Source File: FrameRendererVisitor.java — from amazon-kinesis-video-streams-parser-library (Apache License 2.0)
/**
 * Handles an MKV data element; when it is a SimpleBlock, decodes the embedded
 * H.264 frame and pushes the rendered image to the frame viewer.
 *
 * <p>SPS/PPS parameter sets are taken from the track's codec private data
 * (AvcC box). Frames the decoder cannot produce a picture for are skipped.
 *
 * @param dataElement the MKV element being visited
 * @throws MkvElementVisitException if reading the element value fails
 */
@Override
public void visit(final MkvDataElement dataElement) throws MkvElementVisitException {
    log.info("Got data element: {}", dataElement.getElementMetaData().getTypeInfo().getName());
    final String dataElementName = dataElement.getElementMetaData().getTypeInfo().getName();

    if ("SimpleBlock".equals(dataElementName)) {
        final MkvValue<Frame> frame = dataElement.getValueCopy();
        final ByteBuffer frameBuffer = frame.getVal().getFrameData();
        final MkvTrackMetadata trackMetadata = fragmentMetadataVisitor.getMkvTrackMetadata(
                frame.getVal().getTrackNumber());
        final int pixelWidth = trackMetadata.getPixelWidth().get().intValue();
        final int pixelHeight = trackMetadata.getPixelHeight().get().intValue();
        codecPrivateData = trackMetadata.getCodecPrivateData().array();
        log.debug("Decoding frames ... ");
        // Read the bytes that appear to comprise the header
        // See: https://www.matroska.org/technical/specs/index.html#simpleblock_structure

        final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB);
        final BufferedImage renderImage = new BufferedImage(
                pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR);
        final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData));

        decoder.addSps(avcC.getSpsList());
        decoder.addPps(avcC.getPpsList());

        // H.264 codes 16x16 macroblocks, so the decode buffer must be padded
        // up to the next multiple of 16 in each dimension.
        final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16),
                pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J);
        final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC);
        final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());

        if (pic != null) {
            // Work around for color issues in JCodec: swap the U and V planes.
            // https://github.com/jcodec/jcodec/issues/59
            // https://github.com/jcodec/jcodec/issues/192
            // Build the plane array directly from the decoded planes; the
            // previous `new byte[3][pic.getData().length]` allocated three
            // throwaway arrays (sized by the plane COUNT, not the plane
            // length) that were immediately overwritten.
            final byte[][] dataTemp = {
                    pic.getPlaneData(0),  // Y
                    pic.getPlaneData(2),  // V in place of U (workaround)
                    pic.getPlaneData(1)   // U in place of V (workaround)
            };

            final Picture tmpBuf = Picture.createPicture(pixelWidth, pixelHeight, dataTemp, ColorSpace.YUV420J);
            transform.transform(tmpBuf, rgb);
            AWTUtil.toBufferedImage(rgb, renderImage);
            kinesisVideoFrameViewer.update(renderImage);
            frameCount++;
        }
    }
}