org.jcodec.common.model.ColorSpace Java Examples

The following examples show how to use org.jcodec.common.model.ColorSpace. Each example notes the project it was taken from and its license, so you can follow up in the original source file.
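
ColorSpace constants are most often paired with ColorUtil.getTransform to convert a Picture from one color model to another before encoding or after decoding. A minimal sketch, using only calls that appear in the examples below (the 640x480 dimensions are arbitrary):

// Build a transform from RGB to JPEG-range YUV 4:2:0, then convert one frame.
Transform transform = ColorUtil.getTransform(ColorSpace.RGB, ColorSpace.YUV420J);
Picture rgb = Picture.create(640, 480, ColorSpace.RGB);      // source frame, filled elsewhere
Picture yuv = Picture.create(640, 480, ColorSpace.YUV420J);  // destination frame
transform.transform(rgb, yuv);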
Example #1
Source File: SequenceEncoderMp4.java    From ImageToVideo with Apache License 2.0
public SequenceEncoderMp4(File out)
        throws IOException
{
    super(out);
    this.ch = NIOUtils.writableFileChannel(out);

    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);

    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, timeScale);

    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(1920 * 1080 * 6);

    // Create an instance of encoder
    encoder = new H264Encoder();

    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);

    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
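
A hypothetical driver for this class, assuming it inherits encodeImage(BufferedImage) and finish() from JCodec's SequenceEncoder (which the super(out) call suggests):

// Hypothetical usage: encode a list of images into an MP4 file.
SequenceEncoderMp4 enc = new SequenceEncoderMp4(new File("out.mp4"));
for (BufferedImage frame : frames) {
    enc.encodeImage(frame);  // assumed to be inherited from SequenceEncoder
}
enc.finish();                // assumed: flushes the track and writes the MP4 header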
 
Example #2
Source File: H264FrameEncoder.java    From amazon-kinesis-video-streams-parser-library with Apache License 2.0
public EncodedFrame encodeFrame(final BufferedImage bi) {

    // Perform conversion from buffered image to pic
    out.clear();
    toEncode = AWTUtil.fromBufferedImage(bi, ColorSpace.YUV420J);

    // First frame is treated as I Frame (IDR Frame)
    final SliceType sliceType = this.frameNumber == 0 ? SliceType.I : SliceType.P;
    log.debug("Encoding frame no: {}, frame type : {}", frameNumber, sliceType);

    final boolean idr = this.frameNumber == 0;

    // Encode image into H.264 frame, the result is stored in 'out' buffer
    final ByteBuffer data = encoder.doEncodeFrame(toEncode, out, idr, this.frameNumber++, sliceType);
    return EncodedFrame.builder()
            .byteBuffer(data)
            .isKeyFrame(idr)
            .cpd(ByteBuffer.wrap(cpd))
            .build();
}
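
A sketch of driving encodeFrame in a loop. The encoder's construction and the image source are not shown in the example, so both are assumptions here:

// Hypothetical harness: the first call yields an IDR keyframe, later calls P-frames.
final List<EncodedFrame> encodedFrames = new ArrayList<>();
for (final BufferedImage image : images) {
    encodedFrames.add(h264FrameEncoder.encodeFrame(image));
}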
 
Example #3
Source File: PictureUtil.java    From cineast with MIT License
public static int[] toColorArray(Picture src) {
    // Convert to RGB first if the picture uses a different color space
    if (src.getColor() != ColorSpace.RGB) {
        Transform transform = ColorUtil.getTransform(src.getColor(), ColorSpace.RGB);
        Picture rgb = Picture.create(src.getWidth(), src.getHeight(), ColorSpace.RGB, src.getCrop());
        transform.transform(src, rgb);
        src = rgb;
    }

    int[] _return = new int[src.getCroppedWidth() * src.getCroppedHeight()];

    // JCodec stores RGB interleaved in a single plane; each triplet is read in
    // reverse order to match the channel layout toIntColor expects
    int[] data = src.getPlaneData(0);

    for (int i = 0; i < _return.length; ++i) {
        _return[i] = ReadableRGBContainer.toIntColor(data[3 * i + 2], data[3 * i + 1], data[3 * i]);
    }

    return _return;
}
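
The packed colors can back a BufferedImage directly. A minimal sketch, assuming toIntColor packs pixels in the 0xRRGGBB layout that TYPE_INT_RGB expects:

// Hypothetical helper: render a decoded Picture via toColorArray.
public static BufferedImage toImage(Picture src) {
    int w = src.getCroppedWidth();
    int h = src.getCroppedHeight();
    BufferedImage img = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
    img.setRGB(0, 0, w, h, PictureUtil.toColorArray(src), 0, w);
    return img;
}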
 
Example #4
Source File: ImageToH264MP4Encoder.java    From CameraV with GNU General Public License v3.0
public ImageToH264MP4Encoder(SeekableByteChannel ch, AudioFormat af) throws IOException {
    this.ch = ch;
    this.af = af;
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);

    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, 25);

    // Create an instance of encoder
    encoder = new H264Encoder();

    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);

    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();


    if (af != null) {
        audioTrack = muxer.addPCMAudioTrack(af);
    }
}
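
Hypothetical wiring, reusing NIOUtils.writableFileChannel from Example #1; the AudioFormat constructor and its arguments (44.1 kHz, 16-bit, mono, signed, little-endian) are assumptions:

// Hypothetical setup: open the output channel and attach a PCM audio track.
SeekableByteChannel ch = NIOUtils.writableFileChannel(new File("out.mp4"));
ImageToH264MP4Encoder enc = new ImageToH264MP4Encoder(
        ch, new AudioFormat(44100, 16, 1, true, false));  // assumed constructor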
 
Example #5
Source File: H264FrameDecoder.java    From amazon-kinesis-video-streams-parser-library with Apache License 2.0
public BufferedImage decodeH264Frame(final Frame frame, final MkvTrackMetadata trackMetadata) {
    final ByteBuffer frameBuffer = frame.getFrameData();
    final int pixelWidth = trackMetadata.getPixelWidth().get().intValue();
    final int pixelHeight = trackMetadata.getPixelHeight().get().intValue();
    codecPrivateData = trackMetadata.getCodecPrivateData().array();
    log.debug("Decoding frames ... ");
    // Read the bytes that appear to comprise the header
    // See: https://www.matroska.org/technical/specs/index.html#simpleblock_structure

    final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB);
    final BufferedImage bufferedImage = new BufferedImage(pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR);
    final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData));

    decoder.addSps(avcC.getSpsList());
    decoder.addPps(avcC.getPpsList());

    final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16),
            pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J);
    final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC);
    final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());

    if (pic != null) {
        // Workaround for color issues in JCodec: swap the U and V planes
        // https://github.com/jcodec/jcodec/issues/59
        // https://github.com/jcodec/jcodec/issues/192
        final byte[][] dataTemp = new byte[3][pic.getData().length];
        dataTemp[0] = pic.getPlaneData(0);
        dataTemp[1] = pic.getPlaneData(2);
        dataTemp[2] = pic.getPlaneData(1);

        final Picture tmpBuf = Picture.createPicture(pixelWidth, pixelHeight, dataTemp, ColorSpace.YUV420J);
        transform.transform(tmpBuf, rgb);
        AWTUtil.toBufferedImage(rgb, bufferedImage);
        frameCount++;
    }
    return bufferedImage;
}
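
A hypothetical caller, taking the Frame and MkvTrackMetadata from an MkvElement visitor such as the one in Example #6 and saving each decoded frame as a PNG:

// Hypothetical: decode one frame and write it to disk.
final BufferedImage image = frameDecoder.decodeH264Frame(frame, trackMetadata);
ImageIO.write(image, "png", new File("frame-" + frameCount + ".png"));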
 
Example #6
Source File: FrameRendererVisitor.java    From amazon-kinesis-video-streams-parser-library with Apache License 2.0
@Override
public void visit(final MkvDataElement dataElement) throws MkvElementVisitException {
    log.info("Got data element: {}", dataElement.getElementMetaData().getTypeInfo().getName());
    final String dataElementName = dataElement.getElementMetaData().getTypeInfo().getName();

    if ("SimpleBlock".equals(dataElementName)) {
        final MkvValue<Frame> frame = dataElement.getValueCopy();
        final ByteBuffer frameBuffer = frame.getVal().getFrameData();
        final MkvTrackMetadata trackMetadata = fragmentMetadataVisitor.getMkvTrackMetadata(
                frame.getVal().getTrackNumber());
        final int pixelWidth = trackMetadata.getPixelWidth().get().intValue();
        final int pixelHeight = trackMetadata.getPixelHeight().get().intValue();
        codecPrivateData = trackMetadata.getCodecPrivateData().array();
        log.debug("Decoding frames ... ");
        // Read the bytes that appear to comprise the header
        // See: https://www.matroska.org/technical/specs/index.html#simpleblock_structure

        final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB);
        final BufferedImage renderImage = new BufferedImage(
                pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR);
        final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData));

        decoder.addSps(avcC.getSpsList());
        decoder.addPps(avcC.getPpsList());

        final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16),
                pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J);
        final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC);
        final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());

        if (pic != null) {
            // Workaround for color issues in JCodec: swap the U and V planes
            // https://github.com/jcodec/jcodec/issues/59
            // https://github.com/jcodec/jcodec/issues/192
            final byte[][] dataTemp = new byte[3][pic.getData().length];
            dataTemp[0] = pic.getPlaneData(0);
            dataTemp[1] = pic.getPlaneData(2);
            dataTemp[2] = pic.getPlaneData(1);

            final Picture tmpBuf = Picture.createPicture(pixelWidth, pixelHeight, dataTemp, ColorSpace.YUV420J);
            transform.transform(tmpBuf, rgb);
            AWTUtil.toBufferedImage(rgb, renderImage);
            kinesisVideoFrameViewer.update(renderImage);
            frameCount++;
        }
    }
}
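
Visitors like this one are typically pumped by the parser library's StreamingMkvReader; a minimal sketch, where the input stream and the visitor instance are assumptions:

// Hypothetical wiring: feed an MKV stream through the visitor.
final StreamingMkvReader reader = StreamingMkvReader.createDefault(
        new InputStreamParserByteSource(inputStream));
reader.apply(frameRendererVisitor);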
 
Example #7
Source File: YUVtoWebmMuxer.java    From CameraV with GNU General Public License v3.0
public void encodeNativeFrame(ByteBuffer data, int width, int height, int frameIdx) throws IOException {

    // Allocate a YUV 4:2:0 picture for the encoder. Note that it is created
    // empty here; in stock JCodec the second argument to encodeFrame is the
    // output buffer, so the picture would need to be populated from 'data' first.
    Picture yuv = Picture.create(width, height, ColorSpace.YUV420);

    ByteBuffer ff = encoder.encodeFrame(yuv, data);

    // Packet arguments: pts = frameIdx, timescale = 1, duration = 1,
    // frameNo = frameIdx; every frame is flagged as a keyframe.
    Packet packet = new Packet(ff, frameIdx, 1, 1, frameIdx, true, null);
    muxer.addFrame(packet);
}
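
A hypothetical feed loop, pushing raw YUV 4:2:0 buffers captured elsewhere (for example, from a camera) into the muxer, one packet per frame:

// Hypothetical: encode a sequence of raw YUV buffers into the WebM file.
int frameIdx = 0;
for (ByteBuffer yuvData : yuvFrames) {
    yuvToWebmMuxer.encodeNativeFrame(yuvData, width, height, frameIdx++);
}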