Java Code Examples for android.graphics.Bitmap#copyPixelsFromBuffer()

The following examples show how to use android.graphics.Bitmap#copyPixelsFromBuffer(). You can vote up the examples you find useful or vote down the ones you don't, and you can open the original project or source file by following the links above each example. Related API usage is listed on the sidebar.
Example 1
Source File: OpenGlUtil.java    From SimpleVideoEditor with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the current GL color buffer, flips it vertically, and saves it as
 * "videoeditor/tmp.png" on external storage.
 *
 * Must be called on the GL thread with a current context whose framebuffer
 * is at least width x height.
 *
 * @param width  framebuffer width in pixels
 * @param height framebuffer height in pixels
 * @throws InterruptedException retained for backward compatibility with
 *                              existing callers (no longer thrown here)
 */
public static void captureImage(int width, int height) throws InterruptedException {
    // glReadPixels returns rows bottom-up; they are flipped into top-down
    // order below before being copied into the bitmap.
    final IntBuffer pixelBuffer = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
    int[] pixelArray = pixelBuffer.array();

    // Convert the upside-down GL image to a right-side-up image.
    // Column order within a row is unchanged, so whole rows can be copied.
    final int[] pixelMirroredArray = new int[width * height];
    for (int row = 0; row < height; row++) {
        System.arraycopy(pixelArray, row * width,
                pixelMirroredArray, (height - row - 1) * width, width);
    }
    // NOTE(review): the previous version released and then immediately
    // acquired a Semaphore on the same thread here — a guaranteed no-op —
    // so that dead synchronization code has been removed.

    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
    saveBitmap(bitmap, new File(Environment.getExternalStorageDirectory(), "videoeditor/tmp.png"));
}
 
Example 2
Source File: MovieSegment.java    From PhotoMovie with Apache License 2.0 6 votes vote down vote up
/**
 * Grabs the current GL viewport contents into a new bitmap.
 *
 * Must run on the GL thread. The image is flipped vertically because
 * glReadPixels returns rows bottom-up while Bitmap rows are top-down.
 *
 * @return an ARGB_8888 bitmap sized to mViewportRect
 * @throws OutOfMemoryError if the pixel buffers cannot be allocated
 */
public Bitmap captureBitmap() throws OutOfMemoryError {
    final int w = (int) mViewportRect.width();
    final int h = (int) mViewportRect.height();

    final IntBuffer readBuffer = IntBuffer.allocate(w * h);
    GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, readBuffer);
    final int[] source = readBuffer.array();

    // Flip vertically: GL's bottom row becomes the bitmap's top row.
    final int[] flipped = new int[w * h];
    for (int row = 0; row < h; row++) {
        for (int col = 0; col < w; col++) {
            flipped[(h - row - 1) * w + col] = source[row * w + col];
        }
    }

    final Bitmap result = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(IntBuffer.wrap(flipped));
    return result;
}
 
Example 3
Source File: LanSongFileUtil.java    From WeiXinRecordedDemo with MIT License 5 votes vote down vote up
/**
 * Copies the raw ARGB_8888 pixel data in {@code buffer} into a new bitmap
 * of the given dimensions and saves it via {@code saveBitmap}.
 *
 * @param buffer pixel data holding at least width * height ints
 * @param width  bitmap width in pixels
 * @param height bitmap height in pixels
 * @return the path produced by saveBitmap
 */
public static String saveIntBuffer(IntBuffer buffer, int width, int height) {
    Bitmap target = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    // copyPixelsFromBuffer reads from the current position, so reset it first.
    buffer.position(0);
    target.copyPixelsFromBuffer(buffer);
    return saveBitmap(target);
}
 
Example 4
Source File: MadgeCanvas.java    From madge with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the canvas: a 2x2 repeating checkerboard alpha shader plus two
 * text paints (fill and stroke outline) for drawing scale values.
 */
public MadgeCanvas(Context context) {
  super(false);
  setColor(DEFAULT_COLOR);

  // 2x2 checkerboard alpha mask: opaque/transparent on one diagonal.
  // Indexed puts leave the buffer position at 0, which is where
  // copyPixelsFromBuffer starts reading.
  byte[] pattern = { (byte) 0xFF, (byte) 0x00, (byte) 0x00, (byte) 0xFF };
  ByteBuffer buffer = ByteBuffer.allocateDirect(pattern.length);
  for (int i = 0; i < pattern.length; i++) {
    buffer.put(i, pattern[i]);
  }
  Bitmap checker = Bitmap.createBitmap(2, 2, ALPHA_8);
  checker.copyPixelsFromBuffer(buffer);
  checkerboardPaint.setShader(new BitmapShader(checker, REPEAT, REPEAT));

  DisplayMetrics metrics = context.getResources().getDisplayMetrics();
  float textSize = TEXT_SIZE_DP * metrics.density;
  scaleValueOffset = textSize / 2;

  // Filled text layer.
  scaleValuePaintFill.setTextAlign(CENTER);
  scaleValuePaintFill.setStyle(FILL);
  scaleValuePaintFill.setTextSize(textSize);

  // Stroked text layer for contrast against the fill.
  scaleValuePaintStroke.setTextAlign(CENTER);
  scaleValuePaintStroke.setStyle(STROKE);
  scaleValuePaintStroke.setStrokeWidth(textSize * 0.10f); // 10% stroke.
  scaleValuePaintStroke.setTextSize(textSize);
  scaleValuePaintStroke.setAlpha(0x66); // 40% opacity.
}
 
Example 5
Source File: ScreenCapturer.java    From habpanelviewer with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Captures a single frame of the mirrored screen.
 *
 * Creates a one-shot VirtualDisplay backed by an ImageReader, waits up to
 * one second for a frame, and converts it to a bitmap. The bitmap may be
 * wider than mWidth when rows are padded (rowPadding / pixelStride extra
 * pixels per row); callers may crop.
 *
 * Fixes over the previous version: the ImageReader is now closed (it was
 * leaked), the Image is closed even on failure, a null Image from
 * acquireLatestImage is handled, and interruption re-sets the thread's
 * interrupt flag and preserves the cause.
 *
 * @return the captured frame as an ARGB_8888 bitmap
 * @throws IllegalStateException on timeout, interruption, or when no image
 *                               could be acquired
 */
public synchronized Bitmap captureScreen() throws IllegalStateException {
    AtomicReference<Image> imageHolder = new AtomicReference<>();
    final CountDownLatch latch = new CountDownLatch(1);

    ImageReader mImageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
    mImageReader.setOnImageAvailableListener(imageReader -> {
        imageHolder.set(mImageReader.acquireLatestImage());
        latch.countDown();
    }, mHandler);

    VirtualDisplay display = mProjection.createVirtualDisplay("screen-mirror", mWidth, mHeight, mDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mImageReader.getSurface(),
            null, null);

    try {
        latch.await(1, TimeUnit.SECONDS);

        if (latch.getCount() == 1) {
            throw new IllegalStateException("Screen capturing timed out");
        }

        final Image image = imageHolder.get();
        if (image == null) {
            // acquireLatestImage can return null even after the callback fired.
            throw new IllegalStateException("No image available");
        }
        try {
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            int pixelStride = planes[0].getPixelStride();
            int rowStride = planes[0].getRowStride();
            int rowPadding = rowStride - pixelStride * mWidth;

            // Widen the bitmap to absorb the per-row padding bytes.
            Bitmap bmp = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
            bmp.copyPixelsFromBuffer(buffer);
            return bmp;
        } finally {
            // The plane buffer is only valid while the image is open, so
            // close only after copyPixelsFromBuffer has consumed it.
            image.close();
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore the interrupt status
        throw new IllegalStateException("Got interrupt while capturing screen", e);
    } finally {
        display.release();
        mImageReader.close(); // previously leaked; frees the native buffers
    }
}
 
Example 6
Source File: ScreenCaptureSocket.java    From pc-android-controller-android with Apache License 2.0 5 votes vote down vote up
/**
 * Converts the first captured {@link Image} into a bitmap, then compresses
 * and writes it via compressAndWrite. Always returns null; results are
 * delivered through compressAndWrite, not the AsyncTask result.
 *
 * Fixes over the previous version: the Image was closed BEFORE its plane
 * buffer was read (reading a released buffer is invalid), and it was closed
 * twice. It is now closed exactly once, after the pixels have been copied.
 */
@Override
protected Bitmap doInBackground(Image... params) {

    if (params == null || params.length < 1 || params[0] == null) {

        L.e(" params is null ...");
        return null;
    }

    Image image = params[0];

    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    // Distance in bytes between adjacent pixels in a row.
    int pixelStride = planes[0].getPixelStride();
    // Distance in bytes between the starts of adjacent rows.
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;

    // Widen the bitmap to absorb row padding, then crop back to size.
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
    image.close(); // close only after the buffer has been consumed

    compressAndWrite(bitmap);

    return null;
}
 
Example 7
Source File: PhotoFilterView.java    From TelePlus-Android with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Copies the current render buffer into a new ARGB_8888 bitmap of
 * renderBufferWidth x renderBufferHeight (RGBA bytes, 4 bytes per pixel).
 */
private Bitmap getRenderBufferBitmap() {
    final int byteCount = renderBufferWidth * renderBufferHeight * 4;
    ByteBuffer pixels = ByteBuffer.allocateDirect(byteCount);
    GLES20.glReadPixels(0, 0, renderBufferWidth, renderBufferHeight,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    Bitmap result = Bitmap.createBitmap(renderBufferWidth, renderBufferHeight, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(pixels);
    return result;
}
 
Example 8
Source File: BitmapOutput.java    From EZFilter with MIT License 5 votes vote down vote up
@Override
    public void bufferOutput(IntBuffer buffer) {
        final int width = getWidth();
        final int height = getHeight();

        // Invalid dimensions: report failure to the callback and bail out.
        if (width <= 0 || height <= 0) {
            if (mCallback != null) {
                mCallback.bitmapOutput(null);
            }
            return;
        }

        // copyPixelsFromBuffer accepts the pixel data as-is, which is more
        // than twice as fast as manually converting each pixel from the
        // ABGR order produced by glReadPixels(GL_RGBA) into ARGB ints and
        // building the bitmap from an int[].
        Bitmap bitmap = null;
        try {
            bitmap = Bitmap.createBitmap(width, height, mConfig);
            bitmap.copyPixelsFromBuffer(IntBuffer.wrap(buffer.array()));
        } catch (OutOfMemoryError e) {
            e.printStackTrace();
            // bitmap stays null; the callback below reports the failure.
        }
        if (mCallback != null) {
            mCallback.bitmapOutput(bitmap);
        }
    }
 
Example 9
Source File: DefaultProjector.java    From DeviceConnect-Android with MIT License 5 votes vote down vote up
/**
 * Flips the bitmap vertically in place: row r is moved to row (height-1-r).
 *
 * @param b mutable bitmap to invert
 */
private void reverse(final Bitmap b) {
    final int width = b.getWidth();
    final int height = b.getHeight();
    final IntBuffer source = IntBuffer.allocate(width * height);
    final IntBuffer flipped = IntBuffer.allocate(width * height);
    b.copyPixelsToBuffer(source);
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            flipped.put((height - row - 1) * width + col, source.get(row * width + col));
        }
    }
    b.copyPixelsFromBuffer(flipped);
    source.clear();
}
 
Example 10
Source File: Painting.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Renders the painting into an offscreen framebuffer translated by
 * {@code rect}'s origin and reads the pixels back.
 *
 * @param rect region to capture; left/top shift the projection, and
 *             width/height size the capture texture and readback
 * @param undo when true, blits with the "nonPremultipliedBlit" shader and
 *             returns raw pixel data only; when false, uses "blit" and
 *             returns a Bitmap
 * @return PaintingData wrapping either a Bitmap (undo == false) or the
 *         shared dataBuffer (undo == true); null when shaders are missing
 */
public PaintingData getPaintingData(RectF rect, boolean undo) {
    int minX = (int) rect.left;
    int minY = (int) rect.top;
    int width = (int) rect.width();
    int height = (int) rect.height();

    // Create a scratch framebuffer with a width x height RGBA texture attached.
    // NOTE(review): these GL objects are not released on the early returns
    // below (shaders == null / shader == null) — potential leak; confirm.
    GLES20.glGenFramebuffers(1, buffers, 0);
    int framebuffer = buffers[0];
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer);

    GLES20.glGenTextures(1, buffers, 0);
    int texture = buffers[0];

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texture, 0);

    // NOTE(review): the viewport uses the full painting size rather than the
    // capture rect; presumably the projection matrix compensates — confirm.
    GLES20.glViewport(0, 0, (int) size.width, (int) size.height);

    if (shaders == null) {
        return null;
    }
    Shader shader = shaders.get(undo ? "nonPremultipliedBlit" : "blit");
    if (shader == null) {
        return null;
    }
    GLES20.glUseProgram(shader.program);

    // Shift the painting so that (minX, minY) lands at the origin.
    Matrix translate = new Matrix();
    translate.preTranslate(-minX, -minY);
    float effective[] = GLMatrix.LoadGraphicsMatrix(translate);
    float finalProjection[] = GLMatrix.MultiplyMat4f(projection, effective);

    GLES20.glUniformMatrix4fv(shader.getUniform("mvpMatrix"), 1, false, FloatBuffer.wrap(finalProjection));

    if (undo) {
        // Undo path: sample only the painting texture.
        GLES20.glUniform1i(shader.getUniform("texture"), 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, getTexture());
    } else {
        // NOTE(review): both binds target texture unit 0, so the second bind
        // (getTexture()) replaces bitmapTexture — confirm this is intended.
        GLES20.glUniform1i(shader.getUniform("texture"), 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapTexture.texture());

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, getTexture());
    }
    GLES20.glClearColor(0, 0, 0, 0);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    // Premultiplied-alpha blending for the blit.
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);

    // Quad: attribute 0 = position (vec2), attribute 1 = texcoord (vec2).
    GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
    GLES20.glEnableVertexAttribArray(0);
    GLES20.glVertexAttribPointer(1, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(1);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Read the rendered region into the shared dataBuffer (RGBA, 4 B/pixel).
    dataBuffer.limit(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, dataBuffer);

    PaintingData data;
    if (undo) {
        data = new PaintingData(null, dataBuffer);
    } else {
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(dataBuffer);

        data = new PaintingData(bitmap, null);
    }

    // Release the scratch framebuffer and texture.
    buffers[0] = framebuffer;
    GLES20.glDeleteFramebuffers(1, buffers, 0);

    buffers[0] = texture;
    GLES20.glDeleteTextures(1, buffers, 0);

    return data;
}
 
Example 11
Source File: ExtractMpegFramesTest.java    From Android-MediaCodec-Examples with Apache License 2.0 4 votes vote down vote up
/**
 * Saves the current frame to disk as a PNG image.
 *
 * glReadPixels fills mPixelBuf with RGBA bytes, which is exactly the layout
 * Bitmap#copyPixelsFromBuffer() expects for an ARGB_8888 bitmap, so the
 * pixels copy straight through without the costly B/R channel swap that an
 * int[]-based Bitmap constructor would require. GL frames are bottom-up by
 * convention; ExtractMpegFrameTest avoids the issue by inverting the frame
 * when rendering, so no flip happens here. mPixelBuf is allocated ahead of
 * time because large direct buffers are expensive to create per frame.
 *
 * @param filename destination path for the PNG
 * @throws IOException if the file cannot be written
 */
public void saveFrame(String filename) throws IOException {
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
        mPixelBuf);

    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filename))) {
        Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
        mPixelBuf.rewind(); // glReadPixels left the position past the data
        bmp.copyPixelsFromBuffer(mPixelBuf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    }
    if (VERBOSE) {
        Log.d(TAG, "Saved " + mWidth + "x" + mHeight + " frame as '" + filename + "'");
    }
}
 
Example 12
Source File: EglSurfaceBase.java    From MockCamera with Apache License 2.0 4 votes vote down vote up
/**
 * Saves the EGL surface to a file as a PNG.
 * <p/>
 * Expects that this object's EGL surface is current.
 *
 * @param file destination file
 * @throws IOException if the file cannot be written
 */
public void saveFrame(File file) throws IOException {
    if (!eglCore.isCurrent(eGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels fills the direct buffer with RGBA bytes, which is the
    // same layout Bitmap's "copy pixels" method wants, so no channel swap
    // is needed. The result is upside down relative to the screen because
    // of GL's bottom-up row convention.
    String filename = file.toString();
    int width = getWidth();
    int height = getHeight();

    ByteBuffer pixelBuf = ByteBuffer.allocateDirect(width * height * 4);
    pixelBuf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
    GlUtil.checkGlError("glReadPixels");
    pixelBuf.rewind();

    try (BufferedOutputStream stream = new BufferedOutputStream(new FileOutputStream(filename))) {
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(pixelBuf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, stream);
        bmp.recycle();
    }
    Log.i(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
 
Example 13
Source File: MagicCameraView.java    From TikTok with Apache License 2.0 4 votes vote down vote up
/**
 * Renders {@code bitmap} through the beauty filter (and the current display
 * filter, if any) into an offscreen framebuffer and reads the result back
 * as a new bitmap of the same size.
 *
 * Must run on the GL thread. The beauty filter is created/destroyed within
 * this call; the display filter's sizes are restored before returning.
 *
 * @param bitmap    source image to process
 * @param isRotated selects the texture-coordinate flip used for the draw
 * @return the filtered image as an ARGB_8888 bitmap
 */
private Bitmap drawPhoto(Bitmap bitmap, boolean isRotated){
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] mFrameBuffers = new int[1];
    int[] mFrameBufferTextures = new int[1];
    if(beautyFilter == null)
        beautyFilter = new MagicBeautyFilter();
    beautyFilter.init();
    beautyFilter.onDisplaySizeChanged(width, height);
    beautyFilter.onInputSizeChanged(width, height);

    if(filter != null) {
        filter.onInputSizeChanged(width, height);
        filter.onDisplaySizeChanged(width, height);
    }
    // Offscreen framebuffer with a width x height RGBA texture attached.
    GLES20.glGenFramebuffers(1, mFrameBuffers, 0);
    GLES20.glGenTextures(1, mFrameBufferTextures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0], 0);

    GLES20.glViewport(0, 0, width, height);
    // Upload the source bitmap as a GL texture.
    int textureId = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);

    // Vertex (cube) and texture-coordinate buffers for the draw quad.
    FloatBuffer
            gLCubeBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.CUBE_BAAB.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer();
    FloatBuffer gLTextureBuffer = ByteBuffer
            .allocateDirect(TextureRotationUtil.TEXTURE_NO_ROTATION.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer();
    gLCubeBuffer.put(TextureRotationUtil.CUBE_BAAB).position(0);
    // Vertical-flip choice depends on whether the source was rotated.
    if(isRotated)
        gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.NORMAL, false, false)).position(0);
    else
        gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.NORMAL, false, true)).position(0);


    if(filter == null){
        // Beauty filter only, straight into the framebuffer.
        beautyFilter.onDrawFrame(textureId, gLCubeBuffer, gLTextureBuffer);
    }else{
        // Two-pass: beauty filter first, then the display filter on top.
        beautyFilter.onDrawFrame(textureId);
        filter.onDrawFrame(mFrameBufferTextures[0], gLCubeBuffer, gLTextureBuffer);
    }
    // Read the rendered pixels back into a bitmap.
    IntBuffer ib = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
    Bitmap result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(ib);

    // Unbind and release all scratch GL objects.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
    GLES20.glDeleteFramebuffers(mFrameBuffers.length, mFrameBuffers, 0);
    GLES20.glDeleteTextures(mFrameBufferTextures.length, mFrameBufferTextures, 0);

    beautyFilter.destroy();
    beautyFilter = null;
    // Restore the display filter's sizes for normal on-screen rendering.
    if(filter != null) {
        filter.onDisplaySizeChanged(surfaceWidth, surfaceHeight);
        filter.onInputSizeChanged(imageWidth, imageHeight);
    }
    return result;
}
 
Example 14
Source File: OpenGlUtils.java    From TikTok with Apache License 2.0 4 votes vote down vote up
/**
 * Renders {@code bitmap} through {@code filter} into an offscreen
 * framebuffer and returns the result as a new ARGB_8888 bitmap.
 *
 * Must run on the GL thread. The filter's input size is changed during the
 * draw and reset to the display size before returning.
 *
 * @param bitmap        source image
 * @param filter        GPUImage filter to apply; null returns null
 * @param displayWidth  display width restored to the filter afterwards
 * @param displayHeight display height restored to the filter afterwards
 * @param rotate        when true, draws with a 90-degree-rotated,
 *                      horizontally flipped texture mapping
 * @return the filtered image, or null when filter is null
 */
public static Bitmap drawToBitmapByFilter(Bitmap bitmap, GPUImageFilter filter,
                                          int displayWidth, int displayHeight, boolean rotate){
    if(filter == null)
        return null;
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] mFrameBuffers = new int[1];
    int[] mFrameBufferTextures = new int[1];
    // Offscreen framebuffer with a width x height RGBA texture attached.
    GLES20.glGenFramebuffers(1, mFrameBuffers, 0);
    GLES20.glGenTextures(1, mFrameBufferTextures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0], 0);
    GLES20.glViewport(0, 0, width, height);
    filter.onInputSizeChanged(width, height);
    filter.onDisplaySizeChanged(displayWidth, displayHeight);
    // Upload the source bitmap as a GL texture.
    int textureId = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);
    if(rotate){
        // Custom vertex/texcoord buffers for the rotated draw.
        FloatBuffer gLCubeBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.CUBE.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        gLCubeBuffer.put(TextureRotationUtil.CUBE).position(0);

        FloatBuffer gLTextureBuffer = ByteBuffer
                .allocateDirect(TextureRotationUtil.TEXTURE_NO_ROTATION.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.ROTATION_90, true, false)).position(0);
        filter.onDrawFrame(textureId, gLCubeBuffer, gLTextureBuffer);
    }else {
        filter.onDrawFrame(textureId);
    }
    // Read the rendered pixels back into a bitmap.
    IntBuffer ib = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
    Bitmap result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(IntBuffer.wrap(ib.array()));
    // Release the scratch GL objects and restore the filter's input size.
    GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
    GLES20.glDeleteFramebuffers(1, mFrameBuffers, 0);
    GLES20.glDeleteTextures(1, mFrameBufferTextures, 0);
    filter.onInputSizeChanged(displayWidth, displayHeight);
    return result;
}
 
Example 15
Source File: HolographicOutlineHelper.java    From LaunchEnr with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Applies a more expensive and accurate outline to whatever is currently
 * drawn in the specified bitmap, replacing its contents in place.
 *
 * The bitmap's alpha channel is thresholded, blurred at several radii via
 * {@link Bitmap#extractAlpha}, and the layers are recomposited into
 * {@code srcDst}.
 *
 * @param srcDst       bitmap to read from and write the outline back into;
 *                     assumed single-byte-per-pixel alpha (the pixel copy
 *                     below uses width * height bytes) — TODO confirm
 * @param srcDstCanvas scratch canvas; its target bitmap is reassigned
 *                     during processing and cleared (set to null) at the end
 */
public void applyExpensiveOutlineWithBlur(Bitmap srcDst, Canvas srcDstCanvas) {

    // We start by removing most of the alpha channel so as to ignore shadows, and
    // other types of partial transparency when defining the shape of the object
    byte[] pixels = new byte[srcDst.getWidth() * srcDst.getHeight()];
    ByteBuffer buffer = ByteBuffer.wrap(pixels);
    buffer.rewind();
    srcDst.copyPixelsToBuffer(buffer);

    // Threshold: anything below ~74% alpha becomes fully transparent.
    for (int i = 0; i < pixels.length; i++) {
        if ((pixels[i] & 0xFF) < 188) {
            pixels[i] = 0;
        }
    }

    buffer.rewind();
    srcDst.copyPixelsFromBuffer(buffer);

    // calculate the outer blur first
    mBlurPaint.setMaskFilter(mMediumOuterBlurMaskFilter);
    int[] outerBlurOffset = new int[2];
    Bitmap thickOuterBlur = srcDst.extractAlpha(mBlurPaint, outerBlurOffset);

    mBlurPaint.setMaskFilter(mThinOuterBlurMaskFilter);
    int[] brightOutlineOffset = new int[2];
    Bitmap brightOutline = srcDst.extractAlpha(mBlurPaint, brightOutlineOffset);

    // calculate the inner blur (SRC_OUT inverts the shape first)
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0xFF000000, PorterDuff.Mode.SRC_OUT);
    mBlurPaint.setMaskFilter(mMediumInnerBlurMaskFilter);
    int[] thickInnerBlurOffset = new int[2];
    Bitmap thickInnerBlur = srcDst.extractAlpha(mBlurPaint, thickInnerBlurOffset);

    // mask out the inner blur so only the region inside the shape remains
    srcDstCanvas.setBitmap(thickInnerBlur);
    srcDstCanvas.drawBitmap(srcDst, -thickInnerBlurOffset[0],
            -thickInnerBlurOffset[1], mErasePaint);
    srcDstCanvas.drawRect(0, 0, -thickInnerBlurOffset[0], thickInnerBlur.getHeight(),
            mErasePaint);
    srcDstCanvas.drawRect(0, 0, thickInnerBlur.getWidth(), -thickInnerBlurOffset[1],
            mErasePaint);

    // draw the inner and outer blur back into the (cleared) source bitmap
    srcDstCanvas.setBitmap(srcDst);
    srcDstCanvas.drawColor(0, PorterDuff.Mode.CLEAR);
    srcDstCanvas.drawBitmap(thickInnerBlur, thickInnerBlurOffset[0], thickInnerBlurOffset[1],
            mDrawPaint);
    srcDstCanvas.drawBitmap(thickOuterBlur, outerBlurOffset[0], outerBlurOffset[1],
            mDrawPaint);

    // draw the bright outline
    srcDstCanvas.drawBitmap(brightOutline, brightOutlineOffset[0], brightOutlineOffset[1],
            mDrawPaint);

    // cleanup: detach the canvas and free the intermediate bitmaps
    srcDstCanvas.setBitmap(null);
    brightOutline.recycle();
    thickOuterBlur.recycle();
    thickInnerBlur.recycle();
}
 
Example 16
Source File: SampledImageReader.java    From PdfBox-Android with Apache License 2.0 4 votes vote down vote up
/**
 * Expands a 1-bit-per-pixel PDF image into an 8-bit ALPHA_8 bitmap and
 * converts it to RGB via the image's color space.
 *
 * Each source bit becomes one output byte (0 or 255); the decode array
 * determines which bit value maps to which byte. Rows in the source stream
 * are padded to whole bytes.
 *
 * @param pdImage 1-bit source image
 * @return the RGB conversion of the expanded raster
 * @throws IOException if the image stream cannot be read
 */
private static Bitmap from1Bit(PDImage pdImage) throws IOException
{
    final PDColorSpace colorSpace = pdImage.getColorSpace();
    final int width = pdImage.getWidth();
    final int height = pdImage.getHeight();
    Bitmap raster = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
    final float[] decode = getDecodeArray(pdImage);
    // Buffer sized to the raster's row stride; output holds one byte per pixel.
    ByteBuffer buffer = ByteBuffer.allocate(raster.getRowBytes() * height);
    raster.copyPixelsToBuffer(buffer);
    byte[] output = buffer.array();

    // read bit stream
    InputStream iis = null;
    try
    {
        // create stream
        iis = pdImage.createInputStream();
        final boolean isIndexed =
            false; // TODO: PdfBox-Android colorSpace instanceof PDIndexed;

        // Bytes per source row: width bits rounded up to a whole byte.
        int rowLen = width / 8;
        if (width % 8 > 0)
        {
            rowLen++;
        }

        // read stream
        // The decode array selects normal or inverted bit-to-byte mapping.
        byte value0;
        byte value1;
        if (isIndexed || decode[0] < decode[1])
        {
            value0 = 0;
            value1 = (byte) 255;
        }
        else
        {
            value0 = (byte) 255;
            value1 = 0;
        }
        byte[] buff = new byte[rowLen];
        int idx = 0;
        for (int y = 0; y < height; y++)
        {
            int x = 0;
            int readLen = iis.read(buff);
            // Unpack each byte MSB-first into up to 8 output bytes.
            for (int r = 0; r < rowLen && r < readLen; r++)
            {
                int value = buff[r];
                int mask = 128;
                for (int i = 0; i < 8; i++)
                {
                    int bit = value & mask;
                    mask >>= 1;
                    output[idx++] = bit == 0 ? value0 : value1;
                    x++;
                    if (x == width)
                    {
                        // Ignore padding bits at the end of the row.
                        break;
                    }
                }
            }
            if (readLen != rowLen)
            {
                // Short read (includes EOF, readLen == -1): keep what we have.
                Log.w("PdfBox-Android", "premature EOF, image will be incomplete");
                break;
            }
        }


        // Write the expanded bytes back into the raster.
        buffer.rewind();
        raster.copyPixelsFromBuffer(buffer);
        // use the color space to convert the image to RGB
        return colorSpace.toRGBImage(raster);
    } finally
    {
        if (iis != null)
        {
            iis.close();
        }
    }
}
 
Example 17
Source File: HeifReader.java    From heifreader with MIT License 4 votes vote down vote up
/**
 * Copies the first plane of an RGB_565 {@link Image} into a new bitmap of
 * the same dimensions.
 *
 * @param image decoded image whose plane 0 holds RGB_565 pixel data
 * @return a new RGB_565 bitmap with the image's pixels
 */
private static Bitmap convertRgb565ToBitmap(Image image) {
    final Bitmap result =
            Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
    result.copyPixelsFromBuffer(image.getPlanes()[0].getBuffer());
    return result;
}
 
Example 18
Source File: Painting.java    From TelePlus-Android with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Renders the current painting into an offscreen framebuffer covering {@code rect}
 * and reads the pixels back.
 *
 * @param rect region of the painting to capture, in painting coordinates
 * @param undo when {@code true}, returns the raw non-premultiplied RGBA buffer
 *             (for undo snapshots); when {@code false}, returns an ARGB_8888 Bitmap
 * @return the captured data, or {@code null} if the shaders are unavailable
 */
public PaintingData getPaintingData(RectF rect, boolean undo) {
    int minX = (int) rect.left;
    int minY = (int) rect.top;
    int width = (int) rect.width();
    int height = (int) rect.height();

    // Create an FBO backed by a fresh RGBA texture sized to the capture rect.
    GLES20.glGenFramebuffers(1, buffers, 0);
    int framebuffer = buffers[0];
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer);

    GLES20.glGenTextures(1, buffers, 0);
    int texture = buffers[0];

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texture, 0);

    GLES20.glViewport(0, 0, (int) size.width, (int) size.height);

    if (shaders == null) {
        // FIX: delete the framebuffer/texture generated above; the original
        // leaked both GL objects on this early-exit path.
        buffers[0] = framebuffer;
        GLES20.glDeleteFramebuffers(1, buffers, 0);
        buffers[0] = texture;
        GLES20.glDeleteTextures(1, buffers, 0);
        return null;
    }
    Shader shader = shaders.get(undo ? "nonPremultipliedBlit" : "blit");
    if (shader == null) {
        // FIX: same cleanup as above — don't leak the FBO/texture.
        buffers[0] = framebuffer;
        GLES20.glDeleteFramebuffers(1, buffers, 0);
        buffers[0] = texture;
        GLES20.glDeleteTextures(1, buffers, 0);
        return null;
    }
    GLES20.glUseProgram(shader.program);

    // Shift the projection so the capture rect maps onto the FBO's origin.
    Matrix translate = new Matrix();
    translate.preTranslate(-minX, -minY);
    float[] effective = GLMatrix.LoadGraphicsMatrix(translate);
    float[] finalProjection = GLMatrix.MultiplyMat4f(projection, effective);

    GLES20.glUniformMatrix4fv(shader.getUniform("mvpMatrix"), 1, false, FloatBuffer.wrap(finalProjection));

    if (undo) {
        GLES20.glUniform1i(shader.getUniform("texture"), 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, getTexture());
    } else {
        GLES20.glUniform1i(shader.getUniform("texture"), 0);

        // NOTE(review): this first bind is immediately overwritten by the
        // getTexture() bind below and appears to be dead; kept as-is to
        // preserve the original GL state sequence — confirm before removing.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapTexture.texture());

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, getTexture());
    }
    GLES20.glClearColor(0, 0, 0, 0);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);

    // Full-screen quad: position in attribute 0, texcoords in attribute 1.
    GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
    GLES20.glEnableVertexAttribArray(0);
    GLES20.glVertexAttribPointer(1, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(1);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Read the rendered RGBA pixels back into the shared scratch buffer.
    dataBuffer.limit(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, dataBuffer);

    PaintingData data;
    if (undo) {
        data = new PaintingData(null, dataBuffer);
    } else {
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(dataBuffer);

        data = new PaintingData(bitmap, null);
    }

    // Normal-path cleanup of the temporary GL objects.
    buffers[0] = framebuffer;
    GLES20.glDeleteFramebuffers(1, buffers, 0);

    buffers[0] = texture;
    GLES20.glDeleteTextures(1, buffers, 0);

    return data;
}
 
Example 19
Source File: Thumbnailer.java    From VCL-Android with Apache License 2.0 4 votes vote down vote up
/**
 * Thread main function: consumes queued {@link MediaWrapper} items and creates
 * a grid-card-sized thumbnail for each, storing the result via
 * {@code MediaDatabase.setPicture}. Blocks on {@code notEmpty} while the queue
 * is empty and exits when interrupted (requested by {@code stop()}).
 */
    @Override
    public void run() {
        int count = 0;
        int total = 0;

        Log.d(TAG, "Thumbnailer started");
mainloop:
        while (!isStopping) {
            lock.lock();
            // Get the id of the file browser item to create its thumbnail.
            while (mItems.size() == 0) {
                try {
                    // Queue drained: hide progress UI and wait for new work.
                    if (mVideoBrowser != null && mVideoBrowser.get() != null) {
                        mVideoBrowser.get().hideProgressBar();
                        mVideoBrowser.get().clearTextInfo();
                    }
                    mTotalCount = 0;
                    notEmpty.await();
                } catch (InterruptedException e) {
                    Log.i(TAG, "interruption probably requested by stop()");
                    // Must release the lock before leaving the outer loop.
                    lock.unlock();
                    break mainloop;
                }
            }
            total = mTotalCount;
            MediaWrapper item = mItems.poll();
            lock.unlock();

            // Report progress (current item name, position in the batch).
            if (mVideoBrowser != null && mVideoBrowser.get() != null) {
                mVideoBrowser.get().showProgressBar();
                mVideoBrowser.get().sendTextInfo(String.format("%s %s", mPrefix, item.getFileName()), count, total);
            }
            count++;
            if (item.getArtworkURL() != null)
                continue; //no need for thumbnail, we have a cover

            int width = (VLCApplication.getAppResources().getDimensionPixelSize(R.dimen.grid_card_thumb_width));
            int height = (VLCApplication.getAppResources().getDimensionPixelSize(R.dimen.grid_card_thumb_height));

            //Get bitmap
            byte[] b = VLCUtil.getThumbnail(VLCInstance.get(), item.getUri(), width, height);

            if (b == null) {// We were not able to create a thumbnail for this item, store a dummy
                MediaDatabase.setPicture(item, Bitmap.createBitmap(1, 1, Config.ARGB_8888));
                continue;
            }

            // Create the bitmap
            Bitmap thumbnail = Bitmap.createBitmap(width, height, Config.ARGB_8888);

            // NOTE(review): assumes VLCUtil returns exactly width*height*4 bytes
            // of ARGB_8888 pixel data — confirm against the VLC JNI contract.
            thumbnail.copyPixelsFromBuffer(ByteBuffer.wrap(b));

            Log.i(TAG, "Thumbnail created for " + item.getFileName());

            MediaDatabase.setPicture(item, thumbnail);
            // Post to the file browser the new item.
            if (mVideoBrowser != null && mVideoBrowser.get() != null) {
                mVideoBrowser.get().setItemToUpdate(item);
            }
        }
        /* cleanup */
        if (mVideoBrowser != null && mVideoBrowser.get() != null) {
            mVideoBrowser.get().hideProgressBar();
            mVideoBrowser.get().clearTextInfo();
            mVideoBrowser.clear();
        }
        Log.d(TAG, "Thumbnailer stopped");
    }
 
Example 20
Source File: Painting.java    From Telegram-FOSS with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Renders the current painting into an offscreen framebuffer covering {@code rect}
 * and reads the pixels back.
 *
 * @param rect region of the painting to capture, in painting coordinates
 * @param undo when {@code true}, returns the raw non-premultiplied RGBA buffer
 *             (for undo snapshots); when {@code false}, returns an ARGB_8888 Bitmap
 * @return the captured data, or {@code null} if the shaders are unavailable
 */
public PaintingData getPaintingData(RectF rect, boolean undo) {
    int minX = (int) rect.left;
    int minY = (int) rect.top;
    int width = (int) rect.width();
    int height = (int) rect.height();

    // Create an FBO backed by a fresh RGBA texture sized to the capture rect.
    GLES20.glGenFramebuffers(1, buffers, 0);
    int framebuffer = buffers[0];
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer);

    GLES20.glGenTextures(1, buffers, 0);
    int texture = buffers[0];

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_NEAREST);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texture, 0);

    GLES20.glViewport(0, 0, (int) size.width, (int) size.height);

    if (shaders == null) {
        // FIX: delete the framebuffer/texture generated above; the original
        // leaked both GL objects on this early-exit path.
        buffers[0] = framebuffer;
        GLES20.glDeleteFramebuffers(1, buffers, 0);
        buffers[0] = texture;
        GLES20.glDeleteTextures(1, buffers, 0);
        return null;
    }
    Shader shader = shaders.get(undo ? "nonPremultipliedBlit" : "blit");
    if (shader == null) {
        // FIX: same cleanup as above — don't leak the FBO/texture.
        buffers[0] = framebuffer;
        GLES20.glDeleteFramebuffers(1, buffers, 0);
        buffers[0] = texture;
        GLES20.glDeleteTextures(1, buffers, 0);
        return null;
    }
    GLES20.glUseProgram(shader.program);

    // Shift the projection so the capture rect maps onto the FBO's origin.
    Matrix translate = new Matrix();
    translate.preTranslate(-minX, -minY);
    float[] effective = GLMatrix.LoadGraphicsMatrix(translate);
    float[] finalProjection = GLMatrix.MultiplyMat4f(projection, effective);

    GLES20.glUniformMatrix4fv(shader.getUniform("mvpMatrix"), 1, false, FloatBuffer.wrap(finalProjection));

    GLES20.glUniform1i(shader.getUniform("texture"), 0);

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, getTexture());

    GLES20.glClearColor(0, 0, 0, 0);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    // Straight copy: source replaces destination with no blending.
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ZERO);

    // Full-screen quad: position in attribute 0, texcoords in attribute 1.
    GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
    GLES20.glEnableVertexAttribArray(0);
    GLES20.glVertexAttribPointer(1, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(1);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Read the rendered RGBA pixels back into the shared scratch buffer.
    dataBuffer.limit(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, dataBuffer);

    PaintingData data;
    if (undo) {
        data = new PaintingData(null, dataBuffer);
    } else {
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(dataBuffer);

        data = new PaintingData(bitmap, null);
    }

    // Normal-path cleanup of the temporary GL objects.
    buffers[0] = framebuffer;
    GLES20.glDeleteFramebuffers(1, buffers, 0);

    buffers[0] = texture;
    GLES20.glDeleteTextures(1, buffers, 0);

    return data;
}