Android - Streaming a CustomView (ARCore) with Twilio Video



A problem occurs when I try to stream a custom view using the Twilio Video API together with ARCore: basically, it streams a black screen.
I use the ViewCapturer class from the example linked in the official documentation, https://github.com/twilio/video-quickstart-android/tree/master/exampleCustomVideoCapturer, but it does not work with ARCore, most likely because the ArFragment renders into a SurfaceView.

Thanks for your support.

activity_camera.xml

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/container"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".CameraArActivity">

    <fragment
        android:id="@+id/ux_fragment"
        android:name="com.google.ar.sceneform.ux.ArFragment"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

    <android.support.v7.widget.RecyclerView
        android:id="@+id/recycler_view"
        android:layout_width="match_parent"
        android:layout_height="100dp"
        android:layout_alignParentBottom="true"
        android:background="#c100a5a0"
        android:visibility="gone" />

    <ImageButton
        android:id="@+id/btnCloseChat"
        android:layout_width="24dp"
        android:layout_height="24dp"
        android:layout_alignParentBottom="true"
        android:layout_alignParentEnd="true"
        android:layout_marginBottom="86dp"
        android:layout_marginEnd="13dp"
        android:background="@android:color/transparent"
        android:contentDescription="Close chat button"
        android:src="@drawable/ic_close_black_24dp"
        android:visibility="gone" />

</RelativeLayout>
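For context, the ArSceneView passed to the capturer below comes from this fragment. A minimal sketch of how the fragment might be looked up, assuming CameraArActivity extends AppCompatActivity and mArFragment is the field referenced later:

// In CameraArActivity.onCreate(), after setContentView(R.layout.activity_camera).
// Sketch only: assumes a field "private ArFragment mArFragment;" and the import
// com.google.ar.sceneform.ux.ArFragment.
mArFragment = (ArFragment) getSupportFragmentManager().findFragmentById(R.id.ux_fragment);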

The line where the LocalVideoTrack is created:

screenVideoTrack = LocalVideoTrack.create(CameraArActivity.this, true, new ViewCapturer(mArFragment.getArSceneView()));
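For completeness, a hedged sketch of how a track like this is typically published to a Twilio Room; the access token, room name, and listener are placeholders, and builder method names may differ slightly between Twilio Video Android SDK versions:

// Sketch: connect to a Room and publish the AR scene track (placeholder names).
ConnectOptions connectOptions = new ConnectOptions.Builder(accessToken)
        .roomName("ar-demo-room")
        .videoTracks(Collections.singletonList(screenVideoTrack))
        .build();
Room room = Video.connect(CameraArActivity.this, connectOptions, roomListener);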

And the ViewCapturer class:

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.view.View;

import com.twilio.video.VideoCapturer;
import com.twilio.video.VideoDimensions;
import com.twilio.video.VideoFormat;
import com.twilio.video.VideoFrame;
import com.twilio.video.VideoPixelFormat;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * ViewCapturer demonstrates how to implement a custom {@link VideoCapturer}. This class
 * captures the contents of a provided view and signals the {@link VideoCapturer.Listener} when
 * the frame is available.
 */
public class ViewCapturer implements VideoCapturer {
    private static final int VIEW_CAPTURER_FRAMERATE_MS = 100;

    private final View view;
    private Handler handler = new Handler(Looper.getMainLooper());
    private VideoCapturer.Listener videoCapturerListener;
    private AtomicBoolean started = new AtomicBoolean(false);

    private final Runnable viewCapturer = new Runnable() {
        @Override
        public void run() {
            boolean dropFrame = view.getWidth() == 0 || view.getHeight() == 0;

            // Only capture the view if the dimensions have been established
            if (!dropFrame) {
                // Draw view into bitmap backed canvas
                int measuredWidth = View.MeasureSpec.makeMeasureSpec(view.getWidth(),
                        View.MeasureSpec.EXACTLY);
                int measuredHeight = View.MeasureSpec.makeMeasureSpec(view.getHeight(),
                        View.MeasureSpec.EXACTLY);
                view.measure(measuredWidth, measuredHeight);
                view.layout(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight());
                Bitmap viewBitmap = Bitmap.createBitmap(view.getWidth(), view.getHeight(),
                        Bitmap.Config.ARGB_8888);
                Canvas viewCanvas = new Canvas(viewBitmap);
                view.draw(viewCanvas);

                // Extract the frame from the bitmap
                int bytes = viewBitmap.getByteCount();
                ByteBuffer buffer = ByteBuffer.allocate(bytes);
                viewBitmap.copyPixelsToBuffer(buffer);
                byte[] array = buffer.array();
                final long captureTimeNs =
                        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

                // Create video frame
                VideoDimensions dimensions = new VideoDimensions(view.getWidth(), view.getHeight());
                VideoFrame videoFrame = new VideoFrame(array,
                        dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs);

                // Notify the listener
                if (started.get()) {
                    videoCapturerListener.onFrameCaptured(videoFrame);
                }
            }

            // Schedule the next capture
            if (started.get()) {
                handler.postDelayed(this, VIEW_CAPTURER_FRAMERATE_MS);
            }
        }
    };

    public ViewCapturer(View view) {
        this.view = view;
    }

    /**
     * Returns the list of supported formats for this view capturer. Currently, only supports
     * capturing to RGBA_8888 bitmaps.
     *
     * @return list of supported formats.
     */
    @Override
    public List<VideoFormat> getSupportedFormats() {
        List<VideoFormat> videoFormats = new ArrayList<>();
        VideoDimensions videoDimensions = new VideoDimensions(view.getWidth(), view.getHeight());
        VideoFormat videoFormat = new VideoFormat(videoDimensions, 30, VideoPixelFormat.RGBA_8888);
        videoFormats.add(videoFormat);
        return videoFormats;
    }

    /**
     * Returns true because we are capturing screen content.
     */
    @Override
    public boolean isScreencast() {
        return true;
    }

    /**
     * This will be invoked when it is time to start capturing frames.
     *
     * @param videoFormat the video format of the frames to be captured.
     * @param listener capturer listener.
     */
    @Override
    public void startCapture(VideoFormat videoFormat, Listener listener) {
        // Store the capturer listener
        this.videoCapturerListener = listener;
        this.started.set(true);

        // Notify capturer API that the capturer has started
        boolean capturerStarted = handler.postDelayed(viewCapturer,
                VIEW_CAPTURER_FRAMERATE_MS);
        this.videoCapturerListener.onCapturerStarted(capturerStarted);
    }

    /**
     * Stop capturing frames. Note that the SDK cannot receive frames once this has been invoked.
     */
    @Override
    public void stopCapture() {
        this.started.set(false);
        handler.removeCallbacks(viewCapturer);
    }
}

A second version of the ViewCapturer, which uses PixelCopy to read the SurfaceView contents directly:

package com.bitdrome.dionigi.eragle.utils;

import android.graphics.Bitmap;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.view.PixelCopy;
import android.view.SurfaceView;
import android.view.View;

import com.twilio.video.VideoCapturer;
import com.twilio.video.VideoDimensions;
import com.twilio.video.VideoFormat;
import com.twilio.video.VideoFrame;
import com.twilio.video.VideoPixelFormat;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * ViewCapturer demonstrates how to implement a custom {@link VideoCapturer}. This class
 * captures the contents of a provided view and signals the {@link VideoCapturer.Listener} when
 * the frame is available.
 */
public class ViewCapturer implements VideoCapturer, PixelCopy.OnPixelCopyFinishedListener {
    private static int VIEW_CAPTURER_FRAMERATE_MS = 10;

    private final View view;
    private Bitmap viewBitmap;
    private Handler handler = new Handler(Looper.getMainLooper());
    private Handler handlerPixelCopy = new Handler(Looper.getMainLooper());
    private VideoCapturer.Listener videoCapturerListener;
    private AtomicBoolean started = new AtomicBoolean(false);

    public ViewCapturer(View view) {
        this(view, 24);
    }

    public ViewCapturer(View view, int framePerSecond) {
        if (framePerSecond <= 0)
            throw new IllegalArgumentException("framePerSecond must be greater than 0");
        this.view = view;
        float tmp = (1f / framePerSecond) * 1000;
        VIEW_CAPTURER_FRAMERATE_MS = Math.round(tmp);
    }

    private final Runnable viewCapturer = new Runnable() {
        @Override
        public void run() {
            boolean dropFrame = view.getWidth() == 0 || view.getHeight() == 0;

            // Only capture the view if the dimensions have been established
            if (!dropFrame) {
                // Draw view into bitmap backed canvas
                int measuredWidth = View.MeasureSpec.makeMeasureSpec(view.getWidth(),
                        View.MeasureSpec.EXACTLY);
                int measuredHeight = View.MeasureSpec.makeMeasureSpec(view.getHeight(),
                        View.MeasureSpec.EXACTLY);
                view.measure(measuredWidth, measuredHeight);
                view.layout(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight());
                viewBitmap = Bitmap.createBitmap(view.getWidth(), view.getHeight(),
                        Bitmap.Config.ARGB_8888);
                try {
                    PixelCopy.request((SurfaceView) view, viewBitmap, ViewCapturer.this, handlerPixelCopy);
                } catch (IllegalArgumentException e) {
                    // Ignore requests made while the surface is not yet valid
                }
            }
        }
    };

    /**
     * Returns the list of supported formats for this view capturer. Currently, only supports
     * capturing to RGBA_8888 bitmaps.
     *
     * @return list of supported formats.
     */
    @Override
    public List<VideoFormat> getSupportedFormats() {
        List<VideoFormat> videoFormats = new ArrayList<>();
        VideoDimensions videoDimensions = new VideoDimensions(view.getWidth(), view.getHeight());
        VideoFormat videoFormat = new VideoFormat(videoDimensions, 30, VideoPixelFormat.RGBA_8888);
        videoFormats.add(videoFormat);
        return videoFormats;
    }

    /**
     * Returns true because we are capturing screen content.
     */
    @Override
    public boolean isScreencast() {
        return true;
    }

    /**
     * This will be invoked when it is time to start capturing frames.
     *
     * @param videoFormat the video format of the frames to be captured.
     * @param listener    capturer listener.
     */
    @Override
    public void startCapture(VideoFormat videoFormat, Listener listener) {
        // Store the capturer listener
        this.videoCapturerListener = listener;
        this.started.set(true);

        // Notify capturer API that the capturer has started
        boolean capturerStarted = handler.postDelayed(viewCapturer,
                VIEW_CAPTURER_FRAMERATE_MS);
        this.videoCapturerListener.onCapturerStarted(capturerStarted);
    }

    /**
     * Stop capturing frames. Note that the SDK cannot receive frames once this has been invoked.
     */
    @Override
    public void stopCapture() {
        this.started.set(false);
        handler.removeCallbacks(viewCapturer);
    }

    @Override
    public void onPixelCopyFinished(int copyResult) {
        // Extract the frame from the bitmap
        int bytes = viewBitmap.getByteCount();
        ByteBuffer buffer = ByteBuffer.allocate(bytes);
        viewBitmap.copyPixelsToBuffer(buffer);
        byte[] array = buffer.array();
        final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

        // Create video frame
        VideoDimensions dimensions = new VideoDimensions(view.getWidth(), view.getHeight());
        VideoFrame videoFrame = new VideoFrame(array,
                dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs);

        // Notify the listener
        if (started.get()) {
            videoCapturerListener.onFrameCaptured(videoFrame);
        }

        // Schedule the next capture
        if (started.get()) {
            handler.postDelayed(viewCapturer, VIEW_CAPTURER_FRAMERATE_MS);
        }
    }
}

Solution:

For anyone who needs to stream ARCore through Twilio Video:

In your ARCore renderer class:

// Uses OpenCV (org.opencv.core.Mat, CvType, Core) and android.opengl.GLES20
@Override
public void onDrawFrame(GL10 gl) {
    ....
    this.takeLastFrame();
}

private byte[] takeLastFrame() {
    int height = this.mFrameHeight;
    int width = this.mFrameWidth;
    Mat input = new Mat(height, width, CvType.CV_8UC4);
    ByteBuffer buffer = ByteBuffer.allocate(input.rows() * input.cols() * input.channels());
    GLES20.glReadPixels(0, 0, width, height,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
    input.put(0, 0, buffer.array());
    Core.rotate(input, input, Core.ROTATE_180);
    Core.flip(input, input, 1);
    return convertMatToBytes(input);
}

private byte[] convertMatToBytes(Mat image) {
    int bufferSize = image.channels() * image.cols() * image.rows();
    byte[] b = new byte[bufferSize];
    image.get(0, 0, b);
    return b;
}
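One caveat worth noting: glReadPixels only returns valid data on the GL thread (inside onDrawFrame), while the Twilio capturer polls from a Handler on the main thread. A possible way to bridge the two, shown here as a sketch with hypothetical names rather than as part of the original answer, is to cache the most recent frame on the GL thread and let takeLastFrame() hand out that cached copy:

// Hypothetical caching variant (not from the original answer):
// the GL thread produces frames, other threads read the latest copy.
private volatile byte[] mLastFrame;

@Override
public void onDrawFrame(GL10 gl) {
    // ... existing rendering code ...
    mLastFrame = readPixelsIntoByteArray(); // hypothetical name for the glReadPixels/OpenCV logic above
}

// Called from the capturer thread; returns the most recent cached frame (may be null)
public byte[] takeLastFrame() {
    return mLastFrame;
}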

In your custom capturer class:

byte[] array = view.takeLastFrame();
if (array != null && array.length > 0) {
    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

    // Create video frame
    VideoDimensions dimensions = new VideoDimensions(view.getFrameWidth(), view.getFrameHeight());
    VideoFrame videoFrame = new VideoFrame(array,
            dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs);

    // Notify the listener
    if (started.get()) {
        videoCapturerListener.onFrameCaptured(videoFrame);
    }
}
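To show where that snippet sits, here is a sketch of the capturer's periodic Runnable with the Canvas/PixelCopy capture replaced by the renderer call. It assumes the capturer is constructed with the renderer object, so that view here exposes takeLastFrame(), getFrameWidth() and getFrameHeight():

// Sketch: periodic capture loop pulling frames from the ARCore renderer
private final Runnable viewCapturer = new Runnable() {
    @Override
    public void run() {
        byte[] array = view.takeLastFrame();
        if (array != null && array.length > 0) {
            final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
            VideoDimensions dimensions = new VideoDimensions(view.getFrameWidth(), view.getFrameHeight());
            VideoFrame videoFrame = new VideoFrame(array, dimensions,
                    VideoFrame.RotationAngle.ROTATION_0, captureTimeNs);
            if (started.get()) {
                videoCapturerListener.onFrameCaptured(videoFrame);
            }
        }
        // Schedule the next capture
        if (started.get()) {
            handler.postDelayed(this, VIEW_CAPTURER_FRAMERATE_MS);
        }
    }
};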