EVS Code tracing

source: vendor/nxp-opensource/imx/evs

Questions to answer:

How does a GraphicBuffer get from the camera to the display with zero copy?
How does OpenGL render into offscreen memory?
What are the risks of Gralloc + offset?

1. How the camera buffers are allocated

EvsEnumerator::openCamera(const hidl_string& cameraId) =>
 
void EvsCamera::openup(const char *deviceName)
{
    onOpen(deviceName);
    onMemoryCreate(); // allocates the three capture buffers, mBuffers[3]
}
void EvsCamera::onMemoryCreate()
{
    fsl::Memory *buffer = nullptr;
    fsl::MemoryManager* allocator = fsl::MemoryManager::getInstance();
    fsl::MemoryDesc desc;
    desc.mWidth = mWidth;
    desc.mHeight = mHeight;
    desc.mFormat = mFormat;
    desc.mFslFormat = mFormat;
    desc.mProduceUsage |= fsl::USAGE_HW_TEXTURE
            | fsl::USAGE_HW_RENDER | fsl::USAGE_HW_VIDEO_ENCODER;
    desc.mFlag = 0;
    int ret = desc.checkFormat();
    if (ret != 0) {
        ALOGE("%s checkFormat failed", __func__);
        return;
    }
 
    for (int i = 0; i < CAMERA_BUFFER_NUM; i++) { // CAMERA_BUFFER_NUM == 3
 
        allocator->allocMemory(desc, &buffer);
        mBuffers[i] = buffer;
    }
}

2. Start video

Return<EvsResult> EvsCamera::startVideoStream(
        const ::android::sp<IEvsCameraStream>& stream)
{
        mStream = stream;
        mEvsAppRecipient = new EvsAppRecipient(this);
        appRecipient = mEvsAppRecipient;
        onStart(); // see V4l2Capture::onStart()
        create_thread(collectFrames);
}
 
bool V4l2Capture::onStart()
{
    int fd = mDeviceFd;
    struct v4l2_requestbuffers bufrequest;
    memset(&bufrequest, 0, sizeof(bufrequest));
    bufrequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // buffer of a multi-planar video capture stream
    bufrequest.memory = V4L2_MEMORY_DMABUF;
    bufrequest.count = CAMERA_BUFFER_NUM; // 3
    ioctl(fd, VIDIOC_REQBUFS, &bufrequest);

    fsl::Memory *buffer = nullptr;
    for (int i = 0; i < CAMERA_BUFFER_NUM; i++) {
        buffer = mBuffers[i];
        struct v4l2_plane plane;
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        memset(&plane, 0, sizeof(plane));
        // If the application sets bytesused to 0 for an output stream, the driver
        // sets it to the size of the plane (see the length field of this struct).
        plane.bytesused   = 0;
        plane.length      = buffer->size;
        plane.m.fd        = buffer->fd; // the fd alone carries no offset
        plane.data_offset = buffer->GET_OFFSET(); // added for the CIF issue
        // data_offset: offset in bytes to the video data in the plane. Drivers must set
        // this field for a capture stream, applications for an output stream.

        buf.index    = i;
        buf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buf.memory   = V4L2_MEMORY_DMABUF;
        buf.m.planes = &plane;
        buf.length   = 1; // for the multi-planar API, the number of elements in the planes array

        // Queue the capture buffer
        ioctl(fd, VIDIOC_QBUF, &buf);
    }

    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctl(fd, VIDIOC_STREAMON, &type); // start streaming once all buffers are queued
}
 
 
void EvsCamera::collectFrames()
{
    fsl::Memory *buffer = nullptr;
    int index = -1;
    // Run until our atomic signal is cleared
    while (runMode == RUN) {
        // Wait for a buffer to be ready
        buffer = onFrameCollect(index);
        if (buffer != nullptr) {
            forwardFrame(buffer, index);
        }
    }
}
// Update the state of mBuffers after each camera frame is received
void EvsCamera::forwardFrame(fsl::Memory* buffer, int index)
{
    mStream->deliverFrame(buffer);
    onFrameReturn(index);
}
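
collectFrames() blocks in onFrameCollect(), which is not shown above. A minimal sketch of the V4L2 dequeue side, assuming the same mBuffers indexing as onStart() (member and helper names here are illustrative, not the verbatim NXP code):

// Hypothetical sketch of V4l2Capture::onFrameCollect: dequeue the next filled
// buffer with VIDIOC_DQBUF and map its index back onto mBuffers.
fsl::Memory* V4l2Capture::onFrameCollect(int& index)
{
    struct v4l2_plane plane;
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    memset(&plane, 0, sizeof(plane));
    buf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buf.memory   = V4L2_MEMORY_DMABUF;
    buf.m.planes = &plane;
    buf.length   = 1;

    // Blocks until the driver has filled one of the queued buffers
    if (ioctl(mDeviceFd, VIDIOC_DQBUF, &buf) < 0) {
        index = -1;
        return nullptr;
    }
    index = buf.index;          // same index used at QBUF time
    return mBuffers[buf.index]; // zero copy: this is the dma-buf the driver just wrote
}

onFrameReturn(index) is then the mirror image: it re-queues the same buffer with VIDIOC_QBUF so the driver can fill it again.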
 
Return<void> StreamHandler::deliverFrame(const BufferDesc& buffer) {
    mReadyBuffer // index of an empty slot
    mBuffers[mReadyBuffer] = buffer;
    mSignal.notify_all();
    /* Note
    BufferDesc mBuffers[3];
    mReadyBuffer // buffer most recently written by the camera
    mHeldBuffer  // buffer currently in use by the display
    */
}
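
The consumer side is getNewFrame(), called later from VideoTex::refresh(). A sketch of the hand-off, assuming the two-index scheme described in the note above (the locking details are illustrative):

// Hypothetical sketch: the renderer takes the most recent ready buffer and
// holds it; the previously held buffer is returned to the camera for reuse.
BufferDesc StreamHandler::getNewFrame()
{
    std::unique_lock<std::mutex> lock(mLock);
    mSignal.wait(lock, [this] { return mReadyBuffer >= 0; });

    if (mHeldBuffer >= 0) {
        // Give the old frame back so the camera can refill it
        mCamera->doneWithFrame(mBuffers[mHeldBuffer]);
    }
    mHeldBuffer  = mReadyBuffer; // display now owns the newest frame
    mReadyBuffer = -1;           // slot is free for the next deliverFrame()
    return mBuffers[mHeldBuffer];
}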

Ref: V4L2 3.6. Buffers — The Linux Kernel documentation

3. How to allocate a buffer: Display/MemoryManager.cpp

int MemoryManager::allocMemory(MemoryDesc& desc, Memory** out)
{
    Memory *handle = NULL;
    int ret = 0;

    if (isDrmAlloc(desc.mFlag, desc.mFslFormat, desc.mProduceUsage)) {
        ret = mGPUAlloc->alloc(mGPUAlloc, desc.mWidth, desc.mHeight, desc.mFormat,
                (int)desc.mProduceUsage, (buffer_handle_t *)&handle, &desc.mStride);
        handle->fslFormat = desc.mFslFormat;
        allocMetaData(handle);
        *out = handle;
        return ret;
    }
    mIonManager = new IonManager();
    ret = mIonManager->allocMemory(desc, &handle);
    allocMetaData(handle);
    retainMemory(handle) {
        mIonManager->getVaddrs(handle) {
            handle->base = IonAllocator->getVaddr(handle->fd, handle->size) {
                // base is simply the vaddr of the start of the buffer:
                // void* vaddr = mmap(0, handle->size, PROT_READ|PROT_WRITE, MAP_SHARED, handle->fd, 0);
            }
        }
        handle->fbId = 0;
        handle->fbHandle = 0;
    }
    *out = handle;
    return 0;
}
struct Memory : public native_handle
{
    static inline int sNumInts() {
        return (((sizeof(Memory) - sizeof(native_handle_t))/sizeof(int)) - sNumFds);
    }
    static const int sNumFds = 2;
};
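
Memory is a flavor of native_handle: the first sNumFds entries after the header are file descriptors (for sNumFds = 2, presumably the dma-buf fd plus a metadata fd), and the remaining struct fields travel as plain ints. That layout is what lets the handle cross HIDL process boundaries. A small sketch of how a receiver can pull the dma-buf fd back out (the assumption that data[0] is the shared fd follows from the definition above):

// A native_handle is laid out as: header | numFds fds | numInts ints.
// For fsl::Memory, sNumFds == 2, so data[0] is assumed to be the dma-buf fd.
#include <cutils/native_handle.h>

int getDmaBufFd(const native_handle_t* h)
{
    if (h == nullptr || h->numFds < 1) {
        return -1;
    }
    return h->data[0]; // first fd slot: the ion/dma-buf shared fd
}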
int IonManager::allocMemory(MemoryDesc& desc, Memory** out)
{
    desc.mFlag |= FLAGS_ALLOCATION_ION;
    int ret = desc.checkFormat();

    unsigned char *ptr = NULL;
    int sharedFd = -1;
    Memory* memory = NULL;
    int align = ION_MEM_ALIGN;
    int flags = MFLAGS_CONTIGUOUS;

    if (desc.mProduceUsage & (USAGE_SW_READ_OFTEN | USAGE_SW_WRITE_OFTEN)) {
        flags |= MFLAGS_CACHEABLE;
    }
    // Allocate the space via ion (mAllocator is an IonAllocator*)
    sharedFd = mAllocator->allocMemory(desc.mSize, align, flags) {
        heapIds: decided by flags;
        mIonFd = ion_open();
        ion_flags |= ION_FLAG_CACHED if (flags & MFLAGS_CACHEABLE);
        ion_alloc_fd(mIonFd, size, align, heapIds, ion_flags, &sharedFd);
    }
    // Wrap it into a Memory
    memory = new Memory(&desc, sharedFd, -1) {
        .fd = dup(sharedFd); // dup the fd to share the memory
        .size = desc->mSize;
        .offset = 0;
    }
    getPhys(memory) { memory->phys = mAllocator->getPhys(handle->fd, handle->size); }
    // Usage is checked and the offset added here; i.MX8 has no AFBC size check
    *out = memory;
    close(sharedFd);

    return 0;
}
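
This offset is where the "Gralloc + offset" question from the top of the page bites: the shared fd always points at the start of the ion allocation, so every consumer must honor handle->offset itself. A consumer that maps the fd and ignores the offset reads the wrong pixels; one that passes the offset straight to mmap() hits the page-alignment requirement. A hedged sketch of the safe mapping pattern (the helper name is illustrative, not from the NXP source):

#include <sys/mman.h>
#include <unistd.h>

// Hypothetical helper: map a buffer that lives `offset` bytes into the
// dma-buf behind `fd`. mmap offsets must be page aligned, so round down
// and compensate in the returned pointer.
void* mapWithOffset(int fd, size_t size, off_t offset)
{
    long page = sysconf(_SC_PAGESIZE);
    off_t aligned = offset & ~(page - 1); // page-aligned mmap offset
    off_t delta = offset - aligned;       // remainder inside the page
    void* base = mmap(NULL, size + delta, PROT_READ | PROT_WRITE,
                      MAP_SHARED, fd, aligned);
    if (base == MAP_FAILED) {
        return NULL;
    }
    return (char*)base + delta;           // actual start of the pixels
}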

4. How the display side sets up its buffers:
Three 1280x720 RGBA8888 buffers are created when the Display is initialized.

EvsDisplay::EvsDisplay()
{
    ALOGD("EvsDisplay instantiated");
 
    // Set up our self description
    // NOTE:  These are arbitrary values chosen for testing
    mInfo.displayId   = "evs hal Display";
    mInfo.vendorFlags = 3870;
 
    mWidth  = DISPLAY_WIDTH;  // 1280
    mHeight = DISPLAY_HEIGHT; // 720
    mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
 
    initialize() {
        nxp::hardware::display::V1_0::IDisplay mDisplay = IDisplay::getService();
        // Ask IDisplay for an empty layer id, set up with DISPLAY_BUFFER_NUM (3) slots
        mDisplay->getLayer(DISPLAY_BUFFER_NUM, &mLayerId);

        desc.mProduceUsage |= fsl::USAGE_HW_TEXTURE | fsl::USAGE_HW_RENDER | fsl::USAGE_HW_VIDEO_ENCODER;
        for (int i = 0; i < DISPLAY_BUFFER_NUM; i++) {
            buffer = nullptr;
            allocator->allocMemory(desc, &buffer); // allocator is the fsl::MemoryManager instance
            mBuffers[i] = buffer;
        }
    }
}

5. Display thread

void EvsStateControl::updateLoop() {
    while (run) {
        // Process the command queue
        // Review vehicle state and choose an appropriate renderer
        selectStateForCurrentConditions(); // switch cameras if events changed; explained later
        if (mCurrentRenderer) {
            BufferDesc tgtBuffer;
            mDisplay->getTargetBuffer(&tgtBuffer); // IEvsDisplay: fetch a display buffer
            if (tgtBuffer) {
                mCurrentRenderer->drawFrame(tgtBuffer); // e.g. RenderTopView::drawFrame
                // Send the finished image back for display
                mDisplay->returnTargetBufferForDisplay(tgtBuffer);
            }
        }
    }
}
// Fetch a ready buffer
void EvsDisplay::getTargetBuffer(getTargetBuffer_cb hbuf)
{
    mDisplay->getSlot(mLayerId, &slot); // IDisplay: get a slot of this layer whose state is free
    buffer = mBuffers[slot]; // mBuffers holds the display-sized buffers allocated at init
    hbuf = buffer;
    hbuf.bufferId = buffer->id;
}
EvsResult EvsDisplay::returnTargetBufferForDisplay(const BufferDesc& buffer)
{
    // Add the buffer and its slot number to the present queue
    mDisplay->presentLayer(mLayer, buffer.bufferId, mBuffers[buffer.bufferId]);
    return EvsResult::OK;
}

6. drawFrame

RenderTopView {vector<ActiveCamera> mActiveCameras;} renders the parking / multi-camera view.
RenderDirectView {CameraInfo mCameraInfo;} renders the view from a single specified camera directly to the full display.
Both use essentially the same rendering technique.

bool RenderTopView::drawFrame(const BufferDesc& tgtBuffer) {
    // Wrap the empty display buffer as a KHR image; see RenderBase::attachRenderTarget
    attachRenderTarget(tgtBuffer);
    // Refresh our video texture contents.  We do it all at once in hopes of getting
    // better coherence among images.  This does not guarantee synchronization, of course...
    // Wrap the new camera buffers as mKHRimage; see VideoTex::refresh
    for (auto&& cam : mActiveCameras) {
        if (cam.tex) {
            cam.tex->refresh(); // VideoTex::refresh()
        }
    }

    // Iterate over all the cameras and project their images onto the ground plane,
    // i.e. draw the camera buffers onto the display
    for (auto&& cam : mActiveCameras) {
        renderCameraOntoGroundPlane(cam);
    }
    // Draw the car image
    renderCarTopView();

    // Wait for the rendering to finish
    glFinish();
}
// Returns true if the texture contents changed
// Re-wraps the camera buffer as a texture: mKHRimage
bool VideoTex::refresh() {
    // Get the new image we want to use as our contents
    mImageBuffer = mStreamHandler->getNewFrame(); // private handle delivered by the camera

    // Clone the camera buffer's handle and wrap it in a GraphicBuffer. No pixel copy.
    sp<GraphicBuffer> pGfxBuffer = new GraphicBuffer(
            mImageBuffer.memHandle,       // wraps memHandle
            GraphicBuffer::CLONE_HANDLE,  // importBuffer: clones a second handle
            mImageBuffer.width, mImageBuffer.height,
            mImageBuffer.format, 1,       // layer count
            GRALLOC_USAGE_HW_TEXTURE,
            mImageBuffer.stride);
    EGLint eglImageAttributes[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE};
    EGLClientBuffer clientBuf = static_cast<EGLClientBuffer>(pGfxBuffer->getNativeBuffer());
    // Does this return base + offset? It returns the private handle wrapped as an
    // ANativeWindowBuffer, which is not itself a dma-buf handle. So how is the real address reached?
    // ANativeWindowBuffer holds a native_handle_t* handle plus width/height/stride/format/usage/layerCount,
    // so the actual address is still resolved through ion.

    // Wrap the camera contents (clientBuf) as a KHR image and bind it to a texture id
    mKHRimage = eglCreateImageKHR(mDisplay, EGL_NO_CONTEXT,
                                  EGL_NATIVE_BUFFER_ANDROID, clientBuf,
                                  eglImageAttributes);
    glBindTexture(GL_TEXTURE_2D, glId()); // the texture is referenced by this id from now on
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, static_cast<GLeglImageOES>(mKHRimage));
}
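
To make the question in the comments concrete: the dma-buf fd never disappears; it rides inside the native_handle_t that the ANativeWindowBuffer carries, which is how the GPU driver (or a debugging tool) can still reach the pixels. A hedged debug sketch of peeking at a camera frame from the CPU side, reusing the getDmaBufFd() helper sketched in section 3 (the dump function is hypothetical):

#include <sys/mman.h>
#include <system/window.h>

// Hypothetical debug sketch: resolve the actual pixel memory behind the
// ANativeWindowBuffer that getNativeBuffer() returned.
void dumpFirstPixels(ANativeWindowBuffer* anwb, size_t size)
{
    int fd = getDmaBufFd(anwb->handle); // helper from section 3: data[0] of the handle
    if (fd < 0) return;

    // Map the dma-buf; any gralloc offset would still have to be honored here
    void* vaddr = mmap(NULL, size, PROT_READ, MAP_SHARED, fd, 0);
    if (vaddr == MAP_FAILED) return;
    // ... inspect pixels at vaddr ...
    munmap(vaddr, size);
}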
 
bool RenderBase::attachRenderTarget(const BufferDesc& tgtBuffer) {
 
    if (tgtBuffer.format != HAL_PIXEL_FORMAT_RGBA_8888) {
        return false;
    }
 
    // Create a GraphicBuffer from the existing handle
    sp<GraphicBuffer> pGfxBuffer = new GraphicBuffer(tgtBuffer.memHandle,
            GraphicBuffer::CLONE_HANDLE, tgtBuffer.width, tgtBuffer.height,
            tgtBuffer.format, 1, // layer count
            GRALLOC_USAGE_HW_RENDER, tgtBuffer.stride);
 
    // Get a GL compatible reference to the graphics buffer we've been given
    EGLint eglImageAttributes[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE};
    EGLClientBuffer clientBuf = static_cast<EGLClientBuffer>(pGfxBuffer->getNativeBuffer());
    sKHRimage = eglCreateImageKHR(sDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, clientBuf,
                eglImageAttributes);
    if (sKHRimage == EGL_NO_IMAGE_KHR) {
        ALOGE("error creating EGLImage for target buffer: %s", getEGLError());
        return false;
    }
 
    // Construct a render buffer around the external buffer:
    // the KHR image becomes the storage of the renderbuffer known as sColorBuffer
    glBindRenderbuffer(GL_RENDERBUFFER, sColorBuffer);
    glEGLImageTargetRenderbufferStorageOES(GL_RENDERBUFFER, static_cast<GLeglImageOES>(sKHRimage));
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, sColorBuffer);
    // Store the size of our target buffer
    sWidth = tgtBuffer.width;
    sHeight = tgtBuffer.height;
    sAspectRatio = (float)sWidth / sHeight;
    glViewport(0, 0, sWidth, sHeight);
 
#if 1   // We don't actually need the clear if we're going to cover the whole screen anyway
    // Clear the color buffer
    glClearColor(0.8f, 0.1f, 0.2f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
#endif
    return true;
}
 
 
// Render the camera buffer into the render target; because the FBO's color attachment
// is the EGLImage wrapping the display buffer, glDrawArrays writes straight into gralloc memory
void RenderTopView::renderCameraOntoGroundPlane(const ActiveCamera& cam) {
    glUseProgram(mPgmAssets.projectedTexture);
    GLuint texId = cam.tex->glId();
    glBindTexture(GL_TEXTURE_2D, texId);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

7. Supplement: EGL extension #8 (EGL_KHR_image_base):
https://www.khronos.org/registry/EGL/extensions/KHR/EGL_KHR_image_base.txt

This extension defines a new EGL resource type that is suitable for sharing 2D arrays of image data between client APIs, the EGLImage.

   Although the intended purpose is sharing 2D image data, the underlying interface makes no assumptions about the format or
   purpose of the resource being shared, leaving those decisions to the application and associated client APIs.


EGLImage: An opaque handle to a shared resource created by EGL client APIs, presumably a 2D array of image data

EGLImageKHR is an object which can be used to create EGLImage target resources (inside client APIs).

EGLImageKHR eglCreateImageKHR(EGLDisplay dpy,
                              EGLContext ctx,
                              EGLenum target,
                              EGLClientBuffer buffer,
                              const EGLint *attrib_list)

   Creates an EGLImage from an existing image resource <buffer>.
   <ctx> specifies the EGL client API context used for this operation, or EGL_NO_CONTEXT if a client API context is not required.


OES_EGL_image

This extension provides a mechanism for creating texture and renderbuffer objects sharing storage with specified EGLImage objects.


void EGLImageTargetTexture2DOES(enum target, eglImageOES image);

defines an entire two-dimensional texture array.

All properties of the texture images (including width, height, format, border, mipmap levels of detail, and image data) are taken from the specified eglImageOES <image>, rather than from the client or the framebuffer.


void EGLImageTargetRenderbufferStorageOES(enum target, eglImageOES image)

establishes the data storage, format, and dimensions of a renderbuffer object's image, using the parameters and storage associated with the eglImageOES <image>.


8. How cameras are listed:
source: vendor/nxp-opensource/imx/evs/EvsEnumerator.cpp

bool EvsEnumerator::EnumAvailableVideo() {
    DIR* dir = opendir("/dev");
    for each entry in dir {
        if (entry->d_name starts with "video" && qualifyCaptureDevice()) {
            deviceName = entry->d_name;
            enum_name = first line of "/sys/class/video4linux/<d_name>/name";
            sCameraList += (CameraRecord.name = enum_name, CameraRecord.cameraId = deviceName);
            captureCount++;
        }
    }
    if (captureCount > 0) {
        property_set(EVS_VIDEO_READY /* "vendor.evs.video.ready" */, "1");
    }
}
 
bool qualifyCaptureDevice() {
    // The device qualifies if its caps and pixel format satisfy the following:
    ioctl(fd, VIDIOC_QUERYCAP, &caps);
    caps includes (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_STREAMING);
    ioctl(fd, VIDIOC_ENUM_FMT, &formatDescription);
    formatDescription.pixelformat is one of:
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_NV16,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_RGB32,
        V4L2_PIX_FMT_ARGB32,
        V4L2_PIX_FMT_XRGB32
}
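
Expanded into runnable form, the check might look like this (a sketch that follows the pseudocode above; error handling trimmed, not the verbatim EvsEnumerator.cpp):

#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

bool qualifyCaptureDevice(const char* devName)
{
    int fd = open(devName, O_RDWR, 0);
    if (fd < 0) return false;

    bool ok = false;
    struct v4l2_capability caps;
    if (ioctl(fd, VIDIOC_QUERYCAP, &caps) == 0 &&
        (caps.capabilities & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_STREAMING))
            == (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_STREAMING)) {
        // Walk the supported formats until one of the accepted ones shows up
        struct v4l2_fmtdesc fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        while (!ok && ioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
            switch (fmt.pixelformat) {
            case V4L2_PIX_FMT_YUYV:
            case V4L2_PIX_FMT_NV21:
            case V4L2_PIX_FMT_NV16:
            case V4L2_PIX_FMT_YVU420:
            case V4L2_PIX_FMT_RGB32:
            case V4L2_PIX_FMT_ARGB32:
            case V4L2_PIX_FMT_XRGB32:
                ok = true;
                break;
            }
            fmt.index++;
        }
    }
    close(fd);
    return ok;
}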
9. When to open camera
source: packages/services/Car/evs/app/
 
bool EvsStateControl::selectStateForCurrentConditions() {
    mGearValue.value.int32Values.setToExternal(&sDummyGear, 1);
    mTurnSignalValue.value.int32Values.setToExternal(&sDummySignal, 1);
    if (mGearValue.value.int32Values[0] == int32_t(VehicleGear::GEAR_REVERSE)) {
        desiredState = REVERSE; // 1
    } else if (mTurnSignalValue.value.int32Values[0] == int32_t(VehicleTurnSignal::RIGHT)) {
        desiredState = RIGHT;   // 3
    } else if (mTurnSignalValue.value.int32Values[0] == int32_t(VehicleTurnSignal::LEFT)) {
        desiredState = LEFT;    // 2
    } else if (mGearValue.value.int32Values[0] == int32_t(VehicleGear::GEAR_PARK)) {
        desiredState = PARKING;
    }
    if (desiredState == PARKING || mCameraList[desiredState].size() > 1) {
        mCurrentRenderer = std::make_unique<RenderTopView>();
    } else {
        mCurrentRenderer = std::make_unique<RenderDirectView>();
    }
    mCurrentRenderer->activate();
        ==> VideoTex::createVideoTexture(pEnum, evsCameraId, EGLDisplay glDisplay);
            ==> <IEvsCamera> pCamera = pEnum->openCamera(evsCameraId);
}
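
activate() ends in createVideoTexture(), which is where the camera is actually opened and the stream started. A condensed sketch of that helper, assuming the structure of the AOSP evs app's VideoTex.cpp (error handling trimmed):

// Condensed sketch of createVideoTexture: open the camera, hook up the
// StreamHandler (section 2), start streaming, wrap it all in a VideoTex.
VideoTex* createVideoTexture(sp<IEvsEnumerator> pEnum,
                             const char* evsCameraId,
                             EGLDisplay glDisplay) {
    // Open the requested camera through the enumerator
    sp<IEvsCamera> pCamera = pEnum->openCamera(evsCameraId);
    if (pCamera.get() == nullptr) {
        return nullptr;
    }

    // The StreamHandler receives the deliverFrame() callbacks
    sp<StreamHandler> pStreamHandler = new StreamHandler(pCamera);

    // Kick off the video stream: EvsCamera::startVideoStream on the HAL side
    if (!pStreamHandler->startStream()) {
        return nullptr;
    }
    return new VideoTex(pEnum, pCamera, pStreamHandler, glDisplay);
}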

10. How to configure camera order
source: packages/services/Car/evs/app/

int main(int argc, char** argv)
{
    // Load our configuration information
    ConfigManager config;
    config.initialize("/system/etc/automotive/evs/config.json");
    EvsStateControl *pStateController = new EvsStateControl(pVnet, pEvs, pDisplay, config);
    pStateController->startUpdateLoop();
    pEvsListener->run(pStateController);
}
 
EvsStateControl::EvsStateControl(android::sp<IVehicle>       pVnet,
                                 android::sp<IEvsEnumerator> pEvs,
                                 android::sp<IEvsDisplay>    pDisplay,
                                 const ConfigManager&        config) :
    mConfig(config)
{
    for (CameraInfo info : config.getCameras()) {
        if (info.function.find("reverse") != std::string::npos) {
            mCameraList[State::REVERSE].push_back(info);
        }
        if (info.function.find("right") != std::string::npos) {
            mCameraList[State::RIGHT].push_back(info);
        }
        if (info.function.find("left") != std::string::npos) {
            mCameraList[State::LEFT].push_back(info);
        }
        if (info.function.find("park") != std::string::npos) {
            mCameraList[State::PARKING].push_back(info);
        }
    }
}
 
 
struct CameraInfo {
        std::string cameraId = "";  // The name of the camera from the point of view of the HAL
        std::string function = "";  // The expected use for this camera ("reverse", "left", "right")
        float position[3] = {0};    // x, y, z -> right, fwd, up in the units of car space
        float yaw   = 0;    // radians positive to the left (right hand rule about global z axis)
        float pitch = 0;    // positive upward (ie: right hand rule about local x axis)
        float hfov  = 0;    // radians
        float vfov  = 0;    // radians
};
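
Note the unit mismatch between this struct (yaw/pitch/hfov/vfov in radians) and the sample config below (plain degrees such as "yaw" : 180, "hfov" : 125): the values are converted when the JSON is loaded. Roughly (a sketch; the conversion is assumed to happen in ConfigManager at parse time, and the node[] accessors are illustrative):

#include <math.h>

// Degrees in config.json, radians in CameraInfo: converted on load.
static float degToRad(float deg) {
    return deg * M_PI / 180.0f;
}

// e.g. while filling a CameraInfo from a parsed JSON camera node:
// info.yaw   = degToRad(node["yaw"]);   // 180 deg -> 3.14159 rad
// info.pitch = degToRad(node["pitch"]);
// info.hfov  = degToRad(node["hfov"]);
// info.vfov  = degToRad(node["vfov"]);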

Sample configuration file

config.json 
{
  "car" : { "width"  : 76.7, "wheelBase" : 117.9, "frontExtent" : 44.7,"rearExtent" : 40  },
  "display" : {"frontRange" : 100,"rearRange" : 100},
  "graphic" : {"frontPixel" : 23,"rearPixel" : 223},
  "cameras" : [
     {
      "cameraId" : "/dev/video32",
      "function" : "reverse,park",
      "x" : 0.0,
      "y" : -40.0,
      "z" : 48,
      "yaw" : 180,
      "pitch" : -30,
      "hfov" : 125,
      "vfov" :103
    },
    {
      "cameraId" : "/dev/video45",
      "function" : "front,park",
      "x" : 0.0,
      "y" : 100.0,
      "z" : 48,
      "yaw" : 0,
      "pitch" : -10,
      "hfov" : 70,
      "vfov" : 43
    },
    {
      "cameraId" : "/dev/video0",
      "function" : "right,park",
      "x" : 36.0,
      "y" : 60.0,
      "z" : 32,
      "yaw" : -90,
      "pitch" : -30,
      "hfov" : 60,
      "vfov" : 42
    }
  ]
}