obs-scene: scene graphics rendering (render)

2024-06-05

The graphics-rendering function of an obs-scene is scene_video_render; it is registered as the scene source's video_render callback:

.video_render = scene_video_render,
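
That assignment sits inside the scene's obs_source_info registration in obs-scene.c. An abridged sketch of that structure (most fields omitted; treat the exact flags and callback names as approximate):

const struct obs_source_info scene_info = {
    .id = "scene",
    .type = OBS_SOURCE_TYPE_SCENE,
    .output_flags = OBS_SOURCE_VIDEO | OBS_SOURCE_COMPOSITE, /* abridged */
    .create = scene_create,
    .destroy = scene_destroy,
    .video_tick = scene_video_tick,
    .video_render = scene_video_render, /* the function examined below */
    /* ... remaining callbacks omitted ... */
};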

scene_video_render looks like this:

static void scene_video_render(void *data, gs_effect_t *effect)
{
    obs_scene_item_ptr_array_t remove_items;
    struct obs_scene *scene = data;
    struct obs_scene_item *item;

    da_init(remove_items);
    video_lock(scene);
    if (!scene->is_group) {
        update_transforms_and_prune_sources(scene, &remove_items, NULL);
    }

    gs_blend_state_push();
    gs_reset_blend_state();

    // Starting from the scene's first item, walk the list and call render_item on each one to render the children
    item = scene->first_item;
    while (item) {

        // user_visible indicates whether the item is visible
        if (item->user_visible || transition_active(item->hide_transition))
            render_item(item);

        item = item->next;
    }

    gs_blend_state_pop();
    video_unlock(scene);

    // Release the items pruned above; obs_sceneitem_release drops the reference held on each of them
    for (size_t i = 0; i < remove_items.num; i++)
        obs_sceneitem_release(remove_items.array[i]);
    da_free(remove_items);

    UNUSED_PARAMETER(effect);
}
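
The first_item/next walk above is the same item list that plugin code can traverse through the public API via obs_scene_enum_items. As a small illustration (the callback name and logging are hypothetical, the API calls are real):

static bool log_item(obs_scene_t *scene, obs_sceneitem_t *item, void *param)
{
    obs_source_t *src = obs_sceneitem_get_source(item);
    blog(LOG_INFO, "item: %s, visible: %d", obs_source_get_name(src),
         (int)obs_sceneitem_visible(item));

    UNUSED_PARAMETER(scene);
    UNUSED_PARAMETER(param);
    return true; /* keep enumerating */
}

/* with a valid obs_scene_t *scene: */
obs_scene_enum_items(scene, log_item, NULL);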

The render_item function is fairly large:

static inline void render_item(struct obs_scene_item *item)
{
    GS_DEBUG_MARKER_BEGIN_FORMAT(GS_DEBUG_COLOR_ITEM, "Item: %s",obs_source_get_name(item->source));

    const bool use_texrender = item_texture_enabled(item);

    obs_source_t *const source = item->source;
    const enum gs_color_space current_space = gs_get_color_space();
    const enum gs_color_space source_space = obs_source_get_color_space(source, 1, &current_space);
    const enum gs_color_format format = gs_get_format_from_space(source_space);

    // Destroy the old texrender if texture rendering is disabled or the format no longer matches
    if (item->item_render && (!use_texrender || (gs_texrender_get_format(item->item_render) != format)))
    {
        gs_texrender_destroy(item->item_render);
        item->item_render = NULL;
    }

    // Create a new texrender texture if needed
    if (!item->item_render && use_texrender)
    {
        item->item_render = gs_texrender_create(format, GS_ZS_NONE);
    }

    // Render the item into that texture
    if (item->item_render)
    {
        uint32_t width = obs_source_get_width(item->source);
        uint32_t height = obs_source_get_height(item->source);

        if (!width || !height) {
            goto cleanup;
        }

        uint32_t cx = calc_cx(item, width);
        uint32_t cy = calc_cy(item, height);

        // Use gs_texrender_begin_with_color_space to switch the render target to item->item_render
        if (cx && cy && gs_texrender_begin_with_color_space(item->item_render, cx, cy, source_space))
        {
            float cx_scale = (float)width / (float)cx;
            float cy_scale = (float)height / (float)cy;
            struct vec4 clear_color;

            vec4_zero(&clear_color);
            gs_clear(GS_CLEAR_COLOR, &clear_color, 0.0f, 0);
            gs_ortho(0.0f, (float)width, 0.0f, (float)height, -100.0f, 100.0f);

            gs_matrix_scale3f(cx_scale, cy_scale, 1.0f);
            gs_matrix_translate3f(-(float)(item->crop.left + item->bounds_crop.left),
                                  -(float)(item->crop.top + item->bounds_crop.top),
                                  0.0f);

            if (item->user_visible && transition_active(item->show_transition))
            {
                const int cx = obs_source_get_width(item->source);
                const int cy = obs_source_get_height(item->source);
                obs_transition_set_size(item->show_transition, cx, cy);
                obs_source_video_render(item->show_transition);
            } else if (!item->user_visible && transition_active(item->hide_transition))
            {
                const int cx = obs_source_get_width(item->source);
                const int cy = obs_source_get_height(item->source);
                obs_transition_set_size(item->hide_transition, cx, cy);
                obs_source_video_render(item->hide_transition);
            } else {
                obs_source_set_texcoords_centered(item->source, true);
                // Render the source itself
                obs_source_video_render(item->source);
                obs_source_set_texcoords_centered(item->source, false);
            }

            gs_texrender_end(item->item_render);
        }
    }

    const bool linear_srgb = !item->item_render || (item->blend_method != OBS_BLEND_METHOD_SRGB_OFF);
    const bool previous = gs_set_linear_srgb(linear_srgb);
    gs_matrix_push();
    gs_matrix_mul(&item->draw_transform);
    if (item->item_render) {
        render_item_texture(item, current_space, source_space);
    } else if (item->user_visible && transition_active(item->show_transition)) {
        const int cx = obs_source_get_width(item->source);
        const int cy = obs_source_get_height(item->source);
        obs_transition_set_size(item->show_transition, cx, cy);
        obs_source_video_render(item->show_transition);
    } else if (!item->user_visible && transition_active(item->hide_transition)) {
        const int cx = obs_source_get_width(item->source);
        const int cy = obs_source_get_height(item->source);
        obs_transition_set_size(item->hide_transition, cx, cy);
        obs_source_video_render(item->hide_transition);
    } else {
        const bool centered = are_texcoords_centered(&item->draw_transform);
        obs_source_set_texcoords_centered(item->source, centered);
        obs_source_video_render(item->source);
        obs_source_set_texcoords_centered(item->source, false);
    }
    gs_matrix_pop();
    gs_set_linear_srgb(previous);

cleanup:
    GS_DEBUG_MARKER_END();
}
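
Stripped of the item-specific details, render_item's texrender usage follows the usual libobs render-to-texture pattern: reset, begin, draw into the offscreen target, end, then composite the resulting texture. A minimal sketch of that pattern (assuming a valid graphics context; the helper name is illustrative):

static void render_source_to_texture(obs_source_t *source, uint32_t cx, uint32_t cy,
                                     gs_texrender_t *tr)
{
    gs_texrender_reset(tr);
    if (gs_texrender_begin(tr, cx, cy)) {
        struct vec4 clear_color;
        vec4_zero(&clear_color);
        gs_clear(GS_CLEAR_COLOR, &clear_color, 0.0f, 0);
        gs_ortho(0.0f, (float)cx, 0.0f, (float)cy, -100.0f, 100.0f);

        obs_source_video_render(source); /* draw into the offscreen target */
        gs_texrender_end(tr);
    }

    gs_texture_t *tex = gs_texrender_get_texture(tr);
    if (tex) {
        gs_effect_t *effect = obs_get_base_effect(OBS_EFFECT_DEFAULT);
        gs_effect_set_texture(gs_effect_get_param_by_name(effect, "image"), tex);
        while (gs_effect_loop(effect, "Draw"))
            gs_draw_sprite(tex, 0, cx, cy); /* composite the result */
    }
}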

render_item calls obs_source_video_render to render the item's source:

void obs_source_video_render(obs_source_t *source)
{
    if (!obs_source_valid(source, "obs_source_video_render"))
        return;

    source = obs_source_get_ref(source);
    if (source) {
        render_video(source);
        obs_source_release(source);
    }
}
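
The obs_source_get_ref / obs_source_release pair is the standard way to keep a source alive while it is being used, even if another thread drops its own reference in the meantime. A minimal usage sketch (the surrounding helper is hypothetical):

static void log_source_size(obs_source_t *maybe_released)
{
    obs_source_t *src = obs_source_get_ref(maybe_released);
    if (!src)
        return; /* the source has already been destroyed */

    /* we hold our own reference, so src stays valid here */
    blog(LOG_INFO, "%s: %ux%u", obs_source_get_name(src),
         obs_source_get_width(src), obs_source_get_height(src));

    obs_source_release(src);
}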

The render_video function:


static inline void render_video(obs_source_t *source)
{
    if (source->info.type != OBS_SOURCE_TYPE_FILTER &&
        (source->info.output_flags & OBS_SOURCE_VIDEO) == 0) {
        if (source->filter_parent)
            obs_source_skip_video_filter(source);
        return;
    }

    if (source->info.type == OBS_SOURCE_TYPE_INPUT &&
        (source->info.output_flags & OBS_SOURCE_ASYNC) != 0 &&
        !source->rendering_filter) {
        if (deinterlacing_enabled(source))
            deinterlace_update_async_video(source);
        obs_source_update_async_video(source);
    }

    if (!source->context.data || !source->enabled) {
        if (source->filter_parent)
            obs_source_skip_video_filter(source);
        return;
    }

    GS_DEBUG_MARKER_BEGIN_FORMAT(GS_DEBUG_COLOR_SOURCE,
                     get_type_format(source->info.type),
                     obs_source_get_name(source));

    if (source->filters.num && !source->rendering_filter)
        obs_source_render_filters(source);

    else if (source->info.video_render)
        obs_source_main_render(source); // render via the source's own video_render callback

    else if (source->filter_target)
        obs_source_video_render(source->filter_target);

    else if (deinterlacing_enabled(source))
        deinterlace_render(source);
    else
        obs_source_render_async_video(source);

    GS_DEBUG_MARKER_END();
}

render_video ends up invoking the obs_source's video_render callback. That happens in obs_source_main_render, which internally calls source_render:

static inline void obs_source_main_render(obs_source_t *source)
{
    uint32_t flags = source->info.output_flags;
    bool custom_draw = (flags & OBS_SOURCE_CUSTOM_DRAW) != 0;
    bool srgb_aware = (flags & OBS_SOURCE_SRGB) != 0;
    bool default_effect = !source->filter_parent && source->filters.num == 0 && !custom_draw;
    bool previous_srgb = false;

    if (!srgb_aware) {
        previous_srgb = gs_get_linear_srgb();
        gs_set_linear_srgb(false);
    }

    if (default_effect) {
        obs_source_default_render(source);
    } else if (source->context.data) {
        source_render(source, custom_draw ? NULL : gs_get_effect());
    }

    if (!srgb_aware)
        gs_set_linear_srgb(previous_srgb);
}
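
For the default_effect branch, obs_source_default_render wraps the callback in the default effect's "Draw" technique. The snippet below is a sketch reconstructed from the same technique/pass loop seen in source_render further down, so treat the details as approximate rather than as the exact libobs code:

static void obs_source_default_render_sketch(obs_source_t *source)
{
    gs_effect_t *effect = obs->video.default_effect;
    gs_technique_t *tech = gs_effect_get_technique(effect, "Draw");

    size_t passes = gs_technique_begin(tech);
    for (size_t i = 0; i < passes; i++) {
        gs_technique_begin_pass(tech, i);
        if (source->context.data)
            source->info.video_render(source->context.data, effect);
        gs_technique_end_pass(tech);
    }
    gs_technique_end(tech);
}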

source_render does the actual rendering:

static void source_render(obs_source_t *source, gs_effect_t *effect)
{
    void *const data = source->context.data;
    const enum gs_color_space current_space = gs_get_color_space();
    const enum gs_color_space source_space = obs_source_get_color_space(source, 1, &current_space);

    const char *convert_tech = NULL;
    float multiplier = 1.0;
    enum gs_color_format format = gs_get_format_from_space(source_space);
    switch (source_space) {
    case GS_CS_SRGB:
    case GS_CS_SRGB_16F:
        switch (current_space) {
        case GS_CS_709_EXTENDED:
            convert_tech = "Draw";
            break;
        case GS_CS_709_SCRGB:
            convert_tech = "DrawMultiply";
            multiplier = obs_get_video_sdr_white_level() / 80.0f;
            break;
        case GS_CS_SRGB:
            break;
        case GS_CS_SRGB_16F:
            break;
        }
        break;
    case GS_CS_709_EXTENDED:
        switch (current_space) {
        case GS_CS_SRGB:
        case GS_CS_SRGB_16F:
            convert_tech = "DrawTonemap";
            break;
        case GS_CS_709_SCRGB:
            convert_tech = "DrawMultiply";
            multiplier = obs_get_video_sdr_white_level() / 80.0f;
            break;
        case GS_CS_709_EXTENDED:
            break;
        }
        break;
    case GS_CS_709_SCRGB:
        switch (current_space) {
        case GS_CS_SRGB:
        case GS_CS_SRGB_16F:
            convert_tech = "DrawMultiplyTonemap";
            multiplier = 80.0f / obs_get_video_sdr_white_level();
            break;
        case GS_CS_709_EXTENDED:
            convert_tech = "DrawMultiply";
            multiplier = 80.0f / obs_get_video_sdr_white_level();
            break;
        case GS_CS_709_SCRGB:
            break;
        }
    }

    if (convert_tech) {
        if (source->color_space_texrender) {
            if (gs_texrender_get_format(source->color_space_texrender) != format) {
                gs_texrender_destroy(source->color_space_texrender);
                source->color_space_texrender = NULL;
            }
        }

        if (!source->color_space_texrender) {
            source->color_space_texrender = gs_texrender_create(format, GS_ZS_NONE);
        }

        gs_texrender_reset(source->color_space_texrender);
        const int cx = get_base_width(source);
        const int cy = get_base_height(source);
        if (gs_texrender_begin_with_color_space(source->color_space_texrender, cx, cy, source_space))
        {
            gs_enable_blending(false);

            struct vec4 clear_color;
            vec4_zero(&clear_color);
            gs_clear(GS_CLEAR_COLOR, &clear_color, 0.0f, 0);
            gs_ortho(0.0f, (float)cx, 0.0f, (float)cy, -100.0f,100.0f);

            source->info.video_render(data, effect);

            gs_enable_blending(true);

            gs_texrender_end(source->color_space_texrender);

            gs_effect_t *default_effect = obs->video.default_effect;
            gs_technique_t *tech = gs_effect_get_technique(default_effect, convert_tech);

            const bool previous = gs_framebuffer_srgb_enabled();
            gs_enable_framebuffer_srgb(true);

            gs_texture_t *const tex = gs_texrender_get_texture(source->color_space_texrender);
            gs_effect_set_texture_srgb(gs_effect_get_param_by_name(default_effect, "image"), tex);
            gs_effect_set_float(gs_effect_get_param_by_name(default_effect, "multiplier"), multiplier);

            gs_blend_state_push();
            gs_blend_function(GS_BLEND_ONE, GS_BLEND_INVSRCALPHA);

            const size_t passes = gs_technique_begin(tech);
            for (size_t i = 0; i < passes; i++) {
                gs_technique_begin_pass(tech, i);
                gs_draw_sprite(tex, 0, 0, 0);
                gs_technique_end_pass(tech);
            }
            gs_technique_end(tech);

            gs_blend_state_pop();

            gs_enable_framebuffer_srgb(previous);
        }
    } else {
        // No conversion needed: call the source's video_render callback directly
        source->info.video_render(data, effect);
    }
}
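
A quick number for the multiplier: when rendering SDR content (GS_CS_SRGB/GS_CS_SRGB_16F) onto an scRGB target (GS_CS_709_SCRGB), the code uses obs_get_video_sdr_white_level() / 80.0f. With an SDR white level of 300 nits, for example, that gives 300 / 80 = 3.75, because scRGB defines 1.0 as 80 nits; the opposite direction uses the reciprocal 80.0f / obs_get_video_sdr_white_level().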

The convert_tech variable presumably handles the case where a color-space conversion is required; here we assume it is not needed, so the source's video_render callback is called directly. Its data argument is the context provided by the registered source (an image source, a media/video source, and so on).
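
To make the data argument concrete, here is a minimal hypothetical source (the my_source_* names are illustrative, not part of libobs): the pointer returned from the create callback is exactly what video_render later receives as data.

struct my_source {
    obs_source_t *source;
    gs_texture_t *tex; /* e.g. an image uploaded elsewhere */
};

static void *my_source_create(obs_data_t *settings, obs_source_t *source)
{
    struct my_source *ctx = bzalloc(sizeof(*ctx));
    ctx->source = source;
    UNUSED_PARAMETER(settings);
    return ctx; /* this pointer becomes "data" */
}

static void my_source_video_render(void *data, gs_effect_t *effect)
{
    struct my_source *ctx = data;
    if (ctx->tex)
        obs_source_draw(ctx->tex, 0, 0, 0, 0, false);
    UNUSED_PARAMETER(effect);
}

static struct obs_source_info my_source_info = {
    .id = "my_source",
    .type = OBS_SOURCE_TYPE_INPUT,
    .output_flags = OBS_SOURCE_VIDEO,
    .create = my_source_create,
    .video_render = my_source_video_render,
    /* destroy, get_name, etc. omitted for brevity */
};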
