Imported Upstream version 0.14.2+dfsg1

This commit is contained in:
Sebastian Ramacher 2016-05-24 21:53:01 +02:00
parent fb3990e9e5
commit 41a01dbf05
529 changed files with 25112 additions and 2336 deletions

View file

@ -273,6 +273,7 @@ set(libobs_libobs_SOURCES
obs-encoder.c
obs-service.c
obs-source.c
obs-source-deinterlace.c
obs-source-transition.c
obs-output.c
obs-output-delay.c

View file

@ -1,7 +1,4 @@
uniform float4x4 ViewProj;
uniform float4x4 color_matrix;
uniform float3 color_range_min = {0.0, 0.0, 0.0};
uniform float3 color_range_max = {1.0, 1.0, 1.0};
uniform texture_rect image;
sampler_state def_sampler {
@ -28,13 +25,6 @@ float4 PSDrawBare(VertInOut vert_in) : TARGET
return image.Sample(def_sampler, vert_in.uv);
}
float4 PSDrawMatrix(VertInOut vert_in) : TARGET
{
float4 yuv = image.Sample(def_sampler, vert_in.uv);
yuv.xyz = clamp(yuv.xyz, color_range_min, color_range_max);
return saturate(mul(float4(yuv.xyz, 1.0), color_matrix));
}
technique Draw
{
pass
@ -43,13 +33,3 @@ technique Draw
pixel_shader = PSDrawBare(vert_in);
}
}
technique DrawMatrix
{
pass
{
vertex_shader = VSDefault(vert_in);
pixel_shader = PSDrawMatrix(vert_in);
}
}

View file

@ -0,0 +1,293 @@
/*
* Copyright (c) 2015 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
uniform float4x4 ViewProj;
uniform texture2d image;
uniform float4x4 color_matrix;
uniform float3 color_range_min = {0.0, 0.0, 0.0};
uniform float3 color_range_max = {1.0, 1.0, 1.0};
uniform texture2d previous_image;
uniform float2 dimensions;
uniform int field_order;
uniform bool frame2;
sampler_state textureSampler {
Filter = Linear;
AddressU = Clamp;
AddressV = Clamp;
};
struct VertData {
float4 pos : POSITION;
float2 uv : TEXCOORD0;
};
/* Builds a Texture2D.Load() coordinate: texel position plus an (x, y)
 * offset, with the third component (the mip level) fixed at 0. */
int3 select(int2 texel, int x, int y)
{
return int3(texel + int2(x, y), 0);
}
/* Fetches the texel at the given offset from the previous frame. */
float4 load_at_prev(int2 texel, int x, int y)
{
return previous_image.Load(select(texel, x, y));
}
/* Fetches the texel at the given offset from the current frame. */
float4 load_at_image(int2 texel, int x, int y)
{
return image.Load(select(texel, x, y));
}
/* Fetches a texel from either frame: field 0 selects the current frame,
 * any other value selects the previous frame. */
float4 load_at(int2 texel, int x, int y, int field)
{
if(field != 0)
return load_at_prev(texel, x, y);
return load_at_image(texel, x, y);
}
/*
 * Edge-directional search helpers for texel_at_yadif.
 *
 * NOTE(review): the braces are deliberately unbalanced — YADIF_UPDATE
 * opens a block that is closed by the literal `}` at each
 * YADIF_CHECK_ONE / YADIF_CHECK use site, so these macros only make
 * sense expanded together exactly as written below.  Each check
 * computes a per-channel directional score from three diagonal texel
 * pairs and, where it beats the current spatial_score, replaces the
 * spatial prediction for that channel.
 */
#define YADIF_UPDATE(c, level) \
if(score.c < spatial_score.c) \
{ \
spatial_score.c = score.c; \
spatial_pred.c = (load_at(texel, level, -1, field) + load_at(texel, -level, 1, field)).c / 2; \
/* (end of YADIF_UPDATE — block intentionally left open) */
#define YADIF_CHECK_ONE(level, c) \
{ \
float4 score = abs(load_at(texel, -1 + level, 1, field) - load_at(texel, -1 - level, -1, field)) + \
abs(load_at(texel, level, 1, field) - load_at(texel, -level, -1, field)) + \
abs(load_at(texel, 1 + level, 1, field) - load_at(texel, 1 - level, -1, field)); \
YADIF_UPDATE(c, level) } \
}
/* Runs the directional check for every channel, recursing one step
 * further (level * 2) per channel when the first check improves it. */
#define YADIF_CHECK(level) \
{ \
float4 score = abs(load_at(texel, -1 + level, 1, field) - load_at(texel, -1 - level, -1, field)) + \
abs(load_at(texel, level, 1, field) - load_at(texel, -level, -1, field)) + \
abs(load_at(texel, 1 + level, 1, field) - load_at(texel, 1 - level, -1, field)); \
YADIF_UPDATE(r, level) YADIF_CHECK_ONE(level * 2, r) } \
YADIF_UPDATE(g, level) YADIF_CHECK_ONE(level * 2, g) } \
YADIF_UPDATE(b, level) YADIF_CHECK_ONE(level * 2, b) } \
YADIF_UPDATE(a, level) YADIF_CHECK_ONE(level * 2, a) } \
}
/*
 * YADIF-style deinterlacing of one output texel.
 *
 * Rows belonging to the current field pass through unchanged; missing
 * rows are reconstructed from a spatial prediction (average of the
 * adjacent rows, refined by the edge-directional YADIF_CHECK searches)
 * that is then clamped by a temporal difference estimate built from the
 * current and previous frames.  mode0 enables the wider two-row
 * neighborhood clamp; otherwise a cheaper clamp is used.
 */
float4 texel_at_yadif(int2 texel, int field, bool mode0)
{
/* Texel belongs to the current field: return it untouched. */
if((texel.y % 2) == field)
return load_at(texel, 0, 0, field);
/* Average of previous and current frame at the given offset. */
#define YADIF_AVG(x_off, y_off) ((load_at_prev(texel, x_off, y_off) + load_at_image(texel, x_off, y_off))/2)
/* c/e: rows adjacent to the missing row; d: temporal average at the
 * missing row itself. */
float4 c = load_at(texel, 0, 1, field),
d = YADIF_AVG(0, 0),
e = load_at(texel, 0, -1, field);
/* Per-channel temporal difference estimates between the two frames. */
float4 temporal_diff0 = (abs(load_at_prev(texel, 0, 0) - load_at_image(texel, 0, 0))) / 2,
temporal_diff1 = (abs(load_at_prev(texel, 0, 1) - c) + abs(load_at_prev(texel, 0, -1) - e)) / 2,
temporal_diff2 = (abs(load_at_image(texel, 0, 1) - c) + abs(load_at_image(texel, 0, -1) - e)) / 2,
diff = max(temporal_diff0, max(temporal_diff1, temporal_diff2));
/* Initial spatial prediction and score for the vertical direction;
 * YADIF_CHECK then probes diagonal directions for a better score. */
float4 spatial_pred = (c + e) / 2,
spatial_score = abs(load_at(texel, -1, 1, field) - load_at(texel, -1, -1, field)) +
abs(c - e) +
abs(load_at(texel, 1, 1, field) - load_at(texel, 1, -1, field)) - 1;
YADIF_CHECK(-1)
YADIF_CHECK(1)
if (mode0) {
/* Mode 0: widen the temporal clamp using rows two lines away. */
float4 b = YADIF_AVG(0, 2),
f = YADIF_AVG(0, -2);
float4 max_ = max(d - e, max(d - c, min(b - c, f - e))),
min_ = min(d - e, min(d - c, max(b - c, f - e)));
diff = max(diff, max(min_, -max_));
} else {
diff = max(diff, max(min(d - e, d - c), -max(d - e, d - c)));
}
/* Clamp the spatial prediction per channel to d +/- diff. */
#define YADIF_SPATIAL(c) \
{ \
if(spatial_pred.c > d.c + diff.c) \
spatial_pred.c = d.c + diff.c; \
else if(spatial_pred.c < d.c - diff.c) \
spatial_pred.c = d.c - diff.c; \
}
YADIF_SPATIAL(r)
YADIF_SPATIAL(g)
YADIF_SPATIAL(b)
YADIF_SPATIAL(a)
return spatial_pred;
}
/* Double-rate yadif: the second output frame of each input frame uses
 * the opposite field. */
float4 texel_at_yadif_2x(int2 texel, int field, bool mode0)
{
if (frame2)
field = 1 - field;
return texel_at_yadif(texel, field, mode0);
}
/* Discard deinterlacer: keep only one field and line-double it. */
float4 texel_at_discard(int2 texel, int field)
{
/* Round y down to the even row of its line pair, then sample at a
 * vertical offset of `field`, so each pair shows a single field row. */
texel.y = texel.y / 2 * 2;
return load_at_image(texel, 0, field);
}
/* Double-rate discard: the first output frame of each input frame uses
 * the opposite field. */
float4 texel_at_discard_2x(int2 texel, int field)
{
if (!frame2)
field = 1 - field;
return texel_at_discard(texel, field);
}
/* Blend deinterlacer: average this row with the next row of the current
 * frame.  `field` is unused here but kept so every texel_at_* helper
 * shares the same signature. */
float4 texel_at_blend(int2 texel, int field)
{
return (load_at_image(texel, 0, 0) + load_at_image(texel, 0, 1)) / 2;
}
/* Double-rate blend: the first output frame blends the current frame
 * with the previous one; the second blends within the current frame. */
float4 texel_at_blend_2x(int2 texel, int field)
{
if (frame2)
return (load_at_image(texel, 0, 0) +
load_at_image(texel, 0, 1)) / 2;
return (load_at_image(texel, 0, 0) +
load_at_prev(texel, 0, 1)) / 2;
}
/* Linear deinterlacer: rows belonging to the current field pass through;
 * rows of the missing field are the average of the two adjacent rows. */
float4 texel_at_linear(int2 texel, int field)
{
bool own_field = (texel.y % 2) == field;
if (!own_field) {
float4 neighbor_a = load_at_image(texel, 0, -1);
float4 neighbor_b = load_at_image(texel, 0, 1);
return (neighbor_a + neighbor_b) / 2;
}
return load_at_image(texel, 0, 0);
}
/* Double-rate linear: the first output frame of each input frame uses
 * the opposite field. */
float4 texel_at_linear_2x(int2 texel, int field)
{
if (!frame2)
field = 1 - field;
return texel_at_linear(texel, field);
}
/* Averages the yadif (mode 0) and discard deinterlacers. */
float4 texel_at_yadif_discard(int2 texel, int field)
{
float4 yadif_px = texel_at_yadif(texel, field, true);
float4 discard_px = texel_at_discard(texel, field);
return (yadif_px + discard_px) / 2;
}
/* Double-rate yadif+discard blend: flip the field for the second output
 * frame, then average the two deinterlacers. */
float4 texel_at_yadif_discard_2x(int2 texel, int field)
{
if (frame2)
field = 1 - field;
return (texel_at_yadif(texel, field, true) + texel_at_discard(texel, field)) / 2;
}
/* Maps normalized UV coordinates to integer texel coordinates. */
int2 pixel_uv(float2 uv)
{
return int2(uv * dimensions);
}
/* Pixel shader: yadif with the mode-0 extra clamp, single-rate. */
float4 PSYadifMode0RGBA(VertData v_in) : TARGET
{
return texel_at_yadif(pixel_uv(v_in.uv), field_order, true);
}
/* Pixel shader: yadif with the mode-0 extra clamp, double-rate. */
float4 PSYadifMode0RGBA_2x(VertData v_in) : TARGET
{
return texel_at_yadif_2x(pixel_uv(v_in.uv), field_order, true);
}
/* Pixel shader: yadif without the mode-0 clamp, single-rate. */
float4 PSYadifMode2RGBA(VertData v_in) : TARGET
{
return texel_at_yadif(pixel_uv(v_in.uv), field_order, false);
}
/* Pixel shader: yadif without the mode-0 clamp, double-rate. */
float4 PSYadifMode2RGBA_2x(VertData v_in) : TARGET
{
return texel_at_yadif_2x(pixel_uv(v_in.uv), field_order, false);
}
/* Pixel shader: yadif/discard blend, single-rate. */
float4 PSYadifDiscardRGBA(VertData v_in) : TARGET
{
return texel_at_yadif_discard(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: yadif/discard blend, double-rate. */
float4 PSYadifDiscardRGBA_2x(VertData v_in) : TARGET
{
return texel_at_yadif_discard_2x(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: linear interpolation deinterlacer, single-rate. */
float4 PSLinearRGBA(VertData v_in) : TARGET
{
return texel_at_linear(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: linear interpolation deinterlacer, double-rate. */
float4 PSLinearRGBA_2x(VertData v_in) : TARGET
{
return texel_at_linear_2x(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: discard (line doubling) deinterlacer, single-rate. */
float4 PSDiscardRGBA(VertData v_in) : TARGET
{
return texel_at_discard(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: discard (line doubling) deinterlacer, double-rate. */
float4 PSDiscardRGBA_2x(VertData v_in) : TARGET
{
return texel_at_discard_2x(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: blend deinterlacer, single-rate. */
float4 PSBlendRGBA(VertData v_in) : TARGET
{
return texel_at_blend(pixel_uv(v_in.uv), field_order);
}
/* Pixel shader: blend deinterlacer, double-rate. */
float4 PSBlendRGBA_2x(VertData v_in) : TARGET
{
return texel_at_blend_2x(pixel_uv(v_in.uv), field_order);
}
/* Pass-through vertex shader: project the position and forward the
 * texture coordinate unchanged. */
VertData VSDefault(VertData v_in)
{
VertData result;
result.pos = mul(float4(v_in.pos.xyz, 1.0), ViewProj);
result.uv = v_in.uv;
return result;
}
/*
 * Generates the effect entry points for one deinterlacer: a "Draw"
 * technique using the plain RGBA pixel shader, plus a generated
 * matrix_ps pixel shader (clamps to the configured color range and
 * applies color_matrix — used for YUV sources) wrapped in a
 * "DrawMatrix" technique.  Each deinterlace_*.effect file includes this
 * base file and invokes TECHNIQUE once.
 */
#define TECHNIQUE(rgba_ps, matrix_ps) \
technique Draw \
{ \
pass \
{ \
vertex_shader = VSDefault(v_in); \
pixel_shader = rgba_ps(v_in); \
} \
} \
float4 matrix_ps(VertData v_in) : TARGET \
{ \
float4 yuv = rgba_ps(v_in); \
yuv.xyz = clamp(yuv.xyz, color_range_min, color_range_max); \
return saturate(mul(float4(yuv.xyz, 1.0), color_matrix)); \
} \
\
technique DrawMatrix \
{ \
pass \
{ \
vertex_shader = VSDefault(v_in); \
pixel_shader = matrix_ps(v_in); \
} \
}

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
/* Blend deinterlacer, single-rate. */
TECHNIQUE(PSBlendRGBA, PSBlendMatrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
TECHNIQUE(PSBlendRGBA_2x, PSBlendMatrix_2x);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
TECHNIQUE(PSDiscardRGBA, PSDiscardMatrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
TECHNIQUE(PSDiscardRGBA_2x, PSDiscardMatrix_2x);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
TECHNIQUE(PSLinearRGBA, PSLinearMatrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
/* Linear deinterlacer, double-rate.  The second argument names the
 * generated matrix pixel shader; renamed from the garbled
 * "PSLinearxMatrixA_2x" to match the PS<Name>Matrix_2x pattern used by
 * every sibling effect file (the name is defined and referenced only
 * inside the TECHNIQUE expansion, so this is behavior-neutral). */
TECHNIQUE(PSLinearRGBA_2x, PSLinearMatrix_2x);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
TECHNIQUE(PSYadifMode0RGBA, PSYadifMode0Matrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
TECHNIQUE(PSYadifMode0RGBA_2x, PSYadifMode0Matrix_2x);

View file

@ -23,11 +23,19 @@ struct VertData {
float2 uv : TEXCOORD0;
};
VertData VSDefault(VertData v_in)
struct FragData {
float4 pos : POSITION;
float2 uv : TEXCOORD0;
float2 scale : TEXCOORD1;
};
FragData VSDefault(VertData v_in)
{
VertData vert_out;
FragData vert_out;
vert_out.pos = mul(float4(v_in.pos.xyz, 1.0), ViewProj);
vert_out.uv = v_in.uv;
vert_out.scale = min(0.25 + abs(0.75 / mul(float4(1.0 / base_dimension_i.xy, 1.0, 1.0), ViewProj).xy), 1.0);
return vert_out;
}
@ -48,12 +56,12 @@ float weight(float x, float radius)
return 0.0;
}
float3 weight3(float x)
float3 weight3(float x, float scale)
{
return float3(
weight(x * 2.0 + 0.0 * 2.0 - 3.0, 3.0),
weight(x * 2.0 + 1.0 * 2.0 - 3.0, 3.0),
weight(x * 2.0 + 2.0 * 2.0 - 3.0, 3.0));
weight((x * 2.0 + 0.0 * 2.0 - 3.0) * scale, 3.0),
weight((x * 2.0 + 1.0 * 2.0 - 3.0) * scale, 3.0),
weight((x * 2.0 + 2.0 * 2.0 - 3.0) * scale, 3.0));
}
float4 pixel(float xpos, float ypos)
@ -73,16 +81,16 @@ float4 get_line(float ypos, float3 xpos1, float3 xpos2, float3 rowtap1,
pixel(xpos2.b, ypos) * rowtap2.b;
}
float4 DrawLanczos(VertData v_in)
float4 DrawLanczos(FragData v_in)
{
float2 stepxy = base_dimension_i;
float2 pos = v_in.uv + stepxy * 0.5;
float2 f = frac(pos / stepxy);
float3 rowtap1 = weight3((1.0 - f.x) / 2.0);
float3 rowtap2 = weight3((1.0 - f.x) / 2.0 + 0.5);
float3 coltap1 = weight3((1.0 - f.y) / 2.0);
float3 coltap2 = weight3((1.0 - f.y) / 2.0 + 0.5);
float3 rowtap1 = weight3((1.0 - f.x) / 2.0, v_in.scale.x);
float3 rowtap2 = weight3((1.0 - f.x) / 2.0 + 0.5, v_in.scale.x);
float3 coltap1 = weight3((1.0 - f.y) / 2.0, v_in.scale.y);
float3 coltap2 = weight3((1.0 - f.y) / 2.0 + 0.5, v_in.scale.y);
/* make sure all taps added together is exactly 1.0, otherwise some
* (very small) distortion can occur */
@ -106,12 +114,12 @@ float4 DrawLanczos(VertData v_in)
get_line(xystart.y + stepxy.y * 5.0, xpos1, xpos2, rowtap1, rowtap2) * coltap2.b;
}
float4 PSDrawLanczosRGBA(VertData v_in) : TARGET
float4 PSDrawLanczosRGBA(FragData v_in) : TARGET
{
return DrawLanczos(v_in);
}
float4 PSDrawLanczosMatrix(VertData v_in) : TARGET
float4 PSDrawLanczosMatrix(FragData v_in) : TARGET
{
float4 rgba = DrawLanczos(v_in);
float4 yuv;

View file

@ -0,0 +1,38 @@
uniform float4x4 ViewProj;
uniform texture2d image;
sampler_state def_sampler {
Filter = Linear;
AddressU = Clamp;
AddressV = Clamp;
};
struct VertInOut {
float4 pos : POSITION;
float2 uv : TEXCOORD0;
};
/* Pass-through vertex shader: project the position and forward the
 * texture coordinate unchanged. */
VertInOut VSDefault(VertInOut vert_in)
{
VertInOut result;
result.pos = mul(float4(vert_in.pos.xyz, 1.0), ViewProj);
result.uv = vert_in.uv;
return result;
}
/* Converts premultiplied-alpha RGBA to straight alpha: divides the
 * color channels by alpha (skipped when alpha is zero to avoid a
 * division by zero) and clamps the result to [0, 1]. */
float4 PSDraw(VertInOut vert_in) : TARGET
{
float4 rgba = image.Sample(def_sampler, vert_in.uv);
if (rgba.a > 0.0)
rgba.rgb /= rgba.a;
return saturate(rgba);
}
/* Single technique: default vertex pass plus the un-premultiply shader. */
technique Draw
{
pass
{
vertex_shader = VSDefault(vert_in);
pixel_shader = PSDraw(vert_in);
}
}

View file

@ -284,6 +284,9 @@ enum gs_shader_param_type {
GS_SHADER_PARAM_VEC2,
GS_SHADER_PARAM_VEC3,
GS_SHADER_PARAM_VEC4,
GS_SHADER_PARAM_INT2,
GS_SHADER_PARAM_INT3,
GS_SHADER_PARAM_INT4,
GS_SHADER_PARAM_MATRIX4X4,
GS_SHADER_PARAM_TEXTURE,
};

View file

@ -232,7 +232,7 @@ void matrix4_scale_i(struct matrix4 *dst, const struct vec3 *v,
bool matrix4_inv(struct matrix4 *dst, const struct matrix4 *m)
{
struct vec4 *dstv = (struct vec4 *)dst;
float det = matrix4_determinant(m);
float det;
float m3x3[9];
int i, j, sign;

View file

@ -28,6 +28,12 @@ enum gs_shader_param_type get_shader_param_type(const char *type)
return GS_SHADER_PARAM_VEC3;
else if (strcmp(type, "float4") == 0)
return GS_SHADER_PARAM_VEC4;
else if (strcmp(type, "int2") == 0)
return GS_SHADER_PARAM_INT2;
else if (strcmp(type, "int3") == 0)
return GS_SHADER_PARAM_INT3;
else if (strcmp(type, "int4") == 0)
return GS_SHADER_PARAM_INT4;
else if (astrcmp_n(type, "texture", 7) == 0)
return GS_SHADER_PARAM_TEXTURE;
else if (strcmp(type, "float4x4") == 0)

View file

@ -99,6 +99,9 @@ bool gs_texrender_begin(gs_texrender_t *texrender, uint32_t cx, uint32_t cy)
if (!texrender_resetbuffer(texrender, cx, cy))
return false;
if (!texrender->target)
return false;
gs_viewport_push();
gs_projection_push();
gs_matrix_push();

View file

@ -44,6 +44,9 @@ enum video_format video_format_from_fourcc(uint32_t fourcc)
case MAKE_FOURCC('Y','V','Y','U'):
return VIDEO_FORMAT_YVYU;
case MAKE_FOURCC('Y','8','0','0'):
return VIDEO_FORMAT_Y800;
}
return VIDEO_FORMAT_NONE;

View file

@ -66,6 +66,13 @@ void video_frame_init(struct video_frame *frame, enum video_format format,
frame->linesize[1] = width;
break;
case VIDEO_FORMAT_Y800:
size = width * height;
ALIGN_SIZE(size, alignment);
frame->data[0] = bmalloc(size);
frame->linesize[0] = width;
break;
case VIDEO_FORMAT_YVYU:
case VIDEO_FORMAT_YUY2:
case VIDEO_FORMAT_UYVY:
@ -115,6 +122,7 @@ void video_frame_copy(struct video_frame *dst, const struct video_frame *src,
memcpy(dst->data[1], src->data[1], src->linesize[1] * cy / 2);
break;
case VIDEO_FORMAT_Y800:
case VIDEO_FORMAT_YVYU:
case VIDEO_FORMAT_YUY2:
case VIDEO_FORMAT_UYVY:

View file

@ -46,6 +46,7 @@ enum video_format {
VIDEO_FORMAT_RGBA,
VIDEO_FORMAT_BGRA,
VIDEO_FORMAT_BGRX,
VIDEO_FORMAT_Y800, /* grayscale */
/* planar 4:4:4 */
VIDEO_FORMAT_I444,
@ -97,6 +98,7 @@ static inline bool format_is_yuv(enum video_format format)
case VIDEO_FORMAT_RGBA:
case VIDEO_FORMAT_BGRA:
case VIDEO_FORMAT_BGRX:
case VIDEO_FORMAT_Y800:
return false;
}
@ -115,6 +117,7 @@ static inline const char *get_video_format_name(enum video_format format)
case VIDEO_FORMAT_BGRA: return "BGRA";
case VIDEO_FORMAT_BGRX: return "BGRX";
case VIDEO_FORMAT_I444: return "I444";
case VIDEO_FORMAT_Y800: return "Y800";
case VIDEO_FORMAT_NONE:;
}

View file

@ -38,6 +38,7 @@ static inline enum AVPixelFormat get_ffmpeg_video_format(
case VIDEO_FORMAT_RGBA: return AV_PIX_FMT_RGBA;
case VIDEO_FORMAT_BGRA: return AV_PIX_FMT_BGRA;
case VIDEO_FORMAT_BGRX: return AV_PIX_FMT_BGRA;
case VIDEO_FORMAT_Y800: return AV_PIX_FMT_GRAY8;
case VIDEO_FORMAT_I444: return AV_PIX_FMT_YUV444P;
}

View file

@ -221,3 +221,58 @@ size_t obs_parse_avc_header(uint8_t **header, const uint8_t *data, size_t size)
*header = output.bytes.array;
return output.bytes.num;
}
/*
 * Splits an Annex B AVC packet into three newly allocated buffers:
 * SPS/PPS NAL units (*header_data), SEI NAL units (*sei_data), and all
 * remaining NAL units (*new_packet_data).  Start codes are kept with
 * each NAL unit.  Ownership of the three darray-backed buffers passes
 * to the caller.
 */
void obs_extract_avc_headers(const uint8_t *packet, size_t size,
uint8_t **new_packet_data, size_t *new_packet_size,
uint8_t **header_data, size_t *header_size,
uint8_t **sei_data, size_t *sei_size)
{
DARRAY(uint8_t) new_packet;
DARRAY(uint8_t) header;
DARRAY(uint8_t) sei;
const uint8_t *nal_start, *nal_end, *nal_codestart;
const uint8_t *end = packet + size;
int type;
da_init(new_packet);
da_init(header);
da_init(sei);
nal_start = obs_avc_find_startcode(packet, end);
nal_end = NULL;
/* Walk the packet one NAL unit at a time. */
while (nal_end != end) {
/* Remember where the start code begins so it can be copied
 * along with the NAL unit. */
nal_codestart = nal_start;
/* Skip the zero bytes of the start code; the post-increment
 * also consumes the terminating 0x01, leaving nal_start at
 * the NAL header byte. */
while (nal_start < end && !*(nal_start++));
if (nal_start == end)
break;
/* Low 5 bits of the NAL header are the nal_unit_type. */
type = nal_start[0] & 0x1F;
nal_end = obs_avc_find_startcode(nal_start, end);
if (!nal_end)
nal_end = end;
if (type == OBS_NAL_SPS || type == OBS_NAL_PPS) {
da_push_back_array(header, nal_codestart,
nal_end - nal_codestart);
} else if (type == OBS_NAL_SEI) {
da_push_back_array(sei, nal_codestart,
nal_end - nal_codestart);
} else {
da_push_back_array(new_packet, nal_codestart,
nal_end - nal_codestart);
}
nal_start = nal_end;
}
/* Hand the accumulated buffers (and their ownership) to the caller. */
*new_packet_data = new_packet.array;
*new_packet_size = new_packet.num;
*header_data = header.array;
*header_size = header.num;
*sei_data = sei.array;
*sei_size = sei.num;
}

View file

@ -55,6 +55,10 @@ EXPORT void obs_parse_avc_packet(struct encoder_packet *avc_packet,
const struct encoder_packet *src);
EXPORT size_t obs_parse_avc_header(uint8_t **header, const uint8_t *data,
size_t size);
EXPORT void obs_extract_avc_headers(const uint8_t *packet, size_t size,
uint8_t **new_packet_data, size_t *new_packet_size,
uint8_t **header_data, size_t *header_size,
uint8_t **sei_data, size_t *sei_size);
#ifdef __cplusplus
}

View file

@ -34,7 +34,7 @@
*
* Reset to zero each major version
*/
#define LIBOBS_API_MINOR_VER 13
#define LIBOBS_API_MINOR_VER 14
/*
* Increment if backward-compatible bug fix

View file

@ -54,8 +54,8 @@ static bool init_encoder(struct obs_encoder *encoder, const char *name,
return false;
if (pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE) != 0)
return false;
if (!obs_context_data_init(&encoder->context, settings, name,
hotkey_data, false))
if (!obs_context_data_init(&encoder->context, OBS_OBJ_TYPE_ENCODER,
settings, name, hotkey_data, false))
return false;
if (pthread_mutex_init(&encoder->init_mutex, &attr) != 0)
return false;
@ -731,7 +731,7 @@ static void send_first_video_packet(struct obs_encoder *encoder,
da_init(data);
if (!get_sei(encoder, &sei, &size)) {
if (!get_sei(encoder, &sei, &size) || !sei || !size) {
cb->new_packet(cb->param, packet);
cb->sent_first_packet = true;
return;
@ -1150,3 +1150,9 @@ const char *obs_encoder_get_id(const obs_encoder_t *encoder)
return obs_encoder_valid(encoder, "obs_encoder_get_id")
? encoder->info.id : NULL;
}
/* Looks up the registered encoder by id and returns its capability
 * flags; an unknown id reports no capabilities (0). */
uint32_t obs_get_encoder_caps(const char *encoder_id)
{
struct obs_encoder_info *info = find_encoder(encoder_id);
if (!info)
return 0;
return info->caps;
}

View file

@ -29,6 +29,8 @@
extern "C" {
#endif
#define OBS_ENCODER_CAP_DEPRECATED (1<<0)
/** Specifies the encoder type */
enum obs_encoder_type {
OBS_ENCODER_AUDIO, /**< The encoder provides an audio codec */
@ -227,6 +229,8 @@ struct obs_encoder_info {
void *type_data;
void (*free_type_data)(void *type_data);
uint32_t caps;
};
EXPORT void obs_register_encoder_s(const struct obs_encoder_info *info,

View file

@ -237,6 +237,7 @@ struct obs_core_video {
gs_effect_t *bicubic_effect;
gs_effect_t *lanczos_effect;
gs_effect_t *bilinear_lowres_effect;
gs_effect_t *premultiplied_alpha_effect;
gs_stagesurf_t *mapped_surface;
int cur_texture;
@ -262,6 +263,15 @@ struct obs_core_video {
enum obs_scale_type scale_type;
gs_texture_t *transparent_texture;
gs_effect_t *deinterlace_discard_effect;
gs_effect_t *deinterlace_discard_2x_effect;
gs_effect_t *deinterlace_linear_effect;
gs_effect_t *deinterlace_linear_2x_effect;
gs_effect_t *deinterlace_blend_effect;
gs_effect_t *deinterlace_blend_2x_effect;
gs_effect_t *deinterlace_yadif_effect;
gs_effect_t *deinterlace_yadif_2x_effect;
};
struct obs_core_audio {
@ -371,6 +381,8 @@ extern struct obs_core *obs;
extern void *obs_video_thread(void *param);
extern gs_effect_t *obs_load_effect(gs_effect_t **effect, const char *file);
extern bool audio_callback(void *param,
uint64_t start_ts_in, uint64_t end_ts_in, uint64_t *out_ts,
uint32_t mixers, struct audio_output_data *mixes);
@ -385,6 +397,7 @@ struct obs_context_data {
obs_data_t *settings;
signal_handler_t *signals;
proc_handler_t *procs;
enum obs_obj_type type;
DARRAY(obs_hotkey_id) hotkeys;
DARRAY(obs_hotkey_pair_id) hotkey_pairs;
@ -402,6 +415,7 @@ struct obs_context_data {
extern bool obs_context_data_init(
struct obs_context_data *context,
enum obs_obj_type type,
obs_data_t *settings,
const char *name,
obs_data_t *hotkey_data,
@ -564,7 +578,7 @@ struct obs_source {
/* async video data */
gs_texture_t *async_texture;
gs_texrender_t *async_convert_texrender;
gs_texrender_t *async_texrender;
struct obs_source_frame *cur_async_frame;
bool async_gpu_conversion;
enum video_format async_format;
@ -587,6 +601,18 @@ struct obs_source {
uint32_t async_convert_width;
uint32_t async_convert_height;
/* async video deinterlacing */
uint64_t deinterlace_offset;
uint64_t deinterlace_frame_ts;
gs_effect_t *deinterlace_effect;
struct obs_source_frame *prev_async_frame;
gs_texture_t *async_prev_texture;
gs_texrender_t *async_prev_texrender;
uint32_t deinterlace_half_duration;
enum obs_deinterlace_mode deinterlace_mode;
bool deinterlace_top_first;
bool deinterlace_rendered;
/* filters */
struct obs_source *filter_parent;
struct obs_source *filter_target;
@ -671,6 +697,30 @@ static inline void obs_source_dosignal(struct obs_source *source,
&data);
}
/* maximum timestamp variance in nanoseconds */
#define MAX_TS_VAR 2000000000ULL
static inline bool frame_out_of_bounds(const obs_source_t *source, uint64_t ts)
{
if (ts < source->last_frame_ts)
return ((source->last_frame_ts - ts) > MAX_TS_VAR);
else
return ((ts - source->last_frame_ts) > MAX_TS_VAR);
}
static inline enum gs_color_format convert_video_format(
enum video_format format)
{
if (format == VIDEO_FORMAT_RGBA)
return GS_RGBA;
else if (format == VIDEO_FORMAT_BGRA)
return GS_BGRA;
else if (format == VIDEO_FORMAT_Y800)
return GS_R8;
return GS_BGRX;
}
extern void obs_source_activate(obs_source_t *source, enum view_type type);
extern void obs_source_deactivate(obs_source_t *source, enum view_type type);
extern void obs_source_video_tick(obs_source_t *source, float seconds);
@ -682,6 +732,22 @@ extern void obs_source_audio_render(obs_source_t *source, uint32_t mixers,
extern void add_alignment(struct vec2 *v, uint32_t align, int cx, int cy);
extern struct obs_source_frame *filter_async_video(obs_source_t *source,
struct obs_source_frame *in);
extern bool update_async_texture(struct obs_source *source,
const struct obs_source_frame *frame,
gs_texture_t *tex, gs_texrender_t *texrender);
extern bool set_async_texture_size(struct obs_source *source,
const struct obs_source_frame *frame);
extern void remove_async_frame(obs_source_t *source,
struct obs_source_frame *frame);
extern void set_deinterlace_texture_size(obs_source_t *source);
extern void deinterlace_process_last_frame(obs_source_t *source,
uint64_t sys_time);
extern void deinterlace_update_async_video(obs_source_t *source);
extern void deinterlace_render(obs_source_t *s);
/* ------------------------------------------------------------------------- */
/* outputs */

View file

@ -53,8 +53,8 @@ static const char *output_signals[] = {
static bool init_output_handlers(struct obs_output *output, const char *name,
obs_data_t *settings, obs_data_t *hotkey_data)
{
if (!obs_context_data_init(&output->context, settings, name,
hotkey_data, false))
if (!obs_context_data_init(&output->context, OBS_OBJ_TYPE_OUTPUT,
settings, name, hotkey_data, false))
return false;
signal_handler_add_array(output->context.signals, output_signals);
@ -994,14 +994,15 @@ static bool prune_interleaved_packets(struct obs_output *output)
int prune_start = prune_premature_packets(output);
#if DEBUG_STARTING_PACKETS == 1
blog(LOG_DEBUG, "--------- Pruning! ---------");
blog(LOG_DEBUG, "--------- Pruning! %d ---------", prune_start);
for (size_t i = 0; i < output->interleaved_packets.num; i++) {
struct encoder_packet *packet =
&output->interleaved_packets.array[i];
blog(LOG_DEBUG, "packet: %s %d, ts: %lld",
blog(LOG_DEBUG, "packet: %s %d, ts: %lld, pruned = %s",
packet->type == OBS_ENCODER_AUDIO ?
"audio" : "video", (int)packet->track_idx,
packet->dts_usec);
packet->dts_usec,
(int)i < prune_start ? "true" : "false");
}
#endif
@ -1039,6 +1040,26 @@ static int find_first_packet_type_idx(struct obs_output *output,
return -1;
}
/*
 * Returns the index of the last interleaved packet of the given type
 * (for audio, also matching audio_idx), or -1 if none is queued.
 */
static int find_last_packet_type_idx(struct obs_output *output,
enum obs_encoder_type type, size_t audio_idx)
{
/* Iterate in reverse; i is offset by one because size_t cannot
 * represent -1 as a loop bound. */
for (size_t i = output->interleaved_packets.num; i > 0; i--) {
struct encoder_packet *packet =
&output->interleaved_packets.array[i - 1];
if (packet->type == type) {
/* Audio packets must also belong to the requested
 * track. */
if (type == OBS_ENCODER_AUDIO &&
packet->track_idx != audio_idx) {
continue;
}
return (int)(i - 1);
}
}
return -1;
}
static inline struct encoder_packet *find_first_packet_type(
struct obs_output *output, enum obs_encoder_type type,
size_t audio_idx)
@ -1047,14 +1068,20 @@ static inline struct encoder_packet *find_first_packet_type(
return (idx != -1) ? &output->interleaved_packets.array[idx] : NULL;
}
static bool initialize_interleaved_packets(struct obs_output *output)
static inline struct encoder_packet *find_last_packet_type(
struct obs_output *output, enum obs_encoder_type type,
size_t audio_idx)
{
struct encoder_packet *video;
struct encoder_packet *audio[MAX_AUDIO_MIXES];
size_t audio_mixes = num_audio_mixes(output);
int idx = find_last_packet_type_idx(output, type, audio_idx);
return (idx != -1) ? &output->interleaved_packets.array[idx] : NULL;
}
video = find_first_packet_type(output, OBS_ENCODER_VIDEO, 0);
if (!video)
static bool get_audio_and_video_packets(struct obs_output *output,
struct encoder_packet **video,
struct encoder_packet **audio, size_t audio_mixes)
{
*video = find_first_packet_type(output, OBS_ENCODER_VIDEO, 0);
if (!*video)
output->received_video = false;
for (size_t i = 0; i < audio_mixes; i++) {
@ -1065,10 +1092,45 @@ static bool initialize_interleaved_packets(struct obs_output *output)
}
}
if (!video) {
if (!*video) {
return false;
}
return true;
}
static bool initialize_interleaved_packets(struct obs_output *output)
{
struct encoder_packet *video;
struct encoder_packet *audio[MAX_AUDIO_MIXES];
struct encoder_packet *last_audio[MAX_AUDIO_MIXES];
size_t audio_mixes = num_audio_mixes(output);
size_t start_idx;
if (!get_audio_and_video_packets(output, &video, audio, audio_mixes))
return false;
for (size_t i = 0; i < audio_mixes; i++)
last_audio[i] = find_last_packet_type(output, OBS_ENCODER_AUDIO,
i);
/* ensure that there is audio past the first video packet */
for (size_t i = 0; i < audio_mixes; i++) {
if (last_audio[i]->dts_usec < video->dts_usec) {
output->received_audio = false;
return false;
}
}
/* clear out excess starting audio if it hasn't been already */
start_idx = get_interleaved_start_idx(output);
if (start_idx) {
discard_to_idx(output, start_idx);
if (!get_audio_and_video_packets(output, &video, audio,
audio_mixes))
return false;
}
/* get new offsets */
output->video_offset = video->dts;
for (size_t i = 0; i < audio_mixes; i++)

View file

@ -281,18 +281,38 @@ static void calculate_bounds_data(struct obs_scene_item *item,
(int)-width_diff, (int)-height_diff);
}
/* Width of the item after left/right crop; clamps to a minimum of 2
 * pixels when the crop exceeds the source width. */
static inline uint32_t calc_cx(const struct obs_scene_item *item,
		uint32_t width)
{
	uint32_t total_crop = item->crop.left + item->crop.right;

	if (total_crop > width)
		return 2;
	return width - total_crop;
}
/* Height of the item after top/bottom crop; clamps to a minimum of 2
 * pixels when the crop exceeds the source height. */
static inline uint32_t calc_cy(const struct obs_scene_item *item,
		uint32_t height)
{
	uint32_t total_crop = item->crop.top + item->crop.bottom;

	if (total_crop > height)
		return 2;
	return height - total_crop;
}
static void update_item_transform(struct obs_scene_item *item)
{
uint32_t width = obs_source_get_width(item->source);
uint32_t height = obs_source_get_height(item->source);
uint32_t cx = width;
uint32_t cy = height;
uint32_t cx = calc_cx(item, width);
uint32_t cy = calc_cy(item, height);
struct vec2 base_origin;
struct vec2 origin;
struct vec2 scale = item->scale;
struct calldata params;
uint8_t stack[128];
if (os_atomic_load_long(&item->defer_update) > 0)
return;
width = cx;
height = cy;
vec2_zero(&base_origin);
vec2_zero(&origin);
@ -358,6 +378,70 @@ static inline bool source_size_changed(struct obs_scene_item *item)
return item->last_width != width || item->last_height != height;
}
/* True when any crop edge is non-zero. */
static inline bool crop_enabled(const struct obs_sceneitem_crop *crop)
{
	return (crop->left | crop->right | crop->top | crop->bottom) != 0;
}
/* Draws a scene item, optionally through an intermediate crop texture.
 *
 * When the item has a crop_render texrender (present whenever any crop
 * edge is non-zero), the source is first rendered into a texture of the
 * cropped size, translated so the cropped edges fall outside it; that
 * texture is then drawn under the item's draw_transform.  Without a
 * crop_render the source is drawn directly under the transform. */
static inline void render_item(struct obs_scene_item *item)
{
	if (item->crop_render) {
		uint32_t width = obs_source_get_width(item->source);
		uint32_t height = obs_source_get_height(item->source);
		uint32_t cx = calc_cx(item, width);
		uint32_t cy = calc_cy(item, height);

		/* skip the pre-pass when the cropped size is zero or the
		 * texrender cannot begin this frame */
		if (cx && cy && gs_texrender_begin(item->crop_render, cx, cy)) {
			float cx_scale = (float)width / (float)cx;
			float cy_scale = (float)height / (float)cy;
			struct vec4 clear_color;

			vec4_zero(&clear_color);
			gs_clear(GS_CLEAR_COLOR, &clear_color, 0.0f, 0);
			gs_ortho(0.0f, (float)width, 0.0f, (float)height,
					-100.0f, 100.0f);

			/* the ortho covers the full source size, so scale it
			 * down to the cx-by-cy target and shift by the
			 * left/top crop amounts */
			gs_matrix_scale3f(cx_scale, cy_scale, 1.0f);
			gs_matrix_translate3f(
					-(float)item->crop.left,
					-(float)item->crop.top,
					0.0f);

			obs_source_video_render(item->source);
			gs_texrender_end(item->crop_render);
		}
	}

	gs_matrix_push();
	gs_matrix_mul(&item->draw_transform);
	if (item->crop_render) {
		gs_texture_t *tex = gs_texrender_get_texture(item->crop_render);

		while (gs_effect_loop(obs->video.default_effect, "Draw"))
			obs_source_draw(tex, 0, 0, 0, 0, 0);
	} else {
		obs_source_video_render(item->source);
	}
	gs_matrix_pop();
}
static void scene_video_tick(void *data, float seconds)
{
struct obs_scene *scene = data;
struct obs_scene_item *item;
video_lock(scene);
item = scene->first_item;
while (item) {
if (item->crop_render)
gs_texrender_reset(item->crop_render);
item = item->next;
}
video_unlock(scene);
UNUSED_PARAMETER(seconds);
}
static void scene_video_render(void *data, gs_effect_t *effect)
{
DARRAY(struct obs_scene_item*) remove_items;
@ -385,12 +469,8 @@ static void scene_video_render(void *data, gs_effect_t *effect)
if (source_size_changed(item))
update_item_transform(item);
if (item->user_visible) {
gs_matrix_push();
gs_matrix_mul(&item->draw_transform);
obs_source_video_render(item->source);
gs_matrix_pop();
}
if (item->user_visible)
render_item(item);
item = item->next;
}
@ -468,6 +548,23 @@ static void scene_load_item(struct obs_scene *scene, obs_data_t *item_data)
(uint32_t)obs_data_get_int(item_data, "bounds_align");
obs_data_get_vec2(item_data, "bounds", &item->bounds);
item->crop.left = (uint32_t)obs_data_get_int(item_data, "crop_left");
item->crop.top = (uint32_t)obs_data_get_int(item_data, "crop_top");
item->crop.right = (uint32_t)obs_data_get_int(item_data, "crop_right");
item->crop.bottom = (uint32_t)obs_data_get_int(item_data, "crop_bottom");
if (item->crop_render && !crop_enabled(&item->crop)) {
obs_enter_graphics();
gs_texrender_destroy(item->crop_render);
item->crop_render = NULL;
obs_leave_graphics();
} else if (!item->crop_render && crop_enabled(&item->crop)) {
obs_enter_graphics();
item->crop_render = gs_texrender_create(GS_RGBA, GS_ZS_NONE);
obs_leave_graphics();
}
obs_source_release(source);
update_item_transform(item);
@ -508,6 +605,10 @@ static void scene_save_item(obs_data_array_t *array,
obs_data_set_int (item_data, "bounds_type", (int)item->bounds_type);
obs_data_set_int (item_data, "bounds_align", (int)item->bounds_align);
obs_data_set_vec2 (item_data, "bounds", &item->bounds);
obs_data_set_int (item_data, "crop_left", (int)item->crop.left);
obs_data_set_int (item_data, "crop_top", (int)item->crop.top);
obs_data_set_int (item_data, "crop_right", (int)item->crop.right);
obs_data_set_int (item_data, "crop_bottom", (int)item->crop.bottom);
obs_data_array_push_back(array, item_data);
obs_data_release(item_data);
@ -742,6 +843,7 @@ const struct obs_source_info scene_info =
.get_name = scene_getname,
.create = scene_create,
.destroy = scene_destroy,
.video_tick = scene_video_tick,
.video_render = scene_video_render,
.audio_render = scene_audio_render,
.get_width = scene_getwidth,
@ -869,6 +971,8 @@ obs_scene_t *obs_scene_duplicate(obs_scene_t *scene, const char *name,
new_item->bounds_align = item->bounds_align;
new_item->bounds = item->bounds;
obs_sceneitem_set_crop(new_item, &item->crop);
obs_source_release(source);
}
}
@ -1123,6 +1227,11 @@ obs_sceneitem_t *obs_scene_add(obs_scene_t *scene, obs_source_t *source)
static void obs_sceneitem_destroy(obs_sceneitem_t *item)
{
if (item) {
if (item->crop_render) {
obs_enter_graphics();
gs_texrender_destroy(item->crop_render);
obs_leave_graphics();
}
obs_hotkey_pair_unregister(item->toggle_visibility);
pthread_mutex_destroy(&item->actions_mutex);
if (item->source)
@ -1573,3 +1682,75 @@ void obs_scene_atomic_update(obs_scene_t *scene,
full_unlock(scene);
obs_scene_release(scene);
}
static inline bool crop_equal(const struct obs_sceneitem_crop *crop1,
const struct obs_sceneitem_crop *crop2)
{
return crop1->left == crop2->left &&
crop1->right == crop2->right &&
crop1->top == crop2->top &&
crop1->bottom == crop2->bottom;
}
/* Sets the crop (pixels trimmed from each edge) of a scene item.
 *
 * Negative edge values are treated as zero.  A texrender used for the
 * cropped pre-pass is created when cropping becomes active and destroyed
 * when all edges return to zero.  No-ops when the sanitized crop equals
 * the current one. */
void obs_sceneitem_set_crop(obs_sceneitem_t *item,
		const struct obs_sceneitem_crop *crop)
{
	struct obs_sceneitem_crop new_crop;
	bool now_enabled;

	if (!obs_ptr_valid(item, "obs_sceneitem_set_crop"))
		return;
	if (!obs_ptr_valid(crop, "obs_sceneitem_set_crop"))
		return;

	/* sanitize BEFORE comparing/enabling: previously an all-negative
	 * crop passed crop_enabled(), created a needless texrender, and
	 * was then clamped to an all-zero crop */
	new_crop = *crop;
	if (new_crop.left < 0) new_crop.left = 0;
	if (new_crop.right < 0) new_crop.right = 0;
	if (new_crop.top < 0) new_crop.top = 0;
	if (new_crop.bottom < 0) new_crop.bottom = 0;

	if (crop_equal(&new_crop, &item->crop))
		return;

	now_enabled = crop_enabled(&new_crop);

	obs_enter_graphics();

	if (!now_enabled) {
		gs_texrender_destroy(item->crop_render);
		item->crop_render = NULL;

	} else if (!item->crop_render) {
		item->crop_render = gs_texrender_create(GS_RGBA, GS_ZS_NONE);
	}

	item->crop = new_crop;

	obs_leave_graphics();

	update_item_transform(item);
}
void obs_sceneitem_get_crop(const obs_sceneitem_t *item,
struct obs_sceneitem_crop *crop)
{
if (!obs_ptr_valid(item, "obs_sceneitem_get_crop"))
return;
if (!obs_ptr_valid(crop, "obs_sceneitem_get_crop"))
return;
memcpy(crop, &item->crop, sizeof(*crop));
}
/* Increments the item's deferred-update counter; while it is non-zero,
 * update_item_transform is a no-op, so several transform changes can be
 * batched into a single recalculation.  Pair with
 * obs_sceneitem_defer_update_end. */
void obs_sceneitem_defer_update_begin(obs_sceneitem_t *item)
{
	if (!obs_ptr_valid(item, "obs_sceneitem_defer_update_begin"))
		return;

	os_atomic_inc_long(&item->defer_update);
}
/* Decrements the deferred-update counter and, when the final deferral
 * ends, performs the pending transform recalculation. */
void obs_sceneitem_defer_update_end(obs_sceneitem_t *item)
{
	if (!obs_ptr_valid(item, "obs_sceneitem_defer_update_end"))
		return;

	if (os_atomic_dec_long(&item->defer_update) == 0)
		update_item_transform(item);
}

View file

@ -35,10 +35,14 @@ struct obs_scene_item {
struct obs_scene *parent;
struct obs_source *source;
volatile long active_refs;
volatile long defer_update;
bool user_visible;
bool visible;
bool selected;
gs_texrender_t *crop_render;
struct obs_sceneitem_crop crop;
struct vec2 pos;
struct vec2 scale;
float rot;

View file

@ -33,8 +33,9 @@ const char *obs_service_get_display_name(const char *id)
return (info != NULL) ? info->get_name(info->type_data) : NULL;
}
obs_service_t *obs_service_create(const char *id, const char *name,
obs_data_t *settings, obs_data_t *hotkey_data)
static obs_service_t *obs_service_create_internal(const char *id,
const char *name, obs_data_t *settings, obs_data_t *hotkey_data,
bool private)
{
const struct obs_service_info *info = find_service(id);
struct obs_service *service;
@ -46,23 +47,14 @@ obs_service_t *obs_service_create(const char *id, const char *name,
service = bzalloc(sizeof(struct obs_service));
if (!obs_context_data_init(&service->context, settings, name,
hotkey_data, false)) {
if (!obs_context_data_init(&service->context, OBS_OBJ_TYPE_SERVICE,
settings, name, hotkey_data, private)) {
bfree(service);
return NULL;
}
if (!info) {
blog(LOG_ERROR, "Service ID '%s' not found", id);
service->info.id = bstrdup(id);
service->owns_info_id = true;
} else {
service->info = *info;
}
if (info)
service->context.data = service->info.create(
service->info = *info;
service->context.data = service->info.create(
service->context.settings, service);
if (!service->context.data)
blog(LOG_ERROR, "Failed to create service '%s'!", name);
@ -74,10 +66,24 @@ obs_service_t *obs_service_create(const char *id, const char *name,
&obs->data.services_mutex,
&obs->data.first_service);
blog(LOG_INFO, "service '%s' (%s) created", name, id);
blog(private ? LOG_DEBUG : LOG_INFO, "service '%s' (%s) created",
name, id);
return service;
}
obs_service_t *obs_service_create(const char *id,
const char *name, obs_data_t *settings, obs_data_t *hotkey_data)
{
return obs_service_create_internal(id, name, settings, hotkey_data,
false);
}
obs_service_t *obs_service_create_private(const char *id,
const char *name, obs_data_t *settings)
{
return obs_service_create_internal(id, name, settings, NULL, true);
}
static void actually_destroy_service(struct obs_service *service)
{
if (service->context.data)
@ -86,7 +92,8 @@ static void actually_destroy_service(struct obs_service *service)
if (service->output)
service->output->service = NULL;
blog(LOG_INFO, "service '%s' destroyed", service->context.name);
blog(service->context.private ? LOG_DEBUG : LOG_INFO,
"service '%s' destroyed", service->context.name);
obs_context_data_free(&service->context);
if (service->owns_info_id)

View file

@ -0,0 +1,444 @@
/******************************************************************************
Copyright (C) 2016 by Hugh Bailey <obs.jim@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#include "obs-internal.h"
/* Prepares the async frame queue so the renderer has a current frame
 * and, when available, the preceding frame for field interpolation.
 *
 * Returns true when the renderer should advance to a new frame, false
 * when the current frame should be shown again.  The frame that should
 * become the "previous" half of a field pair is flagged with
 * prev_frame = true so deinterlace_get_closest_frames can pop both
 * together.  deinterlace_offset is reset to 0 whenever timestamps jump
 * so the play offset is re-anchored. */
static bool ready_deinterlace_frames(obs_source_t *source, uint64_t sys_time)
{
	struct obs_source_frame *next_frame = source->async_frames.array[0];
	struct obs_source_frame *prev_frame = NULL;
	struct obs_source_frame *frame = NULL;
	uint64_t sys_offset = sys_time - source->last_sys_timestamp;
	uint64_t frame_time = next_frame->timestamp;
	uint64_t frame_offset = 0;
	size_t idx = 1;

	if ((source->flags & OBS_SOURCE_FLAG_UNBUFFERED) != 0) {
		/* unbuffered mode: keep only the two newest frames (a field
		 * pair) and render immediately */
		while (source->async_frames.num > 2) {
			da_erase(source->async_frames, 0);
			remove_async_frame(source, next_frame);
			next_frame = source->async_frames.array[0];
		}

		if (source->async_frames.num == 2)
			source->async_frames.array[0]->prev_frame = true;
		source->deinterlace_offset = 0;
		return true;
	}

	/* account for timestamp invalidation */
	if (frame_out_of_bounds(source, frame_time)) {
		source->last_frame_ts = next_frame->timestamp;
		source->deinterlace_offset = 0;
		return true;
	} else {
		frame_offset = frame_time - source->last_frame_ts;
		source->last_frame_ts += sys_offset;
	}

	while (source->last_frame_ts > next_frame->timestamp) {

		/* this tries to reduce the needless frame duplication, also
		 * helps smooth out async rendering to frame boundaries.  In
		 * other words, tries to keep the framerate as smooth as
		 * possible */
		if ((source->last_frame_ts - next_frame->timestamp) < 2000000)
			break;

		if (prev_frame) {
			da_erase(source->async_frames, 0);
			remove_async_frame(source, prev_frame);
		}

		/* queue nearly exhausted: decide whether to stop advancing */
		if (source->async_frames.num <= 2) {
			bool exit = true;

			if (prev_frame) {
				prev_frame->prev_frame = true;
			} else if (!frame && source->async_frames.num == 2) {
				exit = false;
			}

			if (exit) {
				source->deinterlace_offset = 0;
				return true;
			}
		}

		/* once both prev and cur candidates are held, peek two
		 * entries ahead instead of one */
		if (frame)
			idx = 2;
		else
			idx = 1;

		prev_frame = frame;
		frame = next_frame;
		next_frame = source->async_frames.array[idx];

		/* more timestamp checking and compensating */
		if ((next_frame->timestamp - frame_time) > MAX_TS_VAR) {
			source->last_frame_ts =
				next_frame->timestamp - frame_offset;
			source->deinterlace_offset = 0;
		}

		frame_time = next_frame->timestamp;
		frame_offset = frame_time - source->last_frame_ts;
	}

	if (prev_frame)
		prev_frame->prev_frame = true;

	return frame != NULL;
}
/* On the very first frame (no last_frame_ts recorded yet), marks a
 * field pair when two frames are queued and tells the caller to render
 * immediately.  Returns false once playback has started. */
static inline bool first_frame(obs_source_t *s)
{
	if (s->last_frame_ts != 0)
		return false;

	if (s->async_frames.num >= 2)
		s->async_frames.array[0]->prev_frame = true;

	return true;
}
/* Absolute difference of two unsigned 64-bit timestamps (safe against
 * unsigned wrap-around regardless of argument order). */
static inline uint64_t uint64_diff(uint64_t ts1, uint64_t ts2)
{
	if (ts1 < ts2)
		return ts2 - ts1;
	return ts1 - ts2;
}
/* Pops the frame (and, when flagged, the preceding field-pair frame)
 * that best matches the current video time into cur_async_frame /
 * prev_async_frame.
 *
 * Also computes deinterlace_half_duration -- half the frame interval,
 * used by 2x modes to time the second field -- and keeps
 * deinterlace_offset (video_time minus frame timestamp) stable unless
 * it drifts by more than half an output frame interval. */
static inline void deinterlace_get_closest_frames(obs_source_t *s,
		uint64_t sys_time)
{
	const struct video_output_info *info;
	uint64_t half_interval;

	if (!s->async_frames.num)
		return;

	info = video_output_get_info(obs->video.video);

	/* half of one output frame interval, in nanoseconds */
	half_interval = (uint64_t)info->fps_den * 500000000ULL /
		(uint64_t)info->fps_num;

	if (first_frame(s) || ready_deinterlace_frames(s, sys_time)) {
		uint64_t offset;

		s->prev_async_frame = NULL;
		s->cur_async_frame = s->async_frames.array[0];

		da_erase(s->async_frames, 0);

		if (s->cur_async_frame->prev_frame) {
			/* a field pair was flagged: the popped frame becomes
			 * the previous frame and the next becomes current.
			 * NOTE(review): assumes a frame follows whenever
			 * prev_frame was set -- invariant maintained by
			 * ready_deinterlace_frames/first_frame */
			s->prev_async_frame = s->cur_async_frame;
			s->cur_async_frame = s->async_frames.array[0];

			da_erase(s->async_frames, 0);

			s->deinterlace_half_duration = (uint32_t)
				((s->cur_async_frame->timestamp -
				  s->prev_async_frame->timestamp) / 2);
		} else {
			s->deinterlace_half_duration = (uint32_t)
				((s->cur_async_frame->timestamp -
				  s->deinterlace_frame_ts) / 2);
		}

		if (!s->last_frame_ts)
			s->last_frame_ts = s->cur_async_frame->timestamp;

		s->deinterlace_frame_ts = s->cur_async_frame->timestamp;

		/* re-anchor the play offset only when it drifts beyond half
		 * an output frame interval */
		offset = obs->video.video_time - s->deinterlace_frame_ts;

		if (!s->deinterlace_offset) {
			s->deinterlace_offset = offset;
		} else {
			uint64_t offset_diff = uint64_diff(
					s->deinterlace_offset, offset);
			if (offset_diff > half_interval)
				s->deinterlace_offset = offset;
		}
	}
}
/* Releases the prev/cur frames consumed last tick, then selects the
 * next closest frame pair for the current system time. */
void deinterlace_process_last_frame(obs_source_t *s, uint64_t sys_time)
{
	struct obs_source_frame *prev = s->prev_async_frame;
	struct obs_source_frame *cur = s->cur_async_frame;

	if (prev) {
		remove_async_frame(s, prev);
		s->prev_async_frame = NULL;
	}
	if (cur) {
		remove_async_frame(s, cur);
		s->cur_async_frame = NULL;
	}

	deinterlace_get_closest_frames(s, sys_time);
}
/* (Re)creates the texture -- and, when GPU conversion is active, the
 * texrender -- holding the previous frame for field interpolation,
 * mirroring the size/format used for the current-frame texture. */
void set_deinterlace_texture_size(obs_source_t *source)
{
	if (!source->async_gpu_conversion) {
		enum gs_color_format format = convert_video_format(
				source->async_format);

		source->async_prev_texture = gs_texture_create(
				source->async_width, source->async_height,
				format, 1, NULL, GS_DYNAMIC);
		return;
	}

	source->async_prev_texrender =
		gs_texrender_create(GS_BGRX, GS_ZS_NONE);

	source->async_prev_texture = gs_texture_create(
			source->async_convert_width,
			source->async_convert_height,
			source->async_texture_format,
			1, NULL, GS_DYNAMIC);
}
/* Detaches and returns the pending previous frame under the async
 * mutex, adding a reference for the caller.  *updated reports whether a
 * new current frame has arrived.  Returns NULL when no previous frame
 * is pending. */
static inline struct obs_source_frame *get_prev_frame(obs_source_t *source,
		bool *updated)
{
	struct obs_source_frame *frame = NULL;

	pthread_mutex_lock(&source->async_mutex);

	*updated = source->cur_async_frame != NULL;

	frame = source->prev_async_frame;
	source->prev_async_frame = NULL;

	if (frame)
		os_atomic_inc_long(&frame->refs);

	pthread_mutex_unlock(&source->async_mutex);

	return frame;
}
/* Uploads the pending previous frame into the "previous" texture, at
 * most once per output frame (guarded by deinterlace_rendered).  When a
 * new current frame arrived without an explicit previous frame, the
 * current and previous textures/texrenders are swapped instead so the
 * old current texture serves as the previous field source. */
void deinterlace_update_async_video(obs_source_t *source)
{
	struct obs_source_frame *frame;
	bool updated;

	if (source->deinterlace_rendered)
		return;

	frame = get_prev_frame(source, &updated);

	/* reset by obs_source_video_tick at the start of each frame */
	source->deinterlace_rendered = true;

	if (frame)
		frame = filter_async_video(source, frame);

	if (frame) {
		if (set_async_texture_size(source, frame)) {
			update_async_texture(source, frame,
					source->async_prev_texture,
					source->async_prev_texrender);
		}

		obs_source_release_frame(source, frame);

	} else if (updated) { /* swap cur/prev if no previous texture */
		gs_texture_t *prev_tex = source->async_prev_texture;
		source->async_prev_texture = source->async_texture;
		source->async_texture = prev_tex;

		if (source->async_texrender) {
			gs_texrender_t *prev = source->async_prev_texrender;
			source->async_prev_texrender = source->async_texrender;
			source->async_texrender = prev;
		}
	}
}
/* Maps a deinterlace mode to its lazily loaded effect (cached in
 * obs->video by obs_load_effect).  Note the "retro" mode reuses the
 * discard-2x effect file.  Returns NULL for
 * OBS_DEINTERLACE_MODE_DISABLE or an unrecognized mode. */
static inline gs_effect_t *get_effect(enum obs_deinterlace_mode mode)
{
	switch (mode) {
	case OBS_DEINTERLACE_MODE_DISABLE:
		return NULL;
	case OBS_DEINTERLACE_MODE_DISCARD:
		return obs_load_effect(&obs->video.deinterlace_discard_effect,
				"deinterlace_discard.effect");
	case OBS_DEINTERLACE_MODE_RETRO:
		return obs_load_effect(&obs->video.deinterlace_discard_2x_effect,
				"deinterlace_discard_2x.effect");
	case OBS_DEINTERLACE_MODE_BLEND:
		return obs_load_effect(&obs->video.deinterlace_blend_effect,
				"deinterlace_blend.effect");
	case OBS_DEINTERLACE_MODE_BLEND_2X:
		return obs_load_effect(&obs->video.deinterlace_blend_2x_effect,
				"deinterlace_blend_2x.effect");
	case OBS_DEINTERLACE_MODE_LINEAR:
		return obs_load_effect(&obs->video.deinterlace_linear_effect,
				"deinterlace_linear.effect");
	case OBS_DEINTERLACE_MODE_LINEAR_2X:
		return obs_load_effect(&obs->video.deinterlace_linear_2x_effect,
				"deinterlace_linear_2x.effect");
	case OBS_DEINTERLACE_MODE_YADIF:
		return obs_load_effect(&obs->video.deinterlace_yadif_effect,
				"deinterlace_yadif.effect");
	case OBS_DEINTERLACE_MODE_YADIF_2X:
		return obs_load_effect(&obs->video.deinterlace_yadif_2x_effect,
				"deinterlace_yadif_2x.effect");
	}

	return NULL;
}
#define TWOX_TOLERANCE 1000000
/* Draws the deinterlaced output of an async source with the currently
 * selected deinterlace effect.
 *
 * Binds the current and previous field textures plus field order and
 * dimensions; for YUV formats it also uploads the color matrix (and,
 * for limited-range video, the min/max range vectors) and selects the
 * "DrawMatrix" technique.  The "frame2" flag tells 2x effects whether
 * the second field's presentation time has been reached, with a small
 * tolerance so the boundary isn't missed by a hair. */
void deinterlace_render(obs_source_t *s)
{
	gs_effect_t *effect = s->deinterlace_effect;
	uint64_t frame2_ts;
	gs_eparam_t *image = gs_effect_get_param_by_name(effect, "image");
	gs_eparam_t *prev = gs_effect_get_param_by_name(effect,
			"previous_image");
	gs_eparam_t *field = gs_effect_get_param_by_name(effect, "field_order");
	gs_eparam_t *frame2 = gs_effect_get_param_by_name(effect, "frame2");
	gs_eparam_t *dimensions = gs_effect_get_param_by_name(effect,
			"dimensions");
	struct vec2 size = {(float)s->async_width, (float)s->async_height};
	bool yuv = format_is_yuv(s->async_format);
	bool limited_range = yuv && !s->async_full_range;
	const char *tech = yuv ? "DrawMatrix" : "Draw";

	gs_texture_t *cur_tex = s->async_texrender ?
		gs_texrender_get_texture(s->async_texrender) :
		s->async_texture;
	gs_texture_t *prev_tex = s->async_prev_texrender ?
		gs_texrender_get_texture(s->async_prev_texrender) :
		s->async_prev_texture;

	/* nothing to draw until both field textures and a size exist */
	if (!cur_tex || !prev_tex || !s->async_width || !s->async_height)
		return;

	gs_effect_set_texture(image, cur_tex);
	gs_effect_set_texture(prev, prev_tex);
	gs_effect_set_int(field, s->deinterlace_top_first);
	gs_effect_set_vec2(dimensions, &size);

	if (yuv) {
		gs_eparam_t *color_matrix = gs_effect_get_param_by_name(
				effect, "color_matrix");
		gs_effect_set_val(color_matrix, s->async_color_matrix,
				sizeof(float) * 16);
	}
	if (limited_range) {
		const size_t size = sizeof(float) * 3;
		gs_eparam_t *color_range_min = gs_effect_get_param_by_name(
				effect, "color_range_min");
		gs_eparam_t *color_range_max = gs_effect_get_param_by_name(
				effect, "color_range_max");
		gs_effect_set_val(color_range_min, s->async_color_range_min,
				size);
		gs_effect_set_val(color_range_max, s->async_color_range_max,
				size);
	}

	/* presentation time of the second field, minus tolerance */
	frame2_ts = s->deinterlace_frame_ts + s->deinterlace_offset +
		s->deinterlace_half_duration - TWOX_TOLERANCE;

	gs_effect_set_bool(frame2, obs->video.video_time >= frame2_ts);

	while (gs_effect_loop(effect, tech))
		gs_draw_sprite(NULL, s->async_flip ? GS_FLIP_V : 0,
				s->async_width, s->async_height);
}
/* Transitions a source from disabled to an active deinterlace mode:
 * creates the previous-frame texture(s) if the async video size is
 * already known, loads the mode's effect, and discards any stale
 * previous frame so fields are not paired across the mode change. */
static void enable_deinterlacing(obs_source_t *source,
		enum obs_deinterlace_mode mode)
{
	obs_enter_graphics();

	if (source->async_format != VIDEO_FORMAT_NONE &&
	    source->async_width != 0 &&
	    source->async_height != 0)
		set_deinterlace_texture_size(source);

	source->deinterlace_mode = mode;
	source->deinterlace_effect = get_effect(mode);

	pthread_mutex_lock(&source->async_mutex);
	if (source->prev_async_frame) {
		remove_async_frame(source, source->prev_async_frame);
		source->prev_async_frame = NULL;
	}
	pthread_mutex_unlock(&source->async_mutex);

	obs_leave_graphics();
}
/* Turns deinterlacing off: releases the previous-frame texture and
 * texrender and resets the mode to disabled. */
static void disable_deinterlacing(obs_source_t *source)
{
	obs_enter_graphics();

	gs_texture_destroy(source->async_prev_texture);
	source->async_prev_texture = NULL;

	gs_texrender_destroy(source->async_prev_texrender);
	source->async_prev_texrender = NULL;

	source->deinterlace_mode = OBS_DEINTERLACE_MODE_DISABLE;

	obs_leave_graphics();
}
/* Selects the source's deinterlace mode.  Enabling from disabled (or
 * disabling) allocates/frees the previous-frame resources; switching
 * between two active modes only swaps the effect. */
void obs_source_set_deinterlace_mode(obs_source_t *source,
		enum obs_deinterlace_mode mode)
{
	if (!obs_source_valid(source, "obs_source_set_deinterlace_mode"))
		return;
	if (mode == source->deinterlace_mode)
		return;

	bool was_disabled = source->deinterlace_mode ==
		OBS_DEINTERLACE_MODE_DISABLE;
	bool now_disabled = mode == OBS_DEINTERLACE_MODE_DISABLE;

	if (was_disabled) {
		enable_deinterlacing(source, mode);
	} else if (now_disabled) {
		disable_deinterlacing(source);
	} else {
		obs_enter_graphics();
		source->deinterlace_mode = mode;
		source->deinterlace_effect = get_effect(mode);
		obs_leave_graphics();
	}
}
/* Returns the source's current deinterlace mode, or
 * OBS_DEINTERLACE_MODE_DISABLE when the source pointer is invalid. */
enum obs_deinterlace_mode obs_source_get_deinterlace_mode(
		const obs_source_t *source)
{
	/* fixed: the validity check previously logged the setter's name
	 * ("obs_source_set_deinterlace_mode") instead of this function's */
	return obs_source_valid(source, "obs_source_get_deinterlace_mode") ?
		source->deinterlace_mode : OBS_DEINTERLACE_MODE_DISABLE;
}
/* Sets which field of an interlaced frame is temporally first (top or
 * bottom); deinterlace_render passes this to the effect as
 * "field_order". */
void obs_source_set_deinterlace_field_order(obs_source_t *source,
		enum obs_deinterlace_field_order field_order)
{
	if (!obs_source_valid(source, "obs_source_set_deinterlace_field_order"))
		return;

	source->deinterlace_top_first =
		field_order == OBS_DEINTERLACE_FIELD_ORDER_TOP;
}
/* Returns the source's configured field order; defaults to top-first
 * when the source pointer is invalid. */
enum obs_deinterlace_field_order obs_source_get_deinterlace_field_order(
		const obs_source_t *source)
{
	/* fixed: the validity check previously logged the setter's name
	 * ("obs_source_set_deinterlace_field_order") instead of this
	 * function's */
	if (!obs_source_valid(source, "obs_source_get_deinterlace_field_order"))
		return OBS_DEINTERLACE_FIELD_ORDER_TOP;

	return source->deinterlace_top_first
		? OBS_DEINTERLACE_FIELD_ORDER_TOP
		: OBS_DEINTERLACE_FIELD_ORDER_BOTTOM;
}

View file

@ -824,7 +824,8 @@ bool obs_transition_audio_render(obs_source_t *transition,
if (min_ts)
copy_transition_state(transition, &state);
} else if (transition->transitioning_audio) {
} else if (!transition->transitioning_video &&
transition->transitioning_audio) {
stopped = stop_audio(transition);
}

View file

@ -34,6 +34,11 @@ static inline bool data_valid(const struct obs_source *source, const char *f)
return obs_source_valid(source, f) && source->context.data;
}
/* True when the source currently has a deinterlace mode selected. */
static inline bool deinterlacing_enabled(const struct obs_source *source)
{
	return source->deinterlace_mode != OBS_DEINTERLACE_MODE_DISABLE;
}
const struct obs_source_info *get_source_info(const char *id)
{
for (size_t i = 0; i < obs->source_types.num; i++) {
@ -79,8 +84,8 @@ bool obs_source_init_context(struct obs_source *source,
obs_data_t *settings, const char *name, obs_data_t *hotkey_data,
bool private)
{
if (!obs_context_data_init(&source->context, settings, name,
hotkey_data, private))
if (!obs_context_data_init(&source->context, OBS_OBJ_TYPE_SOURCE,
settings, name, hotkey_data, private))
return false;
return signal_handler_add_array(source->context.signals,
@ -109,6 +114,12 @@ static void allocate_audio_output_buffer(struct obs_source *source)
}
}
/* True when the source outputs asynchronous video frames (all bits of
 * OBS_SOURCE_ASYNC_VIDEO set in its output flags). */
static inline bool is_async_video_source(const struct obs_source *source)
{
	return (source->info.output_flags & OBS_SOURCE_ASYNC_VIDEO) ==
		OBS_SOURCE_ASYNC_VIDEO;
}
static inline bool is_audio_source(const struct obs_source *source)
{
return source->info.output_flags & OBS_SOURCE_AUDIO;
@ -119,6 +130,8 @@ static inline bool is_composite_source(const struct obs_source *source)
return source->info.output_flags & OBS_SOURCE_COMPOSITE;
}
extern char *find_libobs_data_file(const char *file);
/* internal initialization */
bool obs_source_init(struct obs_source *source)
{
@ -153,6 +166,16 @@ bool obs_source_init(struct obs_source *source)
if (is_audio_source(source) || is_composite_source(source))
allocate_audio_output_buffer(source);
if (source->info.type == OBS_SOURCE_TYPE_TRANSITION) {
if (!obs_transition_init(source))
return false;
}
source->control = bzalloc(sizeof(obs_weak_source_t));
source->deinterlace_top_first = true;
source->control->source = source;
source->audio_mixers = 0xF;
if (is_audio_source(source)) {
pthread_mutex_lock(&obs->data.audio_sources_mutex);
@ -167,15 +190,6 @@ bool obs_source_init(struct obs_source *source)
pthread_mutex_unlock(&obs->data.audio_sources_mutex);
}
if (source->info.type == OBS_SOURCE_TYPE_TRANSITION) {
if (!obs_transition_init(source))
return false;
}
source->control = bzalloc(sizeof(obs_weak_source_t));
source->control->source = source;
source->audio_mixers = 0xF;
obs_context_data_insert(&source->context,
&obs->data.sources_mutex,
&obs->data.first_source);
@ -290,6 +304,13 @@ static obs_source_t *obs_source_create_internal(const char *id,
source->owns_info_id = true;
} else {
source->info = *info;
/* Always mark filters as private so they aren't found by
* source enum/search functions.
*
* XXX: Fix design flaws with filters */
if (info->type == OBS_SOURCE_TYPE_FILTER)
private = true;
}
source->mute_unmute_key = OBS_INVALID_HOTKEY_PAIR_ID;
@ -481,10 +502,14 @@ void obs_source_destroy(struct obs_source *source)
obs_source_frame_decref(source->async_cache.array[i].frame);
gs_enter_context(obs->video.graphics);
if (source->async_convert_texrender)
gs_texrender_destroy(source->async_convert_texrender);
if (source->async_texrender)
gs_texrender_destroy(source->async_texrender);
if (source->async_prev_texrender)
gs_texrender_destroy(source->async_prev_texrender);
if (source->async_texture)
gs_texture_destroy(source->async_texture);
if (source->async_prev_texture)
gs_texture_destroy(source->async_prev_texture);
if (source->filter_texrender)
gs_texrender_destroy(source->filter_texrender);
gs_leave_context();
@ -888,8 +913,6 @@ void obs_source_deactivate(obs_source_t *source, enum view_type type)
static inline struct obs_source_frame *get_closest_frame(obs_source_t *source,
uint64_t sys_time);
static void remove_async_frame(obs_source_t *source,
struct obs_source_frame *frame);
void obs_source_video_tick(obs_source_t *source, float seconds)
{
@ -905,12 +928,20 @@ void obs_source_video_tick(obs_source_t *source, float seconds)
uint64_t sys_time = obs->video.video_time;
pthread_mutex_lock(&source->async_mutex);
if (source->cur_async_frame) {
remove_async_frame(source, source->cur_async_frame);
source->cur_async_frame = NULL;
if (deinterlacing_enabled(source)) {
deinterlace_process_last_frame(source, sys_time);
} else {
if (source->cur_async_frame) {
remove_async_frame(source,
source->cur_async_frame);
source->cur_async_frame = NULL;
}
source->cur_async_frame = get_closest_frame(source,
sys_time);
}
source->cur_async_frame = get_closest_frame(source, sys_time);
source->last_sys_timestamp = sys_time;
pthread_mutex_unlock(&source->async_mutex);
}
@ -950,6 +981,7 @@ void obs_source_video_tick(obs_source_t *source, float seconds)
source->info.video_tick(source->context.data, seconds);
source->async_rendered = false;
source->deinterlace_rendered = false;
}
/* unless the value is 3+ hours worth of frames, this won't overflow */
@ -965,8 +997,6 @@ static inline size_t conv_time_to_frames(const size_t sample_rate,
return (size_t)(duration * (uint64_t)sample_rate / 1000000000ULL);
}
/* maximum timestamp variance in nanoseconds */
#define MAX_TS_VAR 2000000000ULL
/* maximum buffer size */
#define MAX_BUF_SIZE (1000 * AUDIO_OUTPUT_FRAMES * sizeof(float))
@ -1210,6 +1240,7 @@ static inline enum convert_type get_convert_type(enum video_format format)
case VIDEO_FORMAT_UYVY:
return CONVERT_422_U;
case VIDEO_FORMAT_Y800:
case VIDEO_FORMAT_I444:
case VIDEO_FORMAT_NONE:
case VIDEO_FORMAT_RGBA:
@ -1280,18 +1311,7 @@ static inline bool init_gpu_conversion(struct obs_source *source,
return false;
}
static inline enum gs_color_format convert_video_format(
enum video_format format)
{
if (format == VIDEO_FORMAT_RGBA)
return GS_RGBA;
else if (format == VIDEO_FORMAT_BGRA)
return GS_BGRA;
return GS_BGRX;
}
static inline bool set_async_texture_size(struct obs_source *source,
bool set_async_texture_size(struct obs_source *source,
const struct obs_source_frame *frame)
{
enum convert_type cur = get_convert_type(frame->format);
@ -1306,13 +1326,18 @@ static inline bool set_async_texture_size(struct obs_source *source,
source->async_format = frame->format;
gs_texture_destroy(source->async_texture);
gs_texrender_destroy(source->async_convert_texrender);
source->async_convert_texrender = NULL;
gs_texture_destroy(source->async_prev_texture);
gs_texrender_destroy(source->async_texrender);
gs_texrender_destroy(source->async_prev_texrender);
source->async_texture = NULL;
source->async_prev_texture = NULL;
source->async_texrender = NULL;
source->async_prev_texrender = NULL;
if (cur != CONVERT_NONE && init_gpu_conversion(source, frame)) {
source->async_gpu_conversion = true;
source->async_convert_texrender =
source->async_texrender =
gs_texrender_create(GS_BGRX, GS_ZS_NONE);
source->async_texture = gs_texture_create(
@ -1331,6 +1356,9 @@ static inline bool set_async_texture_size(struct obs_source *source,
format, 1, NULL, GS_DYNAMIC);
}
if (deinterlacing_enabled(source))
set_deinterlace_texture_size(source);
return !!source->async_texture;
}
@ -1379,6 +1407,7 @@ static const char *select_conversion_technique(enum video_format format)
return "NV12_Reverse";
break;
case VIDEO_FORMAT_Y800:
case VIDEO_FORMAT_BGRA:
case VIDEO_FORMAT_BGRX:
case VIDEO_FORMAT_RGBA:
@ -1397,11 +1426,9 @@ static inline void set_eparam(gs_effect_t *effect, const char *name, float val)
}
static bool update_async_texrender(struct obs_source *source,
const struct obs_source_frame *frame)
const struct obs_source_frame *frame,
gs_texture_t *tex, gs_texrender_t *texrender)
{
gs_texture_t *tex = source->async_texture;
gs_texrender_t *texrender = source->async_convert_texrender;
gs_texrender_reset(texrender);
upload_raw_frame(tex, frame);
@ -1454,11 +1481,10 @@ static bool update_async_texrender(struct obs_source *source,
return true;
}
static bool update_async_texture(struct obs_source *source,
const struct obs_source_frame *frame)
bool update_async_texture(struct obs_source *source,
const struct obs_source_frame *frame,
gs_texture_t *tex, gs_texrender_t *texrender)
{
gs_texture_t *tex = source->async_texture;
gs_texrender_t *texrender = source->async_convert_texrender;
enum convert_type type = get_convert_type(frame->format);
uint8_t *ptr;
uint32_t linesize;
@ -1473,7 +1499,7 @@ static bool update_async_texture(struct obs_source *source,
sizeof frame->color_range_max);
if (source->async_gpu_conversion && texrender)
return update_async_texrender(source, frame);
return update_async_texrender(source, frame, tex, texrender);
if (type == CONVERT_NONE) {
gs_texture_set_image(tex, frame->data[0], frame->linesize[0],
@ -1513,8 +1539,8 @@ static inline void obs_source_draw_texture(struct obs_source *source,
gs_texture_t *tex = source->async_texture;
gs_eparam_t *param;
if (source->async_convert_texrender)
tex = gs_texrender_get_texture(source->async_convert_texrender);
if (source->async_texrender)
tex = gs_texrender_get_texture(source->async_texrender);
if (color_range_min) {
size_t const size = sizeof(float) * 3;
@ -1566,9 +1592,6 @@ static void obs_source_draw_async_texture(struct obs_source *source)
}
}
static inline struct obs_source_frame *filter_async_video(obs_source_t *source,
struct obs_source_frame *in);
static void obs_source_update_async_video(obs_source_t *source)
{
if (!source->async_rendered) {
@ -1583,13 +1606,14 @@ static void obs_source_update_async_video(obs_source_t *source)
os_gettime_ns() - frame->timestamp;
source->timing_set = true;
if (!set_async_texture_size(source, frame))
return;
if (!update_async_texture(source, frame))
return;
}
if (set_async_texture_size(source, frame)) {
update_async_texture(source, frame,
source->async_texture,
source->async_texrender);
}
obs_source_release_frame(source, frame);
obs_source_release_frame(source, frame);
}
}
}
@ -1606,11 +1630,10 @@ static inline void obs_source_render_filters(obs_source_t *source)
source->rendering_filter = false;
}
static void obs_source_default_render(obs_source_t *source, bool color_matrix)
static void obs_source_default_render(obs_source_t *source)
{
gs_effect_t *effect = obs->video.default_effect;
const char *tech_name = color_matrix ? "DrawMatrix" : "Draw";
gs_technique_t *tech = gs_effect_get_technique(effect, tech_name);
gs_technique_t *tech = gs_effect_get_technique(effect, "Draw");
size_t passes, i;
passes = gs_technique_begin(tech);
@ -1626,14 +1649,13 @@ static void obs_source_default_render(obs_source_t *source, bool color_matrix)
static inline void obs_source_main_render(obs_source_t *source)
{
uint32_t flags = source->info.output_flags;
bool color_matrix = (flags & OBS_SOURCE_COLOR_MATRIX) != 0;
bool custom_draw = (flags & OBS_SOURCE_CUSTOM_DRAW) != 0;
bool default_effect = !source->filter_parent &&
source->filters.num == 0 &&
!custom_draw;
if (default_effect)
obs_source_default_render(source, color_matrix);
obs_source_default_render(source);
else if (source->context.data)
source->info.video_render(source->context.data,
custom_draw ? NULL : gs_get_effect());
@ -1649,8 +1671,11 @@ static inline void render_video(obs_source_t *source)
if (source->info.type == OBS_SOURCE_TYPE_INPUT &&
(source->info.output_flags & OBS_SOURCE_ASYNC) != 0 &&
!source->rendering_filter)
!source->rendering_filter) {
if (deinterlacing_enabled(source))
deinterlace_update_async_video(source);
obs_source_update_async_video(source);
}
if (!source->context.data || !source->enabled) {
if (source->filter_parent)
@ -1667,6 +1692,9 @@ static inline void render_video(obs_source_t *source)
else if (source->filter_target)
obs_source_video_render(source->filter_target);
else if (deinterlacing_enabled(source))
deinterlace_render(source);
else
obs_source_render_async_video(source);
}
@ -1827,6 +1855,12 @@ void obs_source_filter_add(obs_source_t *source, obs_source_t *filter)
calldata_set_ptr(&cd, "filter", filter);
signal_handler_signal(source->context.signals, "filter_add", &cd);
if (source && filter)
blog(source->context.private ? LOG_DEBUG : LOG_INFO,
"- filter '%s' (%s) added to source '%s'",
filter->context.name, filter->info.id,
source->context.name);
}
static bool obs_source_filter_remove_refless(obs_source_t *source,
@ -1859,6 +1893,12 @@ static bool obs_source_filter_remove_refless(obs_source_t *source,
signal_handler_signal(source->context.signals, "filter_remove", &cd);
if (source && filter)
blog(source->context.private ? LOG_DEBUG : LOG_INFO,
"- filter '%s' (%s) removed from source '%s'",
filter->context.name, filter->info.id,
source->context.name);
if (filter->info.filter_remove)
filter->info.filter_remove(filter->context.data,
filter->filter_parent);
@ -1988,7 +2028,7 @@ obs_data_t *obs_source_get_settings(const obs_source_t *source)
return source->context.settings;
}
static inline struct obs_source_frame *filter_async_video(obs_source_t *source,
struct obs_source_frame *filter_async_video(obs_source_t *source,
struct obs_source_frame *in)
{
size_t i;
@ -2071,6 +2111,7 @@ static void copy_frame_data(struct obs_source_frame *dst,
case VIDEO_FORMAT_YVYU:
case VIDEO_FORMAT_YUY2:
case VIDEO_FORMAT_UYVY:
case VIDEO_FORMAT_Y800:
case VIDEO_FORMAT_NONE:
case VIDEO_FORMAT_RGBA:
case VIDEO_FORMAT_BGRA:
@ -2099,6 +2140,7 @@ static inline void free_async_cache(struct obs_source *source)
da_resize(source->async_cache, 0);
da_resize(source->async_frames, 0);
source->cur_async_frame = NULL;
source->prev_async_frame = NULL;
}
#define MAX_UNUSED_FRAME_DURATION 5
@ -2375,17 +2417,11 @@ void obs_source_output_audio(obs_source_t *source,
pthread_mutex_unlock(&source->filter_mutex);
}
static inline bool frame_out_of_bounds(const obs_source_t *source, uint64_t ts)
void remove_async_frame(obs_source_t *source, struct obs_source_frame *frame)
{
if (ts < source->last_frame_ts)
return ((source->last_frame_ts - ts) > MAX_TS_VAR);
else
return ((ts - source->last_frame_ts) > MAX_TS_VAR);
}
if (frame)
frame->prev_frame = false;
static void remove_async_frame(obs_source_t *source,
struct obs_source_frame *frame)
{
for (size_t i = 0; i < source->async_cache.num; i++) {
struct async_frame *f = &source->async_cache.array[i];
@ -2594,9 +2630,8 @@ const char *obs_source_get_id(const obs_source_t *source)
}
static inline void render_filter_bypass(obs_source_t *target,
gs_effect_t *effect, bool use_matrix)
gs_effect_t *effect, const char *tech_name)
{
const char *tech_name = use_matrix ? "DrawMatrix" : "Draw";
gs_technique_t *tech = gs_effect_get_technique(effect, tech_name);
size_t passes, i;
@ -2610,9 +2645,8 @@ static inline void render_filter_bypass(obs_source_t *target,
}
static inline void render_filter_tex(gs_texture_t *tex, gs_effect_t *effect,
uint32_t width, uint32_t height, bool use_matrix)
uint32_t width, uint32_t height, const char *tech_name)
{
const char *tech_name = use_matrix ? "DrawMatrix" : "Draw";
gs_technique_t *tech = gs_effect_get_technique(effect, tech_name);
gs_eparam_t *image = gs_effect_get_param_by_name(effect, "image");
size_t passes, i;
@ -2638,25 +2672,35 @@ static inline bool can_bypass(obs_source_t *target, obs_source_t *parent,
((parent_flags & OBS_SOURCE_ASYNC) == 0);
}
void obs_source_process_filter_begin(obs_source_t *filter,
bool obs_source_process_filter_begin(obs_source_t *filter,
enum gs_color_format format,
enum obs_allow_direct_render allow_direct)
{
obs_source_t *target, *parent;
uint32_t target_flags, parent_flags;
int cx, cy;
bool use_matrix;
if (!obs_ptr_valid(filter, "obs_source_process_filter_begin"))
return;
return false;
target = obs_filter_get_target(filter);
parent = obs_filter_get_parent(filter);
if (!target) {
blog(LOG_INFO, "filter '%s' being processed with no target!",
filter->context.name);
return false;
}
if (!parent) {
blog(LOG_INFO, "filter '%s' being processed with no parent!",
filter->context.name);
return false;
}
target_flags = target->info.output_flags;
parent_flags = parent->info.output_flags;
cx = get_base_width(target);
cy = get_base_height(target);
use_matrix = !!(target_flags & OBS_SOURCE_COLOR_MATRIX);
filter->allow_direct = allow_direct;
@ -2665,12 +2709,12 @@ void obs_source_process_filter_begin(obs_source_t *filter,
* using the filter effect instead of rendering to texture to reduce
* the total number of passes */
if (can_bypass(target, parent, parent_flags, allow_direct)) {
return;
return true;
}
if (!cx || !cy) {
obs_source_skip_video_filter(filter);
return;
return false;
}
if (!filter->filter_texrender)
@ -2690,7 +2734,7 @@ void obs_source_process_filter_begin(obs_source_t *filter,
gs_ortho(0.0f, (float)cx, 0.0f, (float)cy, -100.0f, 100.0f);
if (target == parent && !custom_draw && !async)
obs_source_default_render(target, use_matrix);
obs_source_default_render(target);
else
obs_source_video_render(target);
@ -2698,15 +2742,44 @@ void obs_source_process_filter_begin(obs_source_t *filter,
}
gs_blend_state_pop();
return true;
}
void obs_source_process_filter_tech_end(obs_source_t *filter, gs_effect_t *effect,
uint32_t width, uint32_t height, const char *tech_name)
{
obs_source_t *target, *parent;
gs_texture_t *texture;
uint32_t target_flags, parent_flags;
if (!filter) return;
target = obs_filter_get_target(filter);
parent = obs_filter_get_parent(filter);
if (!target || !parent)
return;
target_flags = target->info.output_flags;
parent_flags = parent->info.output_flags;
const char *tech = tech_name ? tech_name : "Draw";
if (can_bypass(target, parent, parent_flags, filter->allow_direct)) {
render_filter_bypass(target, effect, tech);
} else {
texture = gs_texrender_get_texture(filter->filter_texrender);
render_filter_tex(texture, effect, width, height, tech);
}
}
void obs_source_process_filter_end(obs_source_t *filter, gs_effect_t *effect,
uint32_t width, uint32_t height)
{
obs_source_t *target, *parent;
gs_texture_t *texture;
uint32_t target_flags, parent_flags;
bool use_matrix;
if (!obs_ptr_valid(filter, "obs_source_process_filter_end"))
return;
@ -2715,15 +2788,14 @@ void obs_source_process_filter_end(obs_source_t *filter, gs_effect_t *effect,
parent = obs_filter_get_parent(filter);
target_flags = target->info.output_flags;
parent_flags = parent->info.output_flags;
use_matrix = !!(target_flags & OBS_SOURCE_COLOR_MATRIX);
if (can_bypass(target, parent, parent_flags, filter->allow_direct)) {
render_filter_bypass(target, effect, use_matrix);
render_filter_bypass(target, effect, "Draw");
} else {
texture = gs_texrender_get_texture(filter->filter_texrender);
if (texture)
render_filter_tex(texture, effect, width, height,
use_matrix);
"Draw");
}
}
@ -2732,7 +2804,6 @@ void obs_source_skip_video_filter(obs_source_t *filter)
obs_source_t *target, *parent;
bool custom_draw, async;
uint32_t parent_flags;
bool use_matrix;
if (!obs_ptr_valid(filter, "obs_source_skip_video_filter"))
return;
@ -2742,13 +2813,14 @@ void obs_source_skip_video_filter(obs_source_t *filter)
parent_flags = parent->info.output_flags;
custom_draw = (parent_flags & OBS_SOURCE_CUSTOM_DRAW) != 0;
async = (parent_flags & OBS_SOURCE_ASYNC) != 0;
use_matrix = !!(parent_flags & OBS_SOURCE_COLOR_MATRIX);
if (target == parent) {
if (!custom_draw && !async)
obs_source_default_render(target, use_matrix);
obs_source_default_render(target);
else if (target->info.video_render)
obs_source_main_render(target);
else if (deinterlacing_enabled(target))
deinterlace_render(target);
else
obs_source_render_async_video(target);
@ -3622,7 +3694,7 @@ static inline void process_audio_source_tick(obs_source_t *source,
void obs_source_audio_render(obs_source_t *source, uint32_t mixers,
size_t channels, size_t sample_rate, size_t size)
{
if (!source || !source->audio_output_buf[0][0]) {
if (!source->audio_output_buf[0][0]) {
source->audio_pending = true;
return;
}

View file

@ -87,15 +87,6 @@ enum obs_source_type {
*/
#define OBS_SOURCE_CUSTOM_DRAW (1<<3)
/**
* Source uses a color matrix (usually YUV sources).
*
* When this is used, the video_render callback will automatically assign a
* 4x4 YUV->RGB matrix to the "color_matrix" parameter of the effect, or it can
* be changed to a custom value.
*/
#define OBS_SOURCE_COLOR_MATRIX (1<<4)
/**
* Source supports interaction.
*

View file

@ -384,8 +384,8 @@ static bool vk_down(DWORD vk)
{
short state = GetAsyncKeyState(vk);
bool down = (state & 0x8000) != 0;
bool was_down = (state & 0x1) != 0;
return down || was_down;
return down;
}
bool obs_hotkeys_platform_is_pressed(obs_hotkeys_platform_t *context,

View file

@ -215,6 +215,17 @@ static bool obs_init_textures(struct obs_video_info *ovi)
return true;
}
gs_effect_t *obs_load_effect(gs_effect_t **effect, const char *file)
{
if (!*effect) {
char *filename = find_libobs_data_file(file);
*effect = gs_effect_create_from_file(filename, NULL);
bfree(filename);
}
return *effect;
}
static int obs_init_graphics(struct obs_video_info *ovi)
{
struct obs_core_video *video = &obs->video;
@ -280,6 +291,11 @@ static int obs_init_graphics(struct obs_video_info *ovi)
NULL);
bfree(filename);
filename = find_libobs_data_file("premultiplied_alpha.effect");
video->premultiplied_alpha_effect = gs_effect_create_from_file(filename,
NULL);
bfree(filename);
obs->video.transparent_texture = gs_texture_create(2, 2, GS_RGBA, 1,
&transparent_tex, 0);
@ -295,6 +311,8 @@ static int obs_init_graphics(struct obs_video_info *ovi)
success = false;
if (!video->conversion_effect)
success = false;
if (!video->premultiplied_alpha_effect)
success = false;
if (!video->transparent_texture)
success = false;
@ -1340,6 +1358,8 @@ gs_effect_t *obs_get_base_effect(enum obs_base_effect effect)
return obs->video.lanczos_effect;
case OBS_EFFECT_BILINEAR_LOWRES:
return obs->video.bilinear_lowres_effect;
case OBS_EFFECT_PREMULTIPLIED_ALPHA:
return obs->video.premultiplied_alpha_effect;
}
return NULL;
@ -1401,6 +1421,8 @@ static obs_source_t *obs_load_source_type(obs_data_t *source_data)
int64_t sync;
uint32_t flags;
uint32_t mixers;
int di_order;
int di_mode;
source = obs_source_create(id, name, settings, hotkeys);
@ -1444,6 +1466,14 @@ static obs_source_t *obs_load_source_type(obs_data_t *source_data)
obs_source_set_push_to_talk_delay(source,
obs_data_get_int(source_data, "push-to-talk-delay"));
di_mode = (int)obs_data_get_int(source_data, "deinterlace_mode");
obs_source_set_deinterlace_mode(source,
(enum obs_deinterlace_mode)di_mode);
di_order = (int)obs_data_get_int(source_data, "deinterlace_field_order");
obs_source_set_deinterlace_field_order(source,
(enum obs_deinterlace_field_order)di_order);
if (filters) {
size_t count = obs_data_array_count(filters);
@ -1474,7 +1504,8 @@ obs_source_t *obs_load_source(obs_data_t *source_data)
return obs_load_source_type(source_data);
}
void obs_load_sources(obs_data_array_t *array)
void obs_load_sources(obs_data_array_t *array, obs_load_source_cb cb,
void *private_data)
{
if (!obs) return;
@ -1507,6 +1538,7 @@ void obs_load_sources(obs_data_array_t *array)
if (source->info.type == OBS_SOURCE_TYPE_TRANSITION)
obs_transition_load(source, source_data);
obs_source_load(source);
cb(private_data, source);
}
obs_data_release(source_data);
}
@ -1538,6 +1570,9 @@ obs_data_t *obs_save_source(obs_source_t *source)
uint64_t ptm_delay = obs_source_get_push_to_mute_delay(source);
bool push_to_talk= obs_source_push_to_talk_enabled(source);
uint64_t ptt_delay = obs_source_get_push_to_talk_delay(source);
int di_mode = (int)obs_source_get_deinterlace_mode(source);
int di_order =
(int)obs_source_get_deinterlace_field_order(source);
obs_source_save(source);
hotkeys = obs_hotkeys_save_source(source);
@ -1562,6 +1597,8 @@ obs_data_t *obs_save_source(obs_source_t *source)
obs_data_set_bool (source_data, "push-to-talk", push_to_talk);
obs_data_set_int (source_data, "push-to-talk-delay", ptt_delay);
obs_data_set_obj (source_data, "hotkeys", hotkey_data);
obs_data_set_int (source_data, "deinterlace_mode", di_mode);
obs_data_set_int (source_data, "deinterlace_field_order", di_order);
if (source->info.type == OBS_SOURCE_TYPE_TRANSITION)
obs_transition_save(source, source_data);
@ -1650,6 +1687,7 @@ static inline char *dup_name(const char *name, bool private)
static inline bool obs_context_data_init_wrap(
struct obs_context_data *context,
enum obs_obj_type type,
obs_data_t *settings,
const char *name,
obs_data_t *hotkey_data,
@ -1658,6 +1696,7 @@ static inline bool obs_context_data_init_wrap(
assert(context);
memset(context, 0, sizeof(*context));
context->private = private;
context->type = type;
pthread_mutex_init_value(&context->rename_cache_mutex);
if (pthread_mutex_init(&context->rename_cache_mutex, NULL) < 0)
@ -1679,13 +1718,14 @@ static inline bool obs_context_data_init_wrap(
bool obs_context_data_init(
struct obs_context_data *context,
enum obs_obj_type type,
obs_data_t *settings,
const char *name,
obs_data_t *hotkey_data,
bool private)
{
if (obs_context_data_init_wrap(context, settings, name, hotkey_data,
private)) {
if (obs_context_data_init_wrap(context, type, settings, name,
hotkey_data, private)) {
return true;
} else {
obs_context_data_free(context);
@ -1768,3 +1808,35 @@ uint64_t obs_get_video_frame_time(void)
{
return obs ? obs->video.video_time : 0;
}
enum obs_obj_type obs_obj_get_type(void *obj)
{
struct obs_context_data *context = obj;
return context ? context->type : OBS_OBJ_TYPE_INVALID;
}
const char *obs_obj_get_id(void *obj)
{
struct obs_context_data *context = obj;
if (!context)
return NULL;
switch (context->type) {
case OBS_OBJ_TYPE_SOURCE: return ((obs_source_t*)obj)->info.id;
case OBS_OBJ_TYPE_OUTPUT: return ((obs_output_t*)obj)->info.id;
case OBS_OBJ_TYPE_ENCODER: return ((obs_encoder_t*)obj)->info.id;
case OBS_OBJ_TYPE_SERVICE: return ((obs_service_t*)obj)->info.id;
default:;
}
return NULL;
}
bool obs_obj_invalid(void *obj)
{
struct obs_context_data *context = obj;
if (!context)
return true;
return !context->data;
}

View file

@ -1,5 +1,5 @@
/******************************************************************************
Copyright (C) 2013-2014 by Hugh Bailey <obs.jim@gmail.com>
Copyright (C) 2013-2014 by Hugh Bailey <jim@obsproject.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@ -232,6 +232,7 @@ struct obs_source_frame {
/* used internally by libobs */
volatile long refs;
bool prev_frame;
};
/* ------------------------------------------------------------------------- */
@ -520,6 +521,7 @@ enum obs_base_effect {
OBS_EFFECT_BICUBIC, /**< Bicubic downscale */
OBS_EFFECT_LANCZOS, /**< Lanczos downscale */
OBS_EFFECT_BILINEAR_LOWRES, /**< Bilinear low resolution downscale */
OBS_EFFECT_PREMULTIPLIED_ALPHA,/**< Premultiplied alpha */
};
/** Returns a commonly used base effect */
@ -550,8 +552,11 @@ EXPORT obs_data_t *obs_save_source(obs_source_t *source);
/** Loads a source from settings data */
EXPORT obs_source_t *obs_load_source(obs_data_t *data);
typedef void (*obs_load_source_cb)(void *private_data, obs_source_t *source);
/** Loads sources from a data array */
EXPORT void obs_load_sources(obs_data_array_t *array);
EXPORT void obs_load_sources(obs_data_array_t *array, obs_load_source_cb cb,
void *private_data);
/** Saves sources to a data array */
EXPORT obs_data_array_t *obs_save_sources(void);
@ -560,6 +565,18 @@ typedef bool (*obs_save_source_filter_cb)(void *data, obs_source_t *source);
EXPORT obs_data_array_t *obs_save_sources_filtered(obs_save_source_filter_cb cb,
void *data);
enum obs_obj_type {
OBS_OBJ_TYPE_INVALID,
OBS_OBJ_TYPE_SOURCE,
OBS_OBJ_TYPE_OUTPUT,
OBS_OBJ_TYPE_ENCODER,
OBS_OBJ_TYPE_SERVICE
};
EXPORT enum obs_obj_type obs_obj_get_type(void *obj);
EXPORT const char *obs_obj_get_id(void *obj);
EXPORT bool obs_obj_invalid(void *obj);
/* ------------------------------------------------------------------------- */
/* View context */
@ -864,6 +881,32 @@ EXPORT void obs_source_add_audio_capture_callback(obs_source_t *source,
EXPORT void obs_source_remove_audio_capture_callback(obs_source_t *source,
obs_source_audio_capture_t callback, void *param);
enum obs_deinterlace_mode {
OBS_DEINTERLACE_MODE_DISABLE,
OBS_DEINTERLACE_MODE_DISCARD,
OBS_DEINTERLACE_MODE_RETRO,
OBS_DEINTERLACE_MODE_BLEND,
OBS_DEINTERLACE_MODE_BLEND_2X,
OBS_DEINTERLACE_MODE_LINEAR,
OBS_DEINTERLACE_MODE_LINEAR_2X,
OBS_DEINTERLACE_MODE_YADIF,
OBS_DEINTERLACE_MODE_YADIF_2X
};
enum obs_deinterlace_field_order {
OBS_DEINTERLACE_FIELD_ORDER_TOP,
OBS_DEINTERLACE_FIELD_ORDER_BOTTOM
};
EXPORT void obs_source_set_deinterlace_mode(obs_source_t *source,
enum obs_deinterlace_mode mode);
EXPORT enum obs_deinterlace_mode obs_source_get_deinterlace_mode(
const obs_source_t *source);
EXPORT void obs_source_set_deinterlace_field_order(obs_source_t *source,
enum obs_deinterlace_field_order field_order);
EXPORT enum obs_deinterlace_field_order obs_source_get_deinterlace_field_order(
const obs_source_t *source);
/* ------------------------------------------------------------------------- */
/* Functions used by sources */
@ -925,8 +968,11 @@ EXPORT void obs_source_release_frame(obs_source_t *source,
*
* After calling this, set your parameters for the effect, then call
* obs_source_process_filter_end to draw the filter.
*
* Returns true if filtering should continue, false if the filter is bypassed
* for whatever reason.
*/
EXPORT void obs_source_process_filter_begin(obs_source_t *filter,
EXPORT bool obs_source_process_filter_begin(obs_source_t *filter,
enum gs_color_format format,
enum obs_allow_direct_render allow_direct);
@ -940,6 +986,17 @@ EXPORT void obs_source_process_filter_begin(obs_source_t *filter,
EXPORT void obs_source_process_filter_end(obs_source_t *filter,
gs_effect_t *effect, uint32_t width, uint32_t height);
/**
* Draws the filter with a specific technique.
*
* Before calling this function, first call obs_source_process_filter_begin and
* then set the effect parameters, and then call this function to finalize the
* filter.
*/
EXPORT void obs_source_process_filter_tech_end(obs_source_t *filter,
gs_effect_t *effect, uint32_t width, uint32_t height,
const char *tech_name);
/** Skips the filter if the filter is invalid and cannot be rendered */
EXPORT void obs_source_skip_video_filter(obs_source_t *filter);
@ -1184,6 +1241,21 @@ EXPORT void obs_sceneitem_get_box_transform(const obs_sceneitem_t *item,
EXPORT bool obs_sceneitem_visible(const obs_sceneitem_t *item);
EXPORT bool obs_sceneitem_set_visible(obs_sceneitem_t *item, bool visible);
struct obs_sceneitem_crop {
int left;
int top;
int right;
int bottom;
};
EXPORT void obs_sceneitem_set_crop(obs_sceneitem_t *item,
const struct obs_sceneitem_crop *crop);
EXPORT void obs_sceneitem_get_crop(const obs_sceneitem_t *item,
struct obs_sceneitem_crop *crop);
EXPORT void obs_sceneitem_defer_update_begin(obs_sceneitem_t *item);
EXPORT void obs_sceneitem_defer_update_end(obs_sceneitem_t *item);
/* ------------------------------------------------------------------------- */
/* Outputs */
@ -1564,6 +1636,8 @@ EXPORT void *obs_encoder_get_type_data(obs_encoder_t *encoder);
EXPORT const char *obs_encoder_get_id(const obs_encoder_t *encoder);
EXPORT uint32_t obs_get_encoder_caps(const char *encoder_id);
/** Duplicates an encoder packet */
EXPORT void obs_duplicate_encoder_packet(struct encoder_packet *dst,
const struct encoder_packet *src);
@ -1579,6 +1653,9 @@ EXPORT const char *obs_service_get_display_name(const char *id);
EXPORT obs_service_t *obs_service_create(const char *id, const char *name,
obs_data_t *settings, obs_data_t *hotkey_data);
EXPORT obs_service_t *obs_service_create_private(const char *id,
const char *name, obs_data_t *settings);
/**
* Adds/releases a reference to a service. When the last reference is
* released, the service is destroyed.

View file

@ -602,6 +602,24 @@ static inline void cf_adderror_unexpected_eof(struct cf_preprocessor *pp,
NULL, NULL, NULL);
}
static inline void insert_path(struct cf_preprocessor *pp,
struct dstr *str_file)
{
const char *file;
const char *slash;
if (pp && pp->lex && pp->lex->file) {
file = pp->lex->file;
slash = strrchr(file, '/');
if (slash) {
struct dstr path = {0};
dstr_ncopy(&path, file, slash - file + 1);
dstr_insert_dstr(str_file, 0, &path);
dstr_free(&path);
}
}
}
static void cf_include_file(struct cf_preprocessor *pp,
const struct cf_token *file_token)
{
@ -615,6 +633,7 @@ static void cf_include_file(struct cf_preprocessor *pp,
dstr_init(&str_file);
dstr_copy_strref(&str_file, &file_token->str);
dstr_mid(&str_file, &str_file, 1, str_file.len-2);
insert_path(pp, &str_file);
/* if dependency already exists, run preprocessor on it */
for (i = 0; i < pp->dependencies.num; i++) {

View file

@ -346,6 +346,7 @@ void dstr_ncopy(struct dstr *dst, const char *array, const size_t len)
dst->array = bmemdup(array, len + 1);
dst->len = len;
dst->capacity = len + 1;
dst->array[len] = 0;
}
@ -363,6 +364,7 @@ void dstr_ncopy_dstr(struct dstr *dst, const struct dstr *str, const size_t len)
newlen = size_min(len, str->len);
dst->array = bmemdup(str->array, newlen + 1);
dst->len = newlen;
dst->capacity = newlen + 1;
dst->array[newlen] = 0;
}
@ -430,10 +432,11 @@ void dstr_insert(struct dstr *dst, const size_t idx, const char *array)
new_len = dst->len + len;
dstr_ensure_capacity(dst, new_len + 1);
dst->len = new_len;
memmove(dst->array+idx+len, dst->array+idx, dst->len - idx + 1);
memcpy(dst->array+idx, array, len);
dst->len = new_len;
}
void dstr_insert_dstr(struct dstr *dst, const size_t idx,
@ -450,10 +453,11 @@ void dstr_insert_dstr(struct dstr *dst, const size_t idx,
new_len = dst->len + str->len;
dstr_ensure_capacity(dst, (new_len+1));
dst->len = new_len;
memmove(dst->array+idx+str->len, dst->array+idx, dst->len - idx + 1);
memcpy(dst->array+idx, str->array, str->len);
dst->len = new_len;
}
void dstr_insert_ch(struct dstr *dst, const size_t idx, const char ch)

View file

@ -85,6 +85,37 @@ int64_t os_fgetsize(FILE *file)
return size;
}
#ifdef _WIN32
int os_stat(const char *file, struct stat *st)
{
if (file) {
wchar_t w_file[512];
size_t size = os_utf8_to_wcs(file, 0, w_file, sizeof(w_file));
if (size > 0) {
struct _stat st_w32;
int ret = _wstat(w_file, &st_w32);
if (ret == 0) {
st->st_dev = st_w32.st_dev;
st->st_ino = st_w32.st_ino;
st->st_mode = st_w32.st_mode;
st->st_nlink = st_w32.st_nlink;
st->st_uid = st_w32.st_uid;
st->st_gid = st_w32.st_gid;
st->st_rdev = st_w32.st_rdev;
st->st_size = st_w32.st_size;
st->st_atime = st_w32.st_atime;
st->st_mtime = st_w32.st_mtime;
st->st_ctime = st_w32.st_ctime;
}
return ret;
}
}
return -1;
}
#endif
int os_fseeki64(FILE *file, int64_t offset, int origin)
{
#ifdef _MSC_VER

View file

@ -34,6 +34,12 @@ EXPORT FILE *os_wfopen(const wchar_t *path, const char *mode);
EXPORT FILE *os_fopen(const char *path, const char *mode);
EXPORT int64_t os_fgetsize(FILE *file);
#ifdef _WIN32
EXPORT int os_stat(const char *file, struct stat *st);
#else
#define os_stat stat
#endif
EXPORT int os_fseeki64(FILE *file, int64_t offset, int origin);
EXPORT int64_t os_ftelli64(FILE *file);

View file

@ -287,6 +287,7 @@ static char *convert_string(const char *str, size_t len)
dstr_replace(&out, "\\n", "\n");
dstr_replace(&out, "\\t", "\t");
dstr_replace(&out, "\\r", "\r");
dstr_replace(&out, "\\\"", "\"");
return out.array;
}