updates
Some checks failed
CI / React UI Build (push) Successful in 11s
CI / Native Windows Build And Tests (push) Successful in 2m22s
CI / Windows Release Package (push) Has been cancelled

This commit is contained in:
2026-05-08 18:07:45 +10:00
parent eede6938cb
commit f322abf79a
24 changed files with 270 additions and 43 deletions

View File

@@ -121,7 +121,7 @@ Current native test coverage includes:
- JSON parsing and serialization.
- Parameter normalization and preset filename safety.
- Shader manifest parsing, temporal manifest validation, and package registry scanning.
- Video I/O format helpers, v210 pack/unpack math, playout scheduler timing, and fake backend contract coverage.
- Video I/O format helpers, v210/Ay10 row-byte math, v210 pack/unpack math, playout scheduler timing, and fake backend contract coverage.
- OSC packet parsing.
## Runtime Configuration
@@ -261,4 +261,5 @@ If `SLANG_ROOT` is not set, the workflow falls back to the repo-local default un
- compute shaders or a small 1x1 or nx1 RGBA16f render target for arbitrary data storage
- allow shaders to read other shaders' data stores by name, or publish the data over OSC
- Mipmapping for shader-declared textures
- Multipass rendering for shaders on request
- better control layout
- check for redundant code

View File

@@ -102,6 +102,8 @@ Optional fields:
- `fonts`: packaged font assets for live text parameters.
- `temporal`: history-buffer requirements.
Parameter objects may also include an optional `description` string. The control UI displays it as helper text underneath the parameter label, so use it for short operational guidance rather than long documentation.
Shader-visible identifiers must be valid Slang-style identifiers:
- `entryPoint`
@@ -237,7 +239,7 @@ Fields:
Color/precision notes:
- `context.sourceColor`, `sampleVideo()`, and temporal history samples are display-referred Rec.709-like RGB, not linear-light RGB.
- The current DeckLink backend prefers 10-bit YUV capture and output when the card/mode supports it, with automatic 8-bit fallback.
- The current DeckLink backend prefers 10-bit YUV capture and output when the card/mode supports it, with automatic 8-bit fallback. If external keying is enabled, output prefers 10-bit YUVA (`Ay10`) when supported so shader alpha can drive the key signal, then falls back to 8-bit BGRA.
- Internal decoded, layer, composite, output, and temporal render targets are 16-bit floating point, so gradients and LUT work have more headroom than packed byte video I/O formats.
- Do not add extra Rec.709 or linear conversions unless the shader intentionally documents that behavior.

View File

@@ -105,7 +105,8 @@ bool OpenGLComposite::InitVideoIO()
MessageBoxA(NULL, initFailureReason.c_str(), title, MB_OK | MB_ICONERROR);
return false;
}
if (!mVideoIO->SelectPreferredFormats(videoModes, initFailureReason))
const bool outputAlphaRequired = mRuntimeHost && mRuntimeHost->ExternalKeyingEnabled();
if (!mVideoIO->SelectPreferredFormats(videoModes, outputAlphaRequired, initFailureReason))
goto error;
if (! CheckOpenGLExtensions())

View File

@@ -34,8 +34,8 @@ bool OpenGLRenderPipeline::RenderFrame(const RenderPipelineFrameContext& context
glBindFramebuffer(GL_FRAMEBUFFER, mRenderer.OutputFramebuffer());
if (mOutputReady)
mOutputReady();
if (state.outputPixelFormat == VideoIOPixelFormat::V210)
PackOutputForV210(state);
if (state.outputPixelFormat == VideoIOPixelFormat::V210 || state.outputPixelFormat == VideoIOPixelFormat::Yuva10)
PackOutputFor10Bit(state);
glFlush();
const auto renderEndTime = std::chrono::steady_clock::now();
@@ -50,7 +50,7 @@ bool OpenGLRenderPipeline::RenderFrame(const RenderPipelineFrameContext& context
return true;
}
void OpenGLRenderPipeline::PackOutputForV210(const VideoIOState& state)
void OpenGLRenderPipeline::PackOutputFor10Bit(const VideoIOState& state)
{
glBindFramebuffer(GL_FRAMEBUFFER, mRenderer.OutputPackFramebuffer());
glViewport(0, 0, state.outputPackTextureWidth, state.outputFrameSize.height);
@@ -64,10 +64,13 @@ void OpenGLRenderPipeline::PackOutputForV210(const VideoIOState& state)
const GLint outputResolutionLocation = glGetUniformLocation(mRenderer.OutputPackProgram(), "uOutputVideoResolution");
const GLint activeWordsLocation = glGetUniformLocation(mRenderer.OutputPackProgram(), "uActiveV210Words");
const GLint packFormatLocation = glGetUniformLocation(mRenderer.OutputPackProgram(), "uOutputPackFormat");
if (outputResolutionLocation >= 0)
glUniform2f(outputResolutionLocation, static_cast<float>(state.outputFrameSize.width), static_cast<float>(state.outputFrameSize.height));
if (activeWordsLocation >= 0)
glUniform1f(activeWordsLocation, static_cast<float>(ActiveV210WordsForWidth(state.outputFrameSize.width)));
if (packFormatLocation >= 0)
glUniform1i(packFormatLocation, state.outputPixelFormat == VideoIOPixelFormat::Yuva10 ? 2 : 1);
glDrawArrays(GL_TRIANGLES, 0, 3);
glUseProgram(0);
@@ -79,7 +82,7 @@ void OpenGLRenderPipeline::ReadOutputFrame(const VideoIOState& state, VideoIOOut
{
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glPixelStorei(GL_PACK_ROW_LENGTH, 0);
if (state.outputPixelFormat == VideoIOPixelFormat::V210)
if (state.outputPixelFormat == VideoIOPixelFormat::V210 || state.outputPixelFormat == VideoIOPixelFormat::Yuva10)
{
glBindFramebuffer(GL_READ_FRAMEBUFFER, mRenderer.OutputPackFramebuffer());
glReadPixels(0, 0, state.outputPackTextureWidth, state.outputFrameSize.height, GL_RGBA, GL_UNSIGNED_BYTE, outputFrame.bytes);

View File

@@ -30,7 +30,7 @@ public:
bool RenderFrame(const RenderPipelineFrameContext& context, VideoIOOutputFrame& outputFrame);
private:
void PackOutputForV210(const VideoIOState& state);
void PackOutputFor10Bit(const VideoIOState& state);
void ReadOutputFrame(const VideoIOState& state, VideoIOOutputFrame& outputFrame);
OpenGLRenderer& mRenderer;

View File

@@ -90,12 +90,17 @@ const char* kOutputPackFragmentShaderSource =
"layout(binding = 0) uniform sampler2D uOutputRgb;\n"
"uniform vec2 uOutputVideoResolution;\n"
"uniform float uActiveV210Words;\n"
"uniform int uOutputPackFormat;\n"
"in vec2 vTexCoord;\n"
"layout(location = 0) out vec4 fragColor;\n"
"vec3 rgbAt(int x, int y)\n"
"vec4 rgbaAt(int x, int y)\n"
"{\n"
" ivec2 size = ivec2(max(uOutputVideoResolution, vec2(1.0, 1.0)));\n"
" return clamp(texelFetch(uOutputRgb, ivec2(clamp(x, 0, size.x - 1), clamp(y, 0, size.y - 1)), 0).rgb, vec3(0.0), vec3(1.0));\n"
" return clamp(texelFetch(uOutputRgb, ivec2(clamp(x, 0, size.x - 1), clamp(y, 0, size.y - 1)), 0), vec4(0.0), vec4(1.0));\n"
"}\n"
"vec3 rgbAt(int x, int y)\n"
"{\n"
" return rgbaAt(x, y).rgb;\n"
"}\n"
"vec3 rgbToLegalYcbcr10(vec3 rgb)\n"
"{\n"
@@ -112,9 +117,35 @@ const char* kOutputPackFragmentShaderSource =
"{\n"
" return vec4(float(word & 255u), float((word >> 8) & 255u), float((word >> 16) & 255u), float((word >> 24) & 255u)) / 255.0;\n"
"}\n"
"vec4 bigEndianWordToBytes(uint word)\n"
"{\n"
" return vec4(float((word >> 24) & 255u), float((word >> 16) & 255u), float((word >> 8) & 255u), float(word & 255u)) / 255.0;\n"
"}\n"
"vec4 packAy10Word(ivec2 outCoord)\n"
"{\n"
" ivec2 size = ivec2(max(uOutputVideoResolution, vec2(1.0, 1.0)));\n"
" if (outCoord.x >= size.x)\n"
" return vec4(0.0);\n"
" int pixelBase = (outCoord.x / 2) * 2;\n"
" int y = outCoord.y;\n"
" vec4 rgba0 = rgbaAt(pixelBase + 0, y);\n"
" vec4 rgba1 = rgbaAt(pixelBase + 1, y);\n"
" vec3 c0 = rgbToLegalYcbcr10(rgba0.rgb);\n"
" vec3 c1 = rgbToLegalYcbcr10(rgba1.rgb);\n"
" float chroma = (outCoord.x & 1) == 0 ? round((c0.y + c1.y) * 0.5) : round((c0.z + c1.z) * 0.5);\n"
" float alpha = round(clamp(((outCoord.x & 1) == 0 ? rgba0.a : rgba1.a), 0.0, 1.0) * 1023.0);\n"
" float luma = (outCoord.x & 1) == 0 ? c0.x : c1.x;\n"
" uint word = ((uint(luma) & 1023u) << 22) | ((uint(chroma) & 1023u) << 12) | ((uint(alpha) & 1023u) << 2);\n"
" return bigEndianWordToBytes(word);\n"
"}\n"
"void main()\n"
"{\n"
" ivec2 outCoord = ivec2(gl_FragCoord.xy);\n"
" if (uOutputPackFormat == 2)\n"
" {\n"
" fragColor = packAy10Word(outCoord);\n"
" return;\n"
" }\n"
" if (float(outCoord.x) >= uActiveV210Words)\n"
" {\n"
" fragColor = vec4(0.0);\n"

View File

@@ -629,6 +629,9 @@ bool ParseParameterDefinition(const JsonValue& parameterJson, ShaderParameterDef
if (!ValidateShaderIdentifier(definition.id, "parameters[].id", manifestPath, error))
return false;
if (!OptionalStringField(parameterJson, "description", definition.description, "", manifestPath, error))
return false;
if (!ParseParameterDefault(parameterJson, definition, manifestPath, error) ||
!ParseParameterNumberField(parameterJson, "min", definition.minNumbers, manifestPath, error) ||
!ParseParameterNumberField(parameterJson, "max", definition.maxNumbers, manifestPath, error) ||
@@ -1974,6 +1977,8 @@ JsonValue RuntimeHost::SerializeLayerStackLocked() const
JsonValue parameter = JsonValue::MakeObject();
parameter.set("id", JsonValue(definition.id));
parameter.set("label", JsonValue(definition.label));
if (!definition.description.empty())
parameter.set("description", JsonValue(definition.description));
parameter.set("type", JsonValue(ShaderParameterTypeToString(definition.type)));
parameter.set("defaultValue", SerializeParameterValue(definition, DefaultValueForDefinition(definition)));

View File

@@ -604,6 +604,9 @@ bool ParseParameterDefinition(const JsonValue& parameterJson, ShaderParameterDef
if (!ValidateShaderIdentifier(definition.id, "parameters[].id", manifestPath, error))
return false;
if (!OptionalStringField(parameterJson, "description", definition.description, "", manifestPath, error))
return false;
if (!ParseParameterDefault(parameterJson, definition, manifestPath, error) ||
!ParseParameterNumberField(parameterJson, "min", definition.minNumbers, manifestPath, error) ||
!ParseParameterNumberField(parameterJson, "max", definition.maxNumbers, manifestPath, error) ||

View File

@@ -26,6 +26,7 @@ struct ShaderParameterDefinition
{
std::string id;
std::string label;
std::string description;
ShaderParameterType type = ShaderParameterType::Float;
std::vector<double> defaultNumbers;
std::vector<double> minNumbers;

View File

@@ -54,6 +54,8 @@ const char* VideoIOPixelFormatName(VideoIOPixelFormat format)
{
case VideoIOPixelFormat::V210:
return "10-bit YUV v210";
case VideoIOPixelFormat::Yuva10:
return "10-bit YUVA Ay10";
case VideoIOPixelFormat::Bgra8:
return "8-bit BGRA";
case VideoIOPixelFormat::Uyvy8:
@@ -64,7 +66,7 @@ const char* VideoIOPixelFormatName(VideoIOPixelFormat format)
bool VideoIOPixelFormatIsTenBit(VideoIOPixelFormat format)
{
return format == VideoIOPixelFormat::V210;
return format == VideoIOPixelFormat::V210 || format == VideoIOPixelFormat::Yuva10;
}
VideoIOPixelFormat ChoosePreferredVideoIOFormat(bool tenBitSupported)
@@ -80,6 +82,8 @@ unsigned VideoIOBytesPerPixel(VideoIOPixelFormat format)
return 2u;
case VideoIOPixelFormat::Bgra8:
return 4u;
case VideoIOPixelFormat::Yuva10:
return 4u;
case VideoIOPixelFormat::V210:
default:
return 0u;
@@ -90,6 +94,8 @@ unsigned VideoIORowBytes(VideoIOPixelFormat format, unsigned frameWidth)
{
if (format == VideoIOPixelFormat::V210)
return MinimumV210RowBytes(frameWidth);
if (format == VideoIOPixelFormat::Yuva10)
return MinimumYuva10RowBytes(frameWidth);
return frameWidth * VideoIOBytesPerPixel(format);
}
@@ -103,6 +109,11 @@ unsigned MinimumV210RowBytes(unsigned frameWidth)
return ((frameWidth + 5u) / 6u) * 16u;
}
// Returns the minimum row stride in bytes for a DeckLink Ay10 (10-bit YUVA)
// frame of the given pixel width. Ay10 stores one 32-bit word per pixel, and
// rows are padded so the pixel count rounds up to a 64-pixel (256-byte)
// boundary.
unsigned MinimumYuva10RowBytes(unsigned frameWidth)
{
    const unsigned kPixelsPerGroup = 64u;
    const unsigned kBytesPerPixel = 4u;
    const unsigned groups = (frameWidth + kPixelsPerGroup - 1u) / kPixelsPerGroup; // round up
    return groups * kPixelsPerGroup * kBytesPerPixel;
}
unsigned ActiveV210WordsForWidth(unsigned frameWidth)
{
return ((frameWidth + 5u) / 6u) * 4u;

View File

@@ -7,6 +7,7 @@ enum class VideoIOPixelFormat
{
Uyvy8,
V210,
Yuva10,
Bgra8
};
@@ -31,6 +32,7 @@ unsigned VideoIOBytesPerPixel(VideoIOPixelFormat format);
unsigned VideoIORowBytes(VideoIOPixelFormat format, unsigned frameWidth);
unsigned PackedTextureWidthFromRowBytes(unsigned rowBytes);
unsigned MinimumV210RowBytes(unsigned frameWidth);
unsigned MinimumYuva10RowBytes(unsigned frameWidth);
unsigned ActiveV210WordsForWidth(unsigned frameWidth);
V210CodeValues Rec709RgbToLegalV210(float red, float green, float blue);
std::array<uint8_t, 16> PackV210Block(const V210SixPixelBlock& block);

View File

@@ -95,7 +95,7 @@ public:
virtual ~VideoIODevice() = default;
virtual void ReleaseResources() = 0;
virtual bool DiscoverDevicesAndModes(const VideoFormatSelection& videoModes, std::string& error) = 0;
virtual bool SelectPreferredFormats(const VideoFormatSelection& videoModes, std::string& error) = 0;
virtual bool SelectPreferredFormats(const VideoFormatSelection& videoModes, bool outputAlphaRequired, std::string& error) = 0;
virtual bool ConfigureInput(InputFrameCallback callback, const VideoFormat& inputVideoMode, std::string& error) = 0;
virtual bool ConfigureOutput(OutputFrameCallback callback, const VideoFormat& outputVideoMode, bool externalKeyingEnabled, std::string& error) = 0;
virtual bool Start() = 0;

View File

@@ -223,7 +223,7 @@ bool DeckLinkSession::DiscoverDevicesAndModes(const VideoFormatSelection& videoM
return true;
}
bool DeckLinkSession::SelectPreferredFormats(const VideoFormatSelection& videoModes, std::string& error)
bool DeckLinkSession::SelectPreferredFormats(const VideoFormatSelection& videoModes, bool outputAlphaRequired, std::string& error)
{
if (!output)
{
@@ -239,8 +239,15 @@ bool DeckLinkSession::SelectPreferredFormats(const VideoFormatSelection& videoMo
mState.formatStatusMessage += "DeckLink input does not report 10-bit YUV support for the configured mode; using 8-bit capture. ";
const bool outputTenBitSupported = OutputSupportsFormat(output, videoModes.output.displayMode, bmdFormat10BitYUV);
mState.outputPixelFormat = outputTenBitSupported ? VideoIOPixelFormat::V210 : VideoIOPixelFormat::Bgra8;
if (!outputTenBitSupported)
const bool outputTenBitYuvaSupported = OutputSupportsFormat(output, videoModes.output.displayMode, bmdFormat10BitYUVA);
mState.outputPixelFormat = outputAlphaRequired
? (outputTenBitYuvaSupported ? VideoIOPixelFormat::Yuva10 : VideoIOPixelFormat::Bgra8)
: (outputTenBitSupported ? VideoIOPixelFormat::V210 : VideoIOPixelFormat::Bgra8);
if (outputAlphaRequired && outputTenBitYuvaSupported)
mState.formatStatusMessage += "External keying requires alpha; using 10-bit YUVA output. ";
else if (outputAlphaRequired)
mState.formatStatusMessage += "External keying requires alpha, but DeckLink output does not report 10-bit YUVA support for the configured mode; using 8-bit BGRA output. ";
else if (!outputTenBitSupported)
mState.formatStatusMessage += "DeckLink output does not report 10-bit YUV support for the configured mode; using 8-bit BGRA output. ";
int deckLinkOutputRowBytes = 0;

View File

@@ -22,7 +22,7 @@ public:
void ReleaseResources() override;
bool DiscoverDevicesAndModes(const VideoFormatSelection& videoModes, std::string& error) override;
bool SelectPreferredFormats(const VideoFormatSelection& videoModes, std::string& error) override;
bool SelectPreferredFormats(const VideoFormatSelection& videoModes, bool outputAlphaRequired, std::string& error) override;
bool ConfigureInput(InputFrameCallback callback, const VideoFormat& inputVideoMode, std::string& error) override;
bool ConfigureOutput(OutputFrameCallback callback, const VideoFormat& outputVideoMode, bool externalKeyingEnabled, std::string& error) override;
bool Start() override;

View File

@@ -6,6 +6,8 @@ BMDPixelFormat DeckLinkPixelFormatForVideoIO(VideoIOPixelFormat format)
{
case VideoIOPixelFormat::V210:
return bmdFormat10BitYUV;
case VideoIOPixelFormat::Yuva10:
return bmdFormat10BitYUVA;
case VideoIOPixelFormat::Bgra8:
return bmdFormat8BitBGRA;
case VideoIOPixelFormat::Uyvy8:
@@ -18,6 +20,8 @@ VideoIOPixelFormat VideoIOPixelFormatFromDeckLink(BMDPixelFormat format)
{
if (format == bmdFormat10BitYUV)
return VideoIOPixelFormat::V210;
if (format == bmdFormat10BitYUVA)
return VideoIOPixelFormat::Yuva10;
if (format == bmdFormat8BitBGRA)
return VideoIOPixelFormat::Bgra8;
return VideoIOPixelFormat::Uyvy8;

View File

@@ -4,6 +4,29 @@
"description": "Production-style green/blue screen keyer with matte refinement, despill, edge treatment, and debug views.",
"category": "Keying",
"entryPoint": "shadeVideo",
"passes": [
{
"id": "rawMatte",
"source": "shader.slang",
"entryPoint": "buildRawMatte",
"inputs": ["layerInput"],
"output": "rawMatte"
},
{
"id": "refinedMatte",
"source": "shader.slang",
"entryPoint": "refineMatte",
"inputs": ["rawMatte"],
"output": "refinedMatte"
},
{
"id": "final",
"source": "shader.slang",
"entryPoint": "applyKey",
"inputs": ["refinedMatte"],
"output": "layerOutput"
}
],
"parameters": [
{
"id": "screenColor",
@@ -167,6 +190,46 @@
"max": 1.0,
"step": 0.005
},
{
"id": "cropLeft",
"label": "Crop Left",
"description": "Trims the final matte from the left edge as a fraction of frame width.",
"type": "float",
"default": 0.0,
"min": 0.0,
"max": 0.5,
"step": 0.001
},
{
"id": "cropRight",
"label": "Crop Right",
"description": "Trims the final matte from the right edge as a fraction of frame width.",
"type": "float",
"default": 0.0,
"min": 0.0,
"max": 0.5,
"step": 0.001
},
{
"id": "cropTop",
"label": "Crop Top",
"description": "Trims the final matte from the top edge as a fraction of frame height.",
"type": "float",
"default": 0.0,
"min": 0.0,
"max": 0.5,
"step": 0.001
},
{
"id": "cropBottom",
"label": "Crop Bottom",
"description": "Trims the final matte from the bottom edge as a fraction of frame height.",
"type": "float",
"default": 0.0,
"min": 0.0,
"max": 0.5,
"step": 0.001
},
{
"id": "viewMode",
"label": "View",

View File

@@ -50,12 +50,17 @@ float rawAlphaAt(float2 uv, ShaderContext context)
return saturate(alpha);
}
float refinedAlphaAt(float2 uv, ShaderContext context)
// Reads the alpha of the upstream matte pass at the given UV. The UV is
// clamped to [0,1] so the blur taps in the refine pass never sample outside
// the frame, and the result is clamped to a legal alpha range.
float matteAlphaAt(float2 uv)
{
return saturate(sampleVideo(saturate(uv)).a);
}
float refinedAlphaFromMatte(float2 uv, ShaderContext context)
{
float2 pixel = 1.0 / max(context.outputResolution, float2(1.0, 1.0));
float blur = max(matteBlur, 0.0);
float aaRadius = max(blur, 0.65);
float centerAlpha = rawAlphaAt(uv, context);
float centerAlpha = matteAlphaAt(uv);
float alpha = centerAlpha * 0.30;
if (aaRadius > 0.0001)
@@ -64,51 +69,51 @@ float refinedAlphaAt(float2 uv, ShaderContext context)
float2 halfRadius = radius * 0.5;
float alphaMin = centerAlpha;
float alphaMax = centerAlpha;
float sampleAlpha = rawAlphaAt(uv + float2(halfRadius.x, 0.0), context);
float sampleAlpha = matteAlphaAt(uv + float2(halfRadius.x, 0.0));
alpha += sampleAlpha * 0.065;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv - float2(halfRadius.x, 0.0), context);
sampleAlpha = matteAlphaAt(uv - float2(halfRadius.x, 0.0));
alpha += sampleAlpha * 0.065;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv + float2(0.0, halfRadius.y), context);
sampleAlpha = matteAlphaAt(uv + float2(0.0, halfRadius.y));
alpha += sampleAlpha * 0.065;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv - float2(0.0, halfRadius.y), context);
sampleAlpha = matteAlphaAt(uv - float2(0.0, halfRadius.y));
alpha += sampleAlpha * 0.065;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv + float2(radius.x, 0.0), context);
sampleAlpha = matteAlphaAt(uv + float2(radius.x, 0.0));
alpha += sampleAlpha * 0.06;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv - float2(radius.x, 0.0), context);
sampleAlpha = matteAlphaAt(uv - float2(radius.x, 0.0));
alpha += sampleAlpha * 0.06;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv + float2(0.0, radius.y), context);
sampleAlpha = matteAlphaAt(uv + float2(0.0, radius.y));
alpha += sampleAlpha * 0.06;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv - float2(0.0, radius.y), context);
sampleAlpha = matteAlphaAt(uv - float2(0.0, radius.y));
alpha += sampleAlpha * 0.06;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv + radius, context);
sampleAlpha = matteAlphaAt(uv + radius);
alpha += sampleAlpha * 0.05;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv - radius, context);
sampleAlpha = matteAlphaAt(uv - radius);
alpha += sampleAlpha * 0.05;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv + float2(radius.x, -radius.y), context);
sampleAlpha = matteAlphaAt(uv + float2(radius.x, -radius.y));
alpha += sampleAlpha * 0.05;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
sampleAlpha = rawAlphaAt(uv + float2(-radius.x, radius.y), context);
sampleAlpha = matteAlphaAt(uv + float2(-radius.x, radius.y));
alpha += sampleAlpha * 0.05;
alphaMin = min(alphaMin, sampleAlpha);
alphaMax = max(alphaMax, sampleAlpha);
@@ -118,7 +123,7 @@ float refinedAlphaAt(float2 uv, ShaderContext context)
}
else
{
alpha = rawAlphaAt(uv, context);
alpha = centerAlpha;
}
alpha = saturate((alpha - clipBlack) / max(clipWhite - clipBlack, 0.0001));
@@ -147,17 +152,43 @@ float3 despillColor(float3 color, float alpha)
return saturate(neutralized);
}
float4 shadeVideo(ShaderContext context)
// Builds a 0..1 garbage-matte mask from the four crop parameters (fractions
// of frame width/height), feathered by one output pixel per edge so the crop
// boundary is anti-aliased rather than a hard step.
float cropMaskAt(float2 uv, ShaderContext context)
{
    float2 onePixel = 1.0 / max(context.outputResolution, float2(1.0, 1.0));
    float edgeL = saturate(cropLeft);
    float edgeR = saturate(cropRight);
    float edgeT = saturate(cropTop);
    float edgeB = saturate(cropBottom);
    float maskL = smoothstep(edgeL, edgeL + onePixel.x, uv.x);
    float maskR = 1.0 - smoothstep(1.0 - edgeR - onePixel.x, 1.0 - edgeR, uv.x);
    float maskT = smoothstep(edgeT, edgeT + onePixel.y, uv.y);
    float maskB = 1.0 - smoothstep(1.0 - edgeB - onePixel.y, 1.0 - edgeB, uv.y);
    return saturate(maskL * maskR * maskT * maskB);
}
// Pass 1 of the keyer: emits the clamped source color with the raw
// (unrefined) key alpha, so the refine pass can blur the matte from a
// texture instead of re-evaluating the keyer per blur tap.
float4 buildRawMatte(ShaderContext context)
{
    float4 source = context.sourceColor;
    float rawAlpha = rawAlphaAt(context.uv, context);
    return float4(saturate(source.rgb), rawAlpha);
}
// Pass 2 of the keyer: reads the raw-matte pass output and replaces its
// alpha with the blurred/clip-adjusted refined matte, leaving color intact.
float4 refineMatte(ShaderContext context)
{
    float4 matte = sampleVideo(context.uv);
    float refinedAlpha = refinedAlphaFromMatte(context.uv, context);
    return float4(saturate(matte.rgb), refinedAlpha);
}
float4 applyKey(ShaderContext context)
{
float4 keyed = sampleVideo(context.uv);
float3 color = saturate(keyed.rgb);
float alpha = saturate(keyed.a);
float spill = spillAmountForColor(color);
float3 despilled = despillColor(color, alpha);
float cropMask = cropMaskAt(context.uv, context);
alpha *= cropMask;
float edgeAmount = saturate(1.0 - abs(alpha * 2.0 - 1.0));
despilled = lerp(despilled, despilled * saturate(edgeColor.rgb), edgeAmount * saturate(edgeRecover));
alpha = saturate(lerp(alpha, rawAlphaAt(context.uv, context), edgeAmount * saturate(edgeRecover) * 0.35));
if (viewMode == 1)
return float4(alpha, alpha, alpha, 1.0);
@@ -167,10 +198,15 @@ float4 shadeVideo(ShaderContext context)
return float4(despilled, 1.0);
if (viewMode == 4)
{
float rawAlpha = rawAlphaAt(context.uv, context);
float rawAlpha = rawAlphaAt(context.uv, context) * cropMask;
return float4(rawAlpha, alpha, spill, 1.0);
}
float3 premultiplied = saturate(despilled) * alpha;
return float4(premultiplied, alpha);
}
// Legacy single-pass entry point, kept for hosts that ignore the multipass
// manifest and call shadeVideo directly.
// NOTE(review): in single-pass mode applyKey's sampleVideo() reads the layer
// input rather than the refined matte the multipass path provides — confirm
// the host's implicit-pass wiring makes this fallback produce a usable key.
float4 shadeVideo(ShaderContext context)
{
return applyKey(context);
}

View File

@@ -4,6 +4,22 @@
"description": "VHS with wiggle, smear, and YIQ-style color separation inspired by nostalgic analog references.",
"category": "Glitch",
"entryPoint": "shadeVideo",
"passes": [
{
"id": "tapeSmear",
"source": "shader.slang",
"entryPoint": "buildTapeSmear",
"inputs": ["layerInput"],
"output": "tapeSmear"
},
{
"id": "final",
"source": "shader.slang",
"entryPoint": "finishVhs",
"inputs": ["tapeSmear"],
"output": "layerOutput"
}
],
"parameters": [
{
"id": "wiggle",

View File

@@ -158,10 +158,15 @@ float3 blurVhs(float2 uv, float d, int sampleCount)
return sum;
}
float4 shadeVideo(ShaderContext context)
// Shared time base for both VHS passes: offsets playback time by a large
// per-startup random multiple so separate layers/instances do not glitch in
// lockstep. Both buildTapeSmear and finishVhs must use this same value so
// their animated artifacts stay in sync with each other.
float distortedTapeTime(ShaderContext context)
{
return context.time + context.startupRandom * 113.0;
}
float4 buildTapeSmear(ShaderContext context)
{
float2 uv = context.uv;
float time = context.time + context.startupRandom * 113.0;
float time = distortedTapeTime(context);
float framecount = frac(time * wiggleSpeed / 7.0) * 7.0;
int sampleCount = int(clamp(blurSamples, 3.0, 15.0) + 0.5);
@@ -189,6 +194,13 @@ float4 shadeVideo(ShaderContext context)
float q = rgb2yiq(qBlur).b;
float3 color = yiq2rgb(float3(y, i, q)) - pow(s + e * 2.0, 3.0);
return float4(saturate(color), 1.0);
}
float4 finishVhs(ShaderContext context)
{
float time = distortedTapeTime(context);
float3 color = sampleVideo(context.uv).rgb;
float2 centered = context.uv * 2.0 - 1.0;
centered.x *= context.outputResolution.x / max(context.outputResolution.y, 1.0);
@@ -238,3 +250,8 @@ float4 shadeVideo(ShaderContext context)
return float4(saturate(color), 1.0);
}
// Legacy single-pass entry point, kept for hosts that ignore the multipass
// manifest and call shadeVideo directly.
// NOTE(review): in single-pass mode finishVhs's sampleVideo() reads the raw
// layer input rather than the tapeSmear pass output — confirm the host's
// implicit-pass wiring makes this fallback acceptable.
float4 shadeVideo(ShaderContext context)
{
return finishVhs(context);
}

View File

@@ -59,7 +59,7 @@ void TestValidManifest()
"fonts": [{ "id": "inter", "path": "Inter.ttf" }],
"temporal": { "enabled": true, "historySource": "source", "historyLength": 8 },
"parameters": [
{ "id": "gain", "label": "Gain", "type": "float", "default": 0.5, "min": 0, "max": 1 },
{ "id": "gain", "label": "Gain", "description": "Scales the output intensity.", "type": "float", "default": 0.5, "min": 0, "max": 1 },
{ "id": "titleText", "label": "Title", "type": "text", "default": "LIVE", "font": "inter", "maxLength": 32 },
{ "id": "mode", "label": "Mode", "type": "enum", "default": "soft", "options": [
{ "value": "soft", "label": "Soft" },
@@ -78,6 +78,7 @@ void TestValidManifest()
Expect(package.fontAssets.size() == 1 && package.fontAssets[0].id == "inter", "font assets parse");
Expect(package.temporal.enabled && package.temporal.effectiveHistoryLength == 4, "temporal history is capped");
Expect(package.parameters.size() == 4, "parameters parse");
Expect(package.parameters[0].description == "Scales the output intensity.", "parameter descriptions parse");
Expect(package.parameters[1].type == ShaderParameterType::Text && package.parameters[1].defaultTextValue == "LIVE", "text parameter parses");
Expect(package.parameters[3].type == ShaderParameterType::Trigger, "trigger parameter parses");
Expect(package.passes.size() == 1 && package.passes[0].id == "main", "legacy manifests get an implicit main pass");

View File

@@ -31,7 +31,7 @@ public:
return true;
}
bool SelectPreferredFormats(const VideoFormatSelection&, std::string&) override
bool SelectPreferredFormats(const VideoFormatSelection&, bool, std::string&) override
{
mState.inputPixelFormat = VideoIOPixelFormat::Uyvy8;
mState.outputPixelFormat = VideoIOPixelFormat::Bgra8;
@@ -120,7 +120,7 @@ int main()
bool outputSeen = false;
Expect(device.DiscoverDevicesAndModes(selection, error), "fake discovery succeeds");
Expect(device.SelectPreferredFormats(selection, error), "fake format selection succeeds");
Expect(device.SelectPreferredFormats(selection, false, error), "fake format selection succeeds");
Expect(device.ConfigureInput([&](const VideoIOFrame& frame) {
inputSeen = frame.bytes != nullptr && frame.width == 1920 && frame.pixelFormat == VideoIOPixelFormat::Uyvy8;
}, selection.input, error), "fake input config succeeds");

View File

@@ -22,6 +22,7 @@ void TestPreferredFormatSelection()
Expect(ChoosePreferredVideoIOFormat(true) == VideoIOPixelFormat::V210, "10-bit is preferred when supported");
Expect(ChoosePreferredVideoIOFormat(false) == VideoIOPixelFormat::Uyvy8, "8-bit is used as fallback");
Expect(DeckLinkPixelFormatForVideoIO(VideoIOPixelFormat::V210) == bmdFormat10BitYUV, "v210 maps to DeckLink 10-bit YUV");
Expect(DeckLinkPixelFormatForVideoIO(VideoIOPixelFormat::Yuva10) == bmdFormat10BitYUVA, "Ay10 maps to DeckLink 10-bit YUVA");
Expect(DeckLinkPixelFormatForVideoIO(VideoIOPixelFormat::Uyvy8) == bmdFormat8BitYUV, "UYVY maps to DeckLink 8-bit YUV");
Expect(DeckLinkPixelFormatForVideoIO(VideoIOPixelFormat::Bgra8) == bmdFormat8BitBGRA, "BGRA maps to DeckLink 8-bit BGRA");
}
@@ -31,11 +32,15 @@ void TestRowByteHelpers()
Expect(MinimumV210RowBytes(1920) == 5120, "1920-wide v210 active row bytes");
Expect(MinimumV210RowBytes(1280) == 3424, "1280-wide v210 active row bytes rounds up to six-pixel group");
Expect(MinimumV210RowBytes(3840) == 10240, "3840-wide v210 active row bytes");
Expect(MinimumYuva10RowBytes(1920) == 7680, "1920-wide Ay10 row bytes");
Expect(MinimumYuva10RowBytes(1919) == 7680, "Ay10 row bytes round up to 64-pixel boundary");
Expect(MinimumYuva10RowBytes(3840) == 15360, "3840-wide Ay10 row bytes");
Expect(PackedTextureWidthFromRowBytes(5120) == 1280, "packed texture width is row bytes divided into RGBA byte texels");
Expect(ActiveV210WordsForWidth(1920) == 1280, "active v210 words match 1920 width");
Expect(VideoIORowBytes(VideoIOPixelFormat::Uyvy8, 1920) == 3840, "UYVY row bytes");
Expect(VideoIORowBytes(VideoIOPixelFormat::Bgra8, 1920) == 7680, "BGRA row bytes");
Expect(VideoIORowBytes(VideoIOPixelFormat::V210, 1920) == 5120, "v210 row bytes");
Expect(VideoIORowBytes(VideoIOPixelFormat::Yuva10, 1920) == 7680, "Ay10 row bytes");
}
void TestV210PackUnpack()

View File

@@ -21,7 +21,10 @@ function ParameterHeader({ layer, parameter, onReset, resetDisabled }) {
return (
<div className="parameter__header">
<label>{parameter.label}</label>
<div className="parameter__title">
<label>{parameter.label}</label>
{parameter.description ? <p>{parameter.description}</p> : null}
</div>
<button
type="button"
className="parameter__osc"

View File

@@ -890,7 +890,22 @@ pre {
display: grid;
grid-template-columns: minmax(5.5rem, auto) minmax(0, 1fr) auto;
gap: 0.5rem;
align-items: center;
align-items: start;
}
.parameter__title {
min-width: 0;
}
.parameter__title label {
display: block;
}
.parameter__title p {
margin: 0.2rem 0 0;
color: var(--app-muted);
font-size: 0.82rem;
line-height: 1.3;
}
.parameter__osc,