8 Commits

Author SHA1 Message Date
Aiden
0c16665610 Revert "Decklink separation"
Some checks failed
CI / Windows Release Package (push) Has been cancelled
CI / React UI Build (push) Has been cancelled
CI / Native Windows Build And Tests (push) Has been cancelled
This reverts commit 46f2f1ece5.
2026-05-09 16:47:33 +10:00
Aiden
46f2f1ece5 Decklink separation 2026-05-09 14:42:11 +10:00
Aiden
4ffbb97abf Video backend
All checks were successful
CI / React UI Build (push) Successful in 11s
CI / Native Windows Build And Tests (push) Successful in 2m43s
CI / Windows Release Package (push) Successful in 2m54s
2026-05-09 14:15:49 +10:00
Aiden
98f5cbe309 preview changes
All checks were successful
CI / React UI Build (push) Successful in 11s
CI / Native Windows Build And Tests (push) Successful in 2m23s
CI / Windows Release Package (push) Successful in 2m41s
2026-05-09 13:53:00 +10:00
Aiden
93d856b3b6 CPU optimisations
Some checks failed
CI / React UI Build (push) Successful in 37s
CI / Windows Release Package (push) Has been cancelled
CI / Native Windows Build And Tests (push) Has been cancelled
2026-05-09 13:50:27 +10:00
6ea6971dd6 more shaders and updates/changes
All checks were successful
CI / React UI Build (push) Successful in 10s
CI / Native Windows Build And Tests (push) Successful in 2m22s
CI / Windows Release Package (push) Successful in 2m35s
2026-05-08 20:32:19 +10:00
163d70e9bd Annotations
All checks were successful
CI / React UI Build (push) Successful in 11s
CI / Native Windows Build And Tests (push) Successful in 2m22s
CI / Windows Release Package (push) Successful in 2m28s
2026-05-08 20:01:22 +10:00
8afef5065a Update README.md
All checks were successful
CI / React UI Build (push) Successful in 11s
CI / Native Windows Build And Tests (push) Successful in 2m20s
CI / Windows Release Package (push) Successful in 2m34s
2026-05-08 19:14:31 +10:00
51 changed files with 1515 additions and 305 deletions

View File

@@ -49,6 +49,10 @@ set(APP_SOURCES
"${APP_DIR}/videoio/decklink/DeckLinkSession.h"
"${APP_DIR}/videoio/decklink/DeckLinkVideoIOFormat.cpp"
"${APP_DIR}/videoio/decklink/DeckLinkVideoIOFormat.h"
"${APP_DIR}/videoio/VideoIOBackendFactory.cpp"
"${APP_DIR}/videoio/VideoIOBackendFactory.h"
"${APP_DIR}/videoio/VideoIOConfig.cpp"
"${APP_DIR}/videoio/VideoIOConfig.h"
"${APP_DIR}/gl/renderer/GLExtensions.cpp"
"${APP_DIR}/gl/renderer/GLExtensions.h"
"${APP_DIR}/gl/shader/GlobalParamsBuffer.cpp"
@@ -204,6 +208,35 @@ endif()
add_test(NAME RuntimeParameterUtilsTests COMMAND RuntimeParameterUtilsTests)
add_executable(RuntimeHostVideoIOStateTests
"${APP_DIR}/runtime/RuntimeHost.cpp"
"${APP_DIR}/runtime/RuntimeClock.cpp"
"${APP_DIR}/runtime/RuntimeJson.cpp"
"${APP_DIR}/runtime/RuntimeParameterUtils.cpp"
"${APP_DIR}/shader/ShaderCompiler.cpp"
"${APP_DIR}/shader/ShaderPackageRegistry.cpp"
"${APP_DIR}/videoio/VideoIOConfig.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/tests/RuntimeHostVideoIOStateTests.cpp"
)
target_include_directories(RuntimeHostVideoIOStateTests PRIVATE
"${APP_DIR}"
"${APP_DIR}/platform"
"${APP_DIR}/runtime"
"${APP_DIR}/shader"
"${APP_DIR}/videoio"
)
target_link_libraries(RuntimeHostVideoIOStateTests PRIVATE
Advapi32
)
if(MSVC)
target_compile_options(RuntimeHostVideoIOStateTests PRIVATE /W3)
endif()
add_test(NAME RuntimeHostVideoIOStateTests COMMAND RuntimeHostVideoIOStateTests)
add_executable(Std140BufferTests
"${CMAKE_CURRENT_SOURCE_DIR}/tests/Std140BufferTests.cpp"
)
@@ -318,6 +351,7 @@ endif()
add_test(NAME VideoPlayoutSchedulerTests COMMAND VideoPlayoutSchedulerTests)
add_executable(VideoIODeviceFakeTests
"${APP_DIR}/videoio/VideoIOConfig.cpp"
"${APP_DIR}/videoio/VideoIOFormat.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/tests/VideoIODeviceFakeTests.cpp"
)
@@ -334,6 +368,43 @@ endif()
add_test(NAME VideoIODeviceFakeTests COMMAND VideoIODeviceFakeTests)
add_executable(VideoIOBackendFactoryTests
"${APP_DIR}/videoio/decklink/DeckLinkAPI_i.c"
"${APP_DIR}/videoio/decklink/DeckLinkSession.cpp"
"${APP_DIR}/videoio/decklink/DeckLinkSession.h"
"${APP_DIR}/videoio/decklink/DeckLinkDisplayMode.cpp"
"${APP_DIR}/videoio/decklink/DeckLinkDisplayMode.h"
"${APP_DIR}/videoio/decklink/DeckLinkVideoIOFormat.cpp"
"${APP_DIR}/videoio/decklink/DeckLinkVideoIOFormat.h"
"${APP_DIR}/videoio/decklink/DeckLinkFrameTransfer.cpp"
"${APP_DIR}/videoio/decklink/DeckLinkFrameTransfer.h"
"${APP_DIR}/videoio/VideoIOBackendFactory.cpp"
"${APP_DIR}/videoio/VideoIOBackendFactory.h"
"${APP_DIR}/videoio/VideoIOConfig.cpp"
"${APP_DIR}/videoio/VideoIOConfig.h"
"${APP_DIR}/videoio/VideoIOFormat.cpp"
"${APP_DIR}/videoio/VideoPlayoutScheduler.cpp"
"${APP_DIR}/videoio/VideoPlayoutScheduler.h"
"${CMAKE_CURRENT_SOURCE_DIR}/tests/VideoIOBackendFactoryTests.cpp"
)
target_include_directories(VideoIOBackendFactoryTests PRIVATE
"${APP_DIR}"
"${APP_DIR}/gl/renderer"
"${APP_DIR}/videoio"
"${APP_DIR}/videoio/decklink"
)
target_link_libraries(VideoIOBackendFactoryTests PRIVATE
Ole32
)
if(MSVC)
target_compile_options(VideoIOBackendFactoryTests PRIVATE /W3)
endif()
add_test(NAME VideoIOBackendFactoryTests COMMAND VideoIOBackendFactoryTests)
install(TARGETS LoopThroughWithOpenGLCompositing
RUNTIME DESTINATION "."
)

View File

@@ -273,3 +273,5 @@ If `SLANG_ROOT` is not set, the workflow falls back to the repo-local default un
- compute shaders or a small 1x1 or nx1 RGBA16f render target for arbitrary data storage
- allow shaders to read other shaders data store based on name? or output over OSC
- Mipmapping for shader-declared textures
- Annotate included shaders
- allow 3 vector exposed controls

View File

@@ -412,10 +412,10 @@ LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
break;
}
// Setup OpenGL and DeckLink capture and playout object
// Setup OpenGL and video I/O capture/playout object
pOpenGLComposite = new OpenGLComposite(hWnd, hDC, hRC);
if (pOpenGLComposite->InitDeckLink())
if (pOpenGLComposite->InitializeVideoIO())
{
wglMakeCurrent( NULL, NULL );
if (pOpenGLComposite->Start())
@@ -423,11 +423,11 @@ LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
PostMessage(hWnd, kCreateStatusStripMessage, 0, 0);
break; // success
}
MessageBoxA(NULL, "The OpenGL/DeckLink runtime initialized, but playout failed to start. See the previous DeckLink start message for the failing call.", "Startup failed", MB_OK | MB_ICONERROR);
MessageBoxA(NULL, "The OpenGL/video I/O runtime initialized, but playout failed to start. See the previous start message for the failing call.", "Startup failed", MB_OK | MB_ICONERROR);
}
else
{
MessageBoxA(NULL, "The OpenGL/DeckLink runtime failed to initialize. See the previous initialization message for the failing call.", "Startup failed", MB_OK | MB_ICONERROR);
MessageBoxA(NULL, "The OpenGL/video I/O runtime failed to initialize. See the previous initialization message for the failing call.", "Startup failed", MB_OK | MB_ICONERROR);
}
// Failed to initialize - cleanup
@@ -438,7 +438,7 @@ LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
}
catch (...)
{
ShowUnhandledExceptionMessage("Startup failed while creating the OpenGL/DeckLink runtime.");
ShowUnhandledExceptionMessage("Startup failed while creating the OpenGL/video I/O runtime.");
PostMessage(hWnd, WM_CLOSE, 0, 0);
break;
}
@@ -474,7 +474,7 @@ LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
}
catch (...)
{
ShowUnhandledExceptionMessage("Shutdown failed while tearing down the OpenGL/DeckLink runtime.");
ShowUnhandledExceptionMessage("Shutdown failed while tearing down the OpenGL/video I/O runtime.");
}
// Deselect the current rendering context and delete it

View File

@@ -501,6 +501,9 @@ bool ControlServer::SendWebSocketText(SOCKET clientSocket, const std::string& pa
void ControlServer::BroadcastStateLocked()
{
if (mClients.empty())
return;
const std::string stateMessage = mCallbacks.getStateJson ? mCallbacks.getStateJson() : "{}";
for (auto it = mClients.begin(); it != mClients.end();)
{

View File

@@ -1,5 +1,3 @@
#include "DeckLinkDisplayMode.h"
#include "DeckLinkSession.h"
#include "OpenGLComposite.h"
#include "GLExtensions.h"
#include "GlRenderConstants.h"
@@ -10,6 +8,7 @@
#include "PngScreenshotWriter.h"
#include "RuntimeServices.h"
#include "ShaderBuildQueue.h"
#include "VideoIOBackendFactory.h"
#include <algorithm>
#include <chrono>
@@ -23,7 +22,6 @@
OpenGLComposite::OpenGLComposite(HWND hWnd, HDC hDC, HGLRC hRC) :
hGLWnd(hWnd), hGLDC(hDC), hGLRC(hRC),
mVideoIO(std::make_unique<DeckLinkSession>()),
mRenderer(std::make_unique<OpenGLRenderer>()),
mUseCommittedLayerStates(false),
mScreenshotRequested(false)
@@ -37,7 +35,7 @@ OpenGLComposite::OpenGLComposite(HWND hWnd, HDC hDC, HGLRC hRC) :
[this]() { ProcessScreenshotRequest(); },
[this]() { paintGL(); });
mVideoIOBridge = std::make_unique<OpenGLVideoIOBridge>(
*mVideoIO,
nullptr,
*mRenderer,
*mRenderPipeline,
*mRuntimeHost,
@@ -56,20 +54,15 @@ OpenGLComposite::~OpenGLComposite()
mRuntimeServices->Stop();
if (mShaderBuildQueue)
mShaderBuildQueue->Stop();
mVideoIO->ReleaseResources();
if (mVideoIO)
mVideoIO->ReleaseResources();
mRenderer->DestroyResources();
DeleteCriticalSection(&pMutex);
}
bool OpenGLComposite::InitDeckLink()
bool OpenGLComposite::InitializeVideoIO()
{
return InitVideoIO();
}
bool OpenGLComposite::InitVideoIO()
{
VideoFormatSelection videoModes;
std::string initFailureReason;
if (mRuntimeHost && mRuntimeHost->GetRepoRoot().empty())
@@ -82,31 +75,31 @@ bool OpenGLComposite::InitVideoIO()
}
}
if (mRuntimeHost)
if (!mRuntimeHost)
{
if (!ResolveConfiguredVideoFormats(
mRuntimeHost->GetInputVideoFormat(),
mRuntimeHost->GetInputFrameRate(),
mRuntimeHost->GetOutputVideoFormat(),
mRuntimeHost->GetOutputFrameRate(),
videoModes,
initFailureReason))
{
MessageBoxA(NULL, initFailureReason.c_str(), "DeckLink mode configuration error", MB_OK);
return false;
}
initFailureReason = "Runtime host is not available.";
MessageBoxA(NULL, initFailureReason.c_str(), "Video I/O initialization failed", MB_OK | MB_ICONERROR);
return false;
}
if (!mVideoIO->DiscoverDevicesAndModes(videoModes, initFailureReason))
const VideoIOConfiguration videoIOConfig = mRuntimeHost->GetVideoIOConfiguration();
mVideoIO = CreateVideoIODevice(videoIOConfig.backendId, initFailureReason);
if (!mVideoIO)
{
MessageBoxA(NULL, initFailureReason.c_str(), "Video I/O initialization failed", MB_OK | MB_ICONERROR);
return false;
}
mVideoIOBridge->SetVideoIODevice(mVideoIO.get());
if (!mVideoIO->DiscoverDevicesAndModes(videoIOConfig, initFailureReason))
{
const char* title = initFailureReason == "Please install the Blackmagic DeckLink drivers to use the features of this application."
? "This application requires the DeckLink drivers installed."
: "DeckLink initialization failed";
? "This application requires the selected video I/O drivers installed."
: "Video I/O initialization failed";
MessageBoxA(NULL, initFailureReason.c_str(), title, MB_OK | MB_ICONERROR);
return false;
}
const bool outputAlphaRequired = mRuntimeHost && mRuntimeHost->ExternalKeyingEnabled();
if (!mVideoIO->SelectPreferredFormats(videoModes, outputAlphaRequired, initFailureReason))
if (!mVideoIO->SelectPreferredFormats(videoIOConfig, initFailureReason))
goto error;
if (! CheckOpenGLExtensions())
@@ -121,9 +114,9 @@ bool OpenGLComposite::InitVideoIO()
goto error;
}
PublishVideoIOStatus(mVideoIO->OutputModelName().empty()
? "DeckLink output device selected."
: ("Selected output device: " + mVideoIO->OutputModelName()));
PublishVideoIOStatus(mVideoIO->DeviceName().empty()
? "Video I/O output device selected."
: ("Selected output device: " + mVideoIO->DeviceName()));
// Resize window to match output video frame, but scale large formats down by half for viewing.
if (mVideoIO->OutputFrameWidth() < 1920)
@@ -131,7 +124,7 @@ bool OpenGLComposite::InitVideoIO()
else
resizeWindow(mVideoIO->OutputFrameWidth() / 2, mVideoIO->OutputFrameHeight() / 2);
if (!mVideoIO->ConfigureInput([this](const VideoIOFrame& frame) { mVideoIOBridge->VideoFrameArrived(frame); }, videoModes.input, initFailureReason))
if (!mVideoIO->ConfigureInput([this](const VideoIOFrame& frame) { mVideoIOBridge->VideoFrameArrived(frame); }, initFailureReason))
{
goto error;
}
@@ -140,7 +133,7 @@ bool OpenGLComposite::InitVideoIO()
mRuntimeHost->SetSignalStatus(false, mVideoIO->InputFrameWidth(), mVideoIO->InputFrameHeight(), mVideoIO->InputDisplayModeName());
}
if (!mVideoIO->ConfigureOutput([this](const VideoIOCompletion& completion) { mVideoIOBridge->PlayoutFrameCompleted(completion); }, videoModes.output, mRuntimeHost && mRuntimeHost->ExternalKeyingEnabled(), initFailureReason))
if (!mVideoIO->ConfigureOutput([this](const VideoIOCompletion& completion) { mVideoIOBridge->PlayoutFrameCompleted(completion); }, initFailureReason))
{
goto error;
}
@@ -151,13 +144,16 @@ bool OpenGLComposite::InitVideoIO()
error:
if (!initFailureReason.empty())
MessageBoxA(NULL, initFailureReason.c_str(), "DeckLink initialization failed", MB_OK | MB_ICONERROR);
MessageBoxA(NULL, initFailureReason.c_str(), "Video I/O initialization failed", MB_OK | MB_ICONERROR);
mVideoIO->ReleaseResources();
return false;
}
void OpenGLComposite::paintGL()
{
if (!mVideoIO)
return;
if (!TryEnterCriticalSection(&pMutex))
{
ValidateRect(hGLWnd, NULL);
@@ -187,21 +183,13 @@ void OpenGLComposite::resizeWindow(int width, int height)
void OpenGLComposite::PublishVideoIOStatus(const std::string& statusMessage)
{
if (!mRuntimeHost)
if (!mRuntimeHost || !mVideoIO)
return;
if (!statusMessage.empty())
mVideoIO->SetStatusMessage(statusMessage);
mRuntimeHost->SetVideoIOStatus(
"decklink",
mVideoIO->OutputModelName(),
mVideoIO->SupportsInternalKeying(),
mVideoIO->SupportsExternalKeying(),
mVideoIO->KeyerInterfaceAvailable(),
mRuntimeHost->ExternalKeyingEnabled(),
mVideoIO->ExternalKeyingActive(),
mVideoIO->StatusMessage());
mRuntimeHost->SetVideoIOStatus(mVideoIO->State());
}
bool OpenGLComposite::InitOpenGLState()
@@ -314,15 +302,35 @@ void OpenGLComposite::renderEffect()
}
else if (mRuntimeHost)
{
if (mRuntimeHost->TryGetLayerRenderStates(mVideoIO->InputFrameWidth(), mVideoIO->InputFrameHeight(), layerStates))
{
mCachedLayerRenderStates = layerStates;
}
else
const unsigned renderWidth = mVideoIO->InputFrameWidth();
const unsigned renderHeight = mVideoIO->InputFrameHeight();
const uint64_t renderStateVersion = mRuntimeHost->GetRenderStateVersion();
const bool renderStateCacheValid =
!mCachedLayerRenderStates.empty() &&
mCachedRenderStateVersion == renderStateVersion &&
mCachedRenderStateWidth == renderWidth &&
mCachedRenderStateHeight == renderHeight;
if (renderStateCacheValid)
{
layerStates = mCachedLayerRenderStates;
mRuntimeHost->RefreshDynamicRenderStateFields(layerStates);
}
else
{
if (mRuntimeHost->TryGetLayerRenderStates(renderWidth, renderHeight, layerStates))
{
mCachedLayerRenderStates = layerStates;
mCachedRenderStateVersion = renderStateVersion;
mCachedRenderStateWidth = renderWidth;
mCachedRenderStateHeight = renderHeight;
}
else
{
layerStates = mCachedLayerRenderStates;
mRuntimeHost->RefreshDynamicRenderStateFields(layerStates);
}
}
}
const unsigned historyCap = mRuntimeHost ? mRuntimeHost->GetMaxTemporalHistoryFrames() : 0;
mRenderPass->Render(

View File

@@ -39,8 +39,7 @@ public:
OpenGLComposite(HWND hWnd, HDC hDC, HGLRC hRC);
~OpenGLComposite();
bool InitDeckLink();
bool InitVideoIO();
bool InitializeVideoIO();
bool Start();
bool Stop();
bool ReloadShader();
@@ -87,6 +86,9 @@ private:
std::unique_ptr<ShaderBuildQueue> mShaderBuildQueue;
std::unique_ptr<RuntimeServices> mRuntimeServices;
std::vector<RuntimeRenderState> mCachedLayerRenderStates;
uint64_t mCachedRenderStateVersion = 0;
unsigned mCachedRenderStateWidth = 0;
unsigned mCachedRenderStateHeight = 0;
std::atomic<bool> mUseCommittedLayerStates;
std::atomic<bool> mScreenshotRequested;

View File

@@ -45,7 +45,7 @@ void OpenGLRenderPass::Render(
}
else
{
const std::vector<RenderPassDescriptor> passes = BuildLayerPassDescriptors(layerStates, layerPrograms);
const std::vector<RenderPassDescriptor>& passes = BuildLayerPassDescriptors(layerStates, layerPrograms);
for (const RenderPassDescriptor& pass : passes)
{
RenderLayerPass(
@@ -71,9 +71,9 @@ void OpenGLRenderPass::RenderDecodePass(unsigned inputFrameWidth, unsigned input
glBindVertexArray(mRenderer.FullscreenVertexArray());
glUseProgram(mRenderer.DecodeProgram());
const GLint packedResolutionLocation = glGetUniformLocation(mRenderer.DecodeProgram(), "uPackedVideoResolution");
const GLint decodedResolutionLocation = glGetUniformLocation(mRenderer.DecodeProgram(), "uDecodedVideoResolution");
const GLint inputPixelFormatLocation = glGetUniformLocation(mRenderer.DecodeProgram(), "uInputPixelFormat");
const GLint packedResolutionLocation = mRenderer.DecodePackedResolutionLocation();
const GLint decodedResolutionLocation = mRenderer.DecodeDecodedResolutionLocation();
const GLint inputPixelFormatLocation = mRenderer.DecodeInputPixelFormatLocation();
if (packedResolutionLocation >= 0)
glUniform2f(packedResolutionLocation, static_cast<float>(captureTextureWidth), static_cast<float>(inputFrameHeight));
if (decodedResolutionLocation >= 0)
@@ -96,7 +96,8 @@ std::vector<RenderPassDescriptor> OpenGLRenderPass::BuildLayerPassDescriptors(
// Flatten the layer stack into concrete GL passes. A layer may now contain
// several shader passes, but the outer stack still sees one visible output
// per layer.
std::vector<RenderPassDescriptor> passes;
std::vector<RenderPassDescriptor>& passes = mPassScratch;
passes.clear();
const std::size_t passCount = layerStates.size() < layerPrograms.size() ? layerStates.size() : layerPrograms.size();
std::size_t descriptorCount = 0;
for (std::size_t index = 0; index < passCount; ++index)

View File

@@ -56,4 +56,5 @@ private:
OpenGLRenderer& mRenderer;
ShaderTextureBindings mTextureBindings;
mutable std::vector<RenderPassDescriptor> mPassScratch;
};

View File

@@ -4,6 +4,8 @@
#include "RuntimeHost.h"
#include "VideoIOFormat.h"
#include <cstring>
#include <chrono>
#include <gl/gl.h>
@@ -21,6 +23,11 @@ OpenGLRenderPipeline::OpenGLRenderPipeline(
{
}
OpenGLRenderPipeline::~OpenGLRenderPipeline()
{
ResetAsyncReadbackState();
}
bool OpenGLRenderPipeline::RenderFrame(const RenderPipelineFrameContext& context, VideoIOOutputFrame& outputFrame)
{
const VideoIOState& state = context.videoState;
@@ -62,9 +69,9 @@ void OpenGLRenderPipeline::PackOutputFor10Bit(const VideoIOState& state)
glBindVertexArray(mRenderer.FullscreenVertexArray());
glUseProgram(mRenderer.OutputPackProgram());
const GLint outputResolutionLocation = glGetUniformLocation(mRenderer.OutputPackProgram(), "uOutputVideoResolution");
const GLint activeWordsLocation = glGetUniformLocation(mRenderer.OutputPackProgram(), "uActiveV210Words");
const GLint packFormatLocation = glGetUniformLocation(mRenderer.OutputPackProgram(), "uOutputPackFormat");
const GLint outputResolutionLocation = mRenderer.OutputPackResolutionLocation();
const GLint activeWordsLocation = mRenderer.OutputPackActiveWordsLocation();
const GLint packFormatLocation = mRenderer.OutputPackFormatLocation();
if (outputResolutionLocation >= 0)
glUniform2f(outputResolutionLocation, static_cast<float>(state.outputFrameSize.width), static_cast<float>(state.outputFrameSize.height));
if (activeWordsLocation >= 0)
@@ -78,18 +85,195 @@ void OpenGLRenderPipeline::PackOutputFor10Bit(const VideoIOState& state)
glBindTexture(GL_TEXTURE_2D, 0);
}
void OpenGLRenderPipeline::ReadOutputFrame(const VideoIOState& state, VideoIOOutputFrame& outputFrame)
bool OpenGLRenderPipeline::EnsureAsyncReadbackBuffers(std::size_t requiredBytes)
{
if (requiredBytes == 0)
return false;
if (mAsyncReadbackBytes == requiredBytes && mAsyncReadbackSlots[0].pixelPackBuffer != 0)
return true;
ResetAsyncReadbackState();
mAsyncReadbackBytes = requiredBytes;
for (AsyncReadbackSlot& slot : mAsyncReadbackSlots)
{
glGenBuffers(1, &slot.pixelPackBuffer);
glBindBuffer(GL_PIXEL_PACK_BUFFER, slot.pixelPackBuffer);
glBufferData(GL_PIXEL_PACK_BUFFER, static_cast<GLsizeiptr>(requiredBytes), nullptr, GL_STREAM_READ);
slot.sizeBytes = requiredBytes;
slot.inFlight = false;
}
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
mAsyncReadbackWriteIndex = 0;
mAsyncReadbackReadIndex = 0;
return true;
}
void OpenGLRenderPipeline::ResetAsyncReadbackState()
{
FlushAsyncReadbackPipeline();
for (AsyncReadbackSlot& slot : mAsyncReadbackSlots)
slot.sizeBytes = 0;
if (mAsyncReadbackSlots[0].pixelPackBuffer != 0)
{
for (AsyncReadbackSlot& slot : mAsyncReadbackSlots)
{
if (slot.pixelPackBuffer != 0)
{
glDeleteBuffers(1, &slot.pixelPackBuffer);
slot.pixelPackBuffer = 0;
}
}
}
mAsyncReadbackWriteIndex = 0;
mAsyncReadbackReadIndex = 0;
mAsyncReadbackBytes = 0;
}
void OpenGLRenderPipeline::FlushAsyncReadbackPipeline()
{
for (AsyncReadbackSlot& slot : mAsyncReadbackSlots)
{
if (slot.fence != nullptr)
{
glDeleteSync(slot.fence);
slot.fence = nullptr;
}
slot.inFlight = false;
}
mAsyncReadbackWriteIndex = 0;
mAsyncReadbackReadIndex = 0;
}
void OpenGLRenderPipeline::QueueAsyncReadback(const VideoIOState& state)
{
const bool usePackedOutput = state.outputPixelFormat == VideoIOPixelFormat::V210 || state.outputPixelFormat == VideoIOPixelFormat::Yuva10;
const std::size_t requiredBytes = static_cast<std::size_t>(state.outputFrameRowBytes) * state.outputFrameSize.height;
const GLenum format = usePackedOutput ? GL_RGBA : GL_BGRA;
const GLenum type = usePackedOutput ? GL_UNSIGNED_BYTE : GL_UNSIGNED_INT_8_8_8_8_REV;
const GLuint framebuffer = usePackedOutput ? mRenderer.OutputPackFramebuffer() : mRenderer.OutputFramebuffer();
const GLsizei readWidth = static_cast<GLsizei>(usePackedOutput ? state.outputPackTextureWidth : state.outputFrameSize.width);
const GLsizei readHeight = static_cast<GLsizei>(state.outputFrameSize.height);
if (requiredBytes == 0)
return;
if (mAsyncReadbackBytes != requiredBytes
|| mAsyncReadbackFormat != format
|| mAsyncReadbackType != type
|| mAsyncReadbackFramebuffer != framebuffer)
{
mAsyncReadbackFormat = format;
mAsyncReadbackType = type;
mAsyncReadbackFramebuffer = framebuffer;
if (!EnsureAsyncReadbackBuffers(requiredBytes))
return;
}
AsyncReadbackSlot& slot = mAsyncReadbackSlots[mAsyncReadbackWriteIndex];
if (slot.fence != nullptr)
{
glDeleteSync(slot.fence);
slot.fence = nullptr;
}
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glPixelStorei(GL_PACK_ROW_LENGTH, 0);
if (state.outputPixelFormat == VideoIOPixelFormat::V210 || state.outputPixelFormat == VideoIOPixelFormat::Yuva10)
glBindFramebuffer(GL_READ_FRAMEBUFFER, framebuffer);
glBindBuffer(GL_PIXEL_PACK_BUFFER, slot.pixelPackBuffer);
glBufferData(GL_PIXEL_PACK_BUFFER, static_cast<GLsizeiptr>(requiredBytes), nullptr, GL_STREAM_READ);
glReadPixels(0, 0, readWidth, readHeight, format, type, nullptr);
slot.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
slot.inFlight = slot.fence != nullptr;
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
mAsyncReadbackWriteIndex = (mAsyncReadbackWriteIndex + 1) % mAsyncReadbackSlots.size();
}
bool OpenGLRenderPipeline::TryConsumeAsyncReadback(VideoIOOutputFrame& outputFrame, GLuint64 timeoutNanoseconds)
{
if (mAsyncReadbackBytes == 0 || outputFrame.bytes == nullptr)
return false;
AsyncReadbackSlot& slot = mAsyncReadbackSlots[mAsyncReadbackReadIndex];
if (!slot.inFlight || slot.fence == nullptr || slot.pixelPackBuffer == 0)
return false;
const GLenum waitFlags = timeoutNanoseconds > 0 ? GL_SYNC_FLUSH_COMMANDS_BIT : 0;
const GLenum waitResult = glClientWaitSync(slot.fence, waitFlags, timeoutNanoseconds);
if (waitResult != GL_ALREADY_SIGNALED && waitResult != GL_CONDITION_SATISFIED)
return false;
glDeleteSync(slot.fence);
slot.fence = nullptr;
glBindBuffer(GL_PIXEL_PACK_BUFFER, slot.pixelPackBuffer);
void* mappedBytes = glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
if (mappedBytes == nullptr)
{
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
slot.inFlight = false;
mAsyncReadbackReadIndex = (mAsyncReadbackReadIndex + 1) % mAsyncReadbackSlots.size();
return false;
}
std::memcpy(outputFrame.bytes, mappedBytes, slot.sizeBytes);
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
slot.inFlight = false;
mAsyncReadbackReadIndex = (mAsyncReadbackReadIndex + 1) % mAsyncReadbackSlots.size();
CacheOutputFrame(outputFrame);
return true;
}
void OpenGLRenderPipeline::CacheOutputFrame(const VideoIOOutputFrame& outputFrame)
{
if (outputFrame.bytes == nullptr || outputFrame.height == 0 || outputFrame.rowBytes <= 0)
return;
const std::size_t byteCount = static_cast<std::size_t>(outputFrame.rowBytes) * outputFrame.height;
mCachedOutputFrame.resize(byteCount);
std::memcpy(mCachedOutputFrame.data(), outputFrame.bytes, byteCount);
}
void OpenGLRenderPipeline::ReadOutputFrameSynchronously(const VideoIOState& state, void* destinationBytes)
{
const bool usePackedOutput = state.outputPixelFormat == VideoIOPixelFormat::V210 || state.outputPixelFormat == VideoIOPixelFormat::Yuva10;
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glPixelStorei(GL_PACK_ROW_LENGTH, 0);
if (usePackedOutput)
{
glBindFramebuffer(GL_READ_FRAMEBUFFER, mRenderer.OutputPackFramebuffer());
glReadPixels(0, 0, state.outputPackTextureWidth, state.outputFrameSize.height, GL_RGBA, GL_UNSIGNED_BYTE, outputFrame.bytes);
glReadPixels(0, 0, state.outputPackTextureWidth, state.outputFrameSize.height, GL_RGBA, GL_UNSIGNED_BYTE, destinationBytes);
}
else
{
glBindFramebuffer(GL_READ_FRAMEBUFFER, mRenderer.OutputFramebuffer());
glReadPixels(0, 0, state.outputFrameSize.width, state.outputFrameSize.height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, outputFrame.bytes);
glReadPixels(0, 0, state.outputFrameSize.width, state.outputFrameSize.height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, destinationBytes);
}
}
void OpenGLRenderPipeline::ReadOutputFrame(const VideoIOState& state, VideoIOOutputFrame& outputFrame)
{
if (TryConsumeAsyncReadback(outputFrame, 500000))
{
QueueAsyncReadback(state);
return;
}
// If async readback misses the playout deadline, prefer a fresh synchronous
// frame over reusing stale cached output, then restart the async pipeline.
if (outputFrame.bytes != nullptr)
{
ReadOutputFrameSynchronously(state, outputFrame.bytes);
CacheOutputFrame(outputFrame);
}
FlushAsyncReadbackPipeline();
QueueAsyncReadback(state);
}

View File

@@ -1,8 +1,11 @@
#pragma once
#include "GLExtensions.h"
#include "VideoIOTypes.h"
#include <array>
#include <functional>
#include <vector>
class OpenGLRenderer;
class RuntimeHost;
@@ -26,10 +29,26 @@ public:
RenderEffectCallback renderEffect,
OutputReadyCallback outputReady,
PaintCallback paint);
~OpenGLRenderPipeline();
bool RenderFrame(const RenderPipelineFrameContext& context, VideoIOOutputFrame& outputFrame);
private:
struct AsyncReadbackSlot
{
GLuint pixelPackBuffer = 0;
GLsync fence = nullptr;
std::size_t sizeBytes = 0;
bool inFlight = false;
};
bool EnsureAsyncReadbackBuffers(std::size_t requiredBytes);
void ResetAsyncReadbackState();
void FlushAsyncReadbackPipeline();
void QueueAsyncReadback(const VideoIOState& state);
bool TryConsumeAsyncReadback(VideoIOOutputFrame& outputFrame, GLuint64 timeoutNanoseconds);
void CacheOutputFrame(const VideoIOOutputFrame& outputFrame);
void ReadOutputFrameSynchronously(const VideoIOState& state, void* destinationBytes);
void PackOutputFor10Bit(const VideoIOState& state);
void ReadOutputFrame(const VideoIOState& state, VideoIOOutputFrame& outputFrame);
@@ -38,4 +57,12 @@ private:
RenderEffectCallback mRenderEffect;
OutputReadyCallback mOutputReady;
PaintCallback mPaint;
std::array<AsyncReadbackSlot, 3> mAsyncReadbackSlots;
std::size_t mAsyncReadbackWriteIndex = 0;
std::size_t mAsyncReadbackReadIndex = 0;
std::size_t mAsyncReadbackBytes = 0;
GLenum mAsyncReadbackFormat = GL_BGRA;
GLenum mAsyncReadbackType = GL_UNSIGNED_INT_8_8_8_8_REV;
GLuint mAsyncReadbackFramebuffer = 0;
std::vector<unsigned char> mCachedOutputFrame;
};

View File

@@ -7,7 +7,7 @@
#include <gl/gl.h>
OpenGLVideoIOBridge::OpenGLVideoIOBridge(
VideoIODevice& videoIO,
VideoIODevice* videoIO,
OpenGLRenderer& renderer,
OpenGLRenderPipeline& renderPipeline,
RuntimeHost& runtimeHost,
@@ -24,6 +24,11 @@ OpenGLVideoIOBridge::OpenGLVideoIOBridge(
{
}
void OpenGLVideoIOBridge::SetVideoIODevice(VideoIODevice* videoIO)
{
mVideoIO = videoIO;
}
void OpenGLVideoIOBridge::RecordFramePacing(VideoIOCompletionResult completionResult)
{
const auto now = std::chrono::steady_clock::now();
@@ -57,7 +62,10 @@ void OpenGLVideoIOBridge::RecordFramePacing(VideoIOCompletionResult completionRe
void OpenGLVideoIOBridge::VideoFrameArrived(const VideoIOFrame& inputFrame)
{
const VideoIOState& state = mVideoIO.State();
if (mVideoIO == nullptr)
return;
const VideoIOState& state = mVideoIO->State();
mRuntimeHost.TrySetSignalStatus(!inputFrame.hasNoInputSource, state.inputFrameSize.width, state.inputFrameSize.height, state.inputDisplayModeName);
if (inputFrame.hasNoInputSource || inputFrame.bytes == nullptr)
@@ -91,17 +99,20 @@ void OpenGLVideoIOBridge::VideoFrameArrived(const VideoIOFrame& inputFrame)
void OpenGLVideoIOBridge::PlayoutFrameCompleted(const VideoIOCompletion& completion)
{
if (mVideoIO == nullptr)
return;
RecordFramePacing(completion.result);
EnterCriticalSection(&mMutex);
VideoIOOutputFrame outputFrame;
if (!mVideoIO.BeginOutputFrame(outputFrame))
if (!mVideoIO->BeginOutputFrame(outputFrame))
{
LeaveCriticalSection(&mMutex);
return;
}
const VideoIOState& state = mVideoIO.State();
const VideoIOState& state = mVideoIO->State();
RenderPipelineFrameContext frameContext;
frameContext.videoState = state;
frameContext.completion = completion;
@@ -111,12 +122,12 @@ void OpenGLVideoIOBridge::PlayoutFrameCompleted(const VideoIOCompletion& complet
mRenderPipeline.RenderFrame(frameContext, outputFrame);
mVideoIO.EndOutputFrame(outputFrame);
mVideoIO->EndOutputFrame(outputFrame);
mVideoIO.AccountForCompletionResult(completion.result);
mVideoIO->AccountForCompletionResult(completion.result);
// Schedule the next frame for playout
mVideoIO.ScheduleOutputFrame(outputFrame);
mVideoIO->ScheduleOutputFrame(outputFrame);
wglMakeCurrent(NULL, NULL);

View File

@@ -13,7 +13,7 @@ class OpenGLVideoIOBridge
{
public:
OpenGLVideoIOBridge(
VideoIODevice& videoIO,
VideoIODevice* videoIO,
OpenGLRenderer& renderer,
OpenGLRenderPipeline& renderPipeline,
RuntimeHost& runtimeHost,
@@ -21,13 +21,15 @@ public:
HDC hdc,
HGLRC hglrc);
void SetVideoIODevice(VideoIODevice* videoIO);
void VideoFrameArrived(const VideoIOFrame& inputFrame);
void PlayoutFrameCompleted(const VideoIOCompletion& completion);
private:
void RecordFramePacing(VideoIOCompletionResult completionResult);
VideoIODevice& mVideoIO;
VideoIODevice* mVideoIO;
OpenGLRenderer& mRenderer;
OpenGLRenderPipeline& mRenderPipeline;
RuntimeHost& mRuntimeHost;

View File

@@ -62,6 +62,8 @@ PFNGLGENBUFFERSPROC glGenBuffers;
PFNGLDELETEBUFFERSPROC glDeleteBuffers;
PFNGLBINDBUFFERPROC glBindBuffer;
PFNGLBUFFERDATAPROC glBufferData;
PFNGLMAPBUFFERPROC glMapBuffer;
PFNGLUNMAPBUFFERPROC glUnmapBuffer;
PFNGLBUFFERSUBDATAPROC glBufferSubData;
PFNGLBINDBUFFERBASEPROC glBindBufferBase;
PFNGLACTIVETEXTUREPROC glActiveTexture;
@@ -131,6 +133,8 @@ bool ResolveGLExtensions()
glDeleteBuffers = (PFNGLDELETEBUFFERSPROC) wglGetProcAddress("glDeleteBuffers");
glBindBuffer = (PFNGLBINDBUFFERPROC) wglGetProcAddress("glBindBuffer");
glBufferData = (PFNGLBUFFERDATAPROC) wglGetProcAddress("glBufferData");
glMapBuffer = (PFNGLMAPBUFFERPROC) wglGetProcAddress("glMapBuffer");
glUnmapBuffer = (PFNGLUNMAPBUFFERPROC) wglGetProcAddress("glUnmapBuffer");
glBufferSubData = (PFNGLBUFFERSUBDATAPROC) wglGetProcAddress("glBufferSubData");
glBindBufferBase = (PFNGLBINDBUFFERBASEPROC) wglGetProcAddress("glBindBufferBase");
glActiveTexture = (PFNGLACTIVETEXTUREPROC) wglGetProcAddress("glActiveTexture");
@@ -176,6 +180,8 @@ bool ResolveGLExtensions()
&& glDeleteBuffers
&& glBindBuffer
&& glBufferData
&& glMapBuffer
&& glUnmapBuffer
&& glBufferSubData
&& glBindBufferBase
&& glActiveTexture

View File

@@ -89,6 +89,11 @@
#define GL_EXTERNAL_VIRTUAL_MEMORY_BUFFER_AMD 0x9160
#define GL_SYNC_GPU_COMMANDS_COMPLETE 0x9117
#define GL_SYNC_FLUSH_COMMANDS_BIT 0x00000001
#define GL_ALREADY_SIGNALED 0x911A
#define GL_TIMEOUT_EXPIRED 0x911B
#define GL_CONDITION_SATISFIED 0x911C
#define GL_WAIT_FAILED 0x911D
#define GL_READ_ONLY 0x88B8
typedef struct __GLsync *GLsync;
typedef unsigned __int64 GLuint64;
@@ -100,6 +105,8 @@ typedef void (APIENTRYP PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer);
typedef void (APIENTRYP PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers);
typedef void (APIENTRYP PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers);
typedef void (APIENTRYP PFNGLBUFFERDATAPROC) (GLenum target, GLsizeiptr size, const GLvoid *data, GLenum usage);
typedef GLvoid* (APIENTRYP PFNGLMAPBUFFERPROC) (GLenum target, GLenum access);
typedef GLboolean (APIENTRYP PFNGLUNMAPBUFFERPROC) (GLenum target);
typedef void (APIENTRYP PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader);
typedef void (APIENTRYP PFNGLCOMPILESHADERPROC) (GLuint shader);
typedef GLuint (APIENTRYP PFNGLCREATEPROGRAMPROC) (void);
@@ -159,6 +166,8 @@ extern PFNGLGENBUFFERSPROC glGenBuffers;
extern PFNGLDELETEBUFFERSPROC glDeleteBuffers;
extern PFNGLBINDBUFFERPROC glBindBuffer;
extern PFNGLBUFFERDATAPROC glBufferData;
extern PFNGLMAPBUFFERPROC glMapBuffer;
extern PFNGLUNMAPBUFFERPROC glUnmapBuffer;
extern PFNGLBUFFERSUBDATAPROC glBufferSubData;
extern PFNGLBINDBUFFERBASEPROC glBindBufferBase;
extern PFNGLACTIVETEXTUREPROC glActiveTexture;

View File

@@ -71,6 +71,9 @@ void OpenGLRenderer::SetDecodeShaderProgram(GLuint program, GLuint vertexShader,
mDecodeProgram = program;
mDecodeVertexShader = vertexShader;
mDecodeFragmentShader = fragmentShader;
mDecodePackedResolutionLocation = program != 0 ? glGetUniformLocation(program, "uPackedVideoResolution") : -1;
mDecodeDecodedResolutionLocation = program != 0 ? glGetUniformLocation(program, "uDecodedVideoResolution") : -1;
mDecodeInputPixelFormatLocation = program != 0 ? glGetUniformLocation(program, "uInputPixelFormat") : -1;
}
void OpenGLRenderer::SetOutputPackShaderProgram(GLuint program, GLuint vertexShader, GLuint fragmentShader)
@@ -78,6 +81,9 @@ void OpenGLRenderer::SetOutputPackShaderProgram(GLuint program, GLuint vertexSha
mOutputPackProgram = program;
mOutputPackVertexShader = vertexShader;
mOutputPackFragmentShader = fragmentShader;
mOutputPackResolutionLocation = program != 0 ? glGetUniformLocation(program, "uOutputVideoResolution") : -1;
mOutputPackActiveWordsLocation = program != 0 ? glGetUniformLocation(program, "uActiveV210Words") : -1;
mOutputPackFormatLocation = program != 0 ? glGetUniformLocation(program, "uOutputPackFormat") : -1;
}
bool OpenGLRenderer::ReserveTemporaryRenderTargets(std::size_t count, unsigned width, unsigned height, std::string& error)
@@ -217,6 +223,9 @@ void OpenGLRenderer::DestroyDecodeShaderProgram()
glDeleteProgram(mDecodeProgram);
mDecodeProgram = 0;
}
mDecodePackedResolutionLocation = -1;
mDecodeDecodedResolutionLocation = -1;
mDecodeInputPixelFormatLocation = -1;
if (mDecodeFragmentShader != 0)
{
@@ -238,6 +247,9 @@ void OpenGLRenderer::DestroyOutputPackShaderProgram()
glDeleteProgram(mOutputPackProgram);
mOutputPackProgram = 0;
}
mOutputPackResolutionLocation = -1;
mOutputPackActiveWordsLocation = -1;
mOutputPackFormatLocation = -1;
if (mOutputPackFragmentShader != 0)
{

View File

@@ -70,6 +70,12 @@ public:
GLuint GlobalParamsUBO() const { return mGlobalParamsUBO; }
GLuint DecodeProgram() const { return mDecodeProgram; }
GLuint OutputPackProgram() const { return mOutputPackProgram; }
GLint DecodePackedResolutionLocation() const { return mDecodePackedResolutionLocation; }
GLint DecodeDecodedResolutionLocation() const { return mDecodeDecodedResolutionLocation; }
GLint DecodeInputPixelFormatLocation() const { return mDecodeInputPixelFormatLocation; }
GLint OutputPackResolutionLocation() const { return mOutputPackResolutionLocation; }
GLint OutputPackActiveWordsLocation() const { return mOutputPackActiveWordsLocation; }
GLint OutputPackFormatLocation() const { return mOutputPackFormatLocation; }
GLsizeiptr GlobalParamsUBOSize() const { return mGlobalParamsUBOSize; }
void SetGlobalParamsUBOSize(GLsizeiptr size) { mGlobalParamsUBOSize = size; }
void ReplaceLayerPrograms(std::vector<LayerProgram>& newPrograms) { mLayerPrograms.swap(newPrograms); }
@@ -101,9 +107,15 @@ private:
GLuint mDecodeProgram = 0;
GLuint mDecodeVertexShader = 0;
GLuint mDecodeFragmentShader = 0;
GLint mDecodePackedResolutionLocation = -1;
GLint mDecodeDecodedResolutionLocation = -1;
GLint mDecodeInputPixelFormatLocation = -1;
GLuint mOutputPackProgram = 0;
GLuint mOutputPackVertexShader = 0;
GLuint mOutputPackFragmentShader = 0;
GLint mOutputPackResolutionLocation = -1;
GLint mOutputPackActiveWordsLocation = -1;
GLint mOutputPackFormatLocation = -1;
GLsizeiptr mGlobalParamsUBOSize = 0;
int mViewWidth = 0;
int mViewHeight = 0;

View File

@@ -12,7 +12,8 @@ GlobalParamsBuffer::GlobalParamsBuffer(OpenGLRenderer& renderer) :
bool GlobalParamsBuffer::Update(const RuntimeRenderState& state, unsigned availableSourceHistoryLength, unsigned availableTemporalHistoryLength)
{
std::vector<unsigned char> buffer;
std::vector<unsigned char>& buffer = mScratchBuffer;
buffer.clear();
buffer.reserve(512);
AppendStd140Float(buffer, static_cast<float>(state.timeSeconds));

View File

@@ -3,6 +3,8 @@
#include "OpenGLRenderer.h"
#include "ShaderTypes.h"
#include <vector>
class GlobalParamsBuffer
{
public:
@@ -12,4 +14,5 @@ public:
private:
OpenGLRenderer& mRenderer;
std::vector<unsigned char> mScratchBuffer;
};

View File

@@ -841,6 +841,8 @@ bool RuntimeHost::PollFileChanges(bool& registryChanged, bool& reloadRequested,
}
reloadRequested = mReloadRequested;
if (registryChanged || reloadRequested)
MarkRenderStateDirtyLocked();
return true;
}
catch (const std::exception& exception)
@@ -884,6 +886,7 @@ bool RuntimeHost::AddLayer(const std::string& shaderId, std::string& error)
EnsureLayerDefaultsLocked(layer, shaderIt->second);
mPersistentState.layers.push_back(layer);
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -900,6 +903,7 @@ bool RuntimeHost::RemoveLayer(const std::string& layerId, std::string& error)
mPersistentState.layers.erase(it);
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -921,6 +925,7 @@ bool RuntimeHost::MoveLayer(const std::string& layerId, int direction, std::stri
std::swap(mPersistentState.layers[index], mPersistentState.layers[newIndex]);
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -949,6 +954,7 @@ bool RuntimeHost::MoveLayerToIndex(const std::string& layerId, std::size_t targe
mPersistentState.layers.erase(mPersistentState.layers.begin() + static_cast<std::ptrdiff_t>(sourceIndex));
mPersistentState.layers.insert(mPersistentState.layers.begin() + static_cast<std::ptrdiff_t>(targetIndex), movedLayer);
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -964,6 +970,7 @@ bool RuntimeHost::SetLayerBypass(const std::string& layerId, bool bypassed, std:
layer->bypass = bypassed;
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -988,6 +995,7 @@ bool RuntimeHost::SetLayerShader(const std::string& layerId, const std::string&
layer->parameterValues.clear();
EnsureLayerDefaultsLocked(*layer, shaderIt->second);
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -1024,6 +1032,7 @@ bool RuntimeHost::UpdateLayerParameter(const std::string& layerId, const std::st
const double previousCount = value.numberValues.empty() ? 0.0 : value.numberValues[0];
const double triggerTime = std::chrono::duration_cast<std::chrono::duration<double>>(std::chrono::steady_clock::now() - mStartTime).count();
value.numberValues = { previousCount + 1.0, triggerTime };
MarkRenderStateDirtyLocked();
return true;
}
@@ -1032,6 +1041,7 @@ bool RuntimeHost::UpdateLayerParameter(const std::string& layerId, const std::st
return false;
layer->parameterValues[parameterId] = normalized;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -1079,6 +1089,7 @@ bool RuntimeHost::UpdateLayerParameterByControlKey(const std::string& layerKey,
const double previousCount = value.numberValues.empty() ? 0.0 : value.numberValues[0];
const double triggerTime = std::chrono::duration_cast<std::chrono::duration<double>>(std::chrono::steady_clock::now() - mStartTime).count();
value.numberValues = { previousCount + 1.0, triggerTime };
MarkRenderStateDirtyLocked();
return true;
}
@@ -1087,6 +1098,7 @@ bool RuntimeHost::UpdateLayerParameterByControlKey(const std::string& layerKey,
return false;
matchedLayer->parameterValues[parameterIt->id] = normalized;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -1110,6 +1122,7 @@ bool RuntimeHost::ResetLayerParameters(const std::string& layerId, std::string&
layer->parameterValues.clear();
EnsureLayerDefaultsLocked(*layer, shaderIt->second);
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -1169,6 +1182,7 @@ bool RuntimeHost::LoadStackPreset(const std::string& presetName, std::string& er
mPersistentState.layers = nextLayers;
mReloadRequested = true;
MarkRenderStateDirtyLocked();
return SavePersistentState(error);
}
@@ -1197,31 +1211,37 @@ bool RuntimeHost::TrySetSignalStatus(bool hasSignal, unsigned width, unsigned he
// Updates the cached input-signal status. Caller must hold mMutex (Locked suffix).
void RuntimeHost::SetSignalStatusLocked(bool hasSignal, unsigned width, unsigned height, const std::string& modeName)
{
// Compare against the cached values first so the render-state version is
// only bumped when the signal status actually changed.
const bool changed = mHasSignal != hasSignal ||
mSignalWidth != width ||
mSignalHeight != height ||
mSignalModeName != modeName;
mHasSignal = hasSignal;
mSignalWidth = width;
mSignalHeight = height;
mSignalModeName = modeName;
if (changed)
MarkRenderStateDirtyLocked();
}
void RuntimeHost::SetDeckLinkOutputStatus(const std::string& modelName, bool supportsInternalKeying, bool supportsExternalKeying,
bool keyerInterfaceAvailable, bool externalKeyingRequested, bool externalKeyingActive, const std::string& statusMessage)
void RuntimeHost::MarkRenderStateDirtyLocked()
{
SetVideoIOStatus("decklink", modelName, supportsInternalKeying, supportsExternalKeying, keyerInterfaceAvailable,
externalKeyingRequested, externalKeyingActive, statusMessage);
mRenderStateVersion.fetch_add(1, std::memory_order_relaxed);
}
void RuntimeHost::SetVideoIOStatus(const std::string& backendName, const std::string& modelName, bool supportsInternalKeying, bool supportsExternalKeying,
bool keyerInterfaceAvailable, bool externalKeyingRequested, bool externalKeyingActive, const std::string& statusMessage)
void RuntimeHost::SetVideoIOStatus(const VideoIOState& state)
{
std::lock_guard<std::mutex> lock(mMutex);
mDeckLinkOutputStatus.backendName = backendName;
mDeckLinkOutputStatus.modelName = modelName;
mDeckLinkOutputStatus.supportsInternalKeying = supportsInternalKeying;
mDeckLinkOutputStatus.supportsExternalKeying = supportsExternalKeying;
mDeckLinkOutputStatus.keyerInterfaceAvailable = keyerInterfaceAvailable;
mDeckLinkOutputStatus.externalKeyingRequested = externalKeyingRequested;
mDeckLinkOutputStatus.externalKeyingActive = externalKeyingActive;
mDeckLinkOutputStatus.statusMessage = statusMessage;
mVideoIOStatus.backendId = state.backendId;
mVideoIOStatus.deviceName = state.deviceName;
mVideoIOStatus.capabilities = state.capabilities;
mVideoIOStatus.hasInputDevice = state.hasInputDevice;
mVideoIOStatus.hasInputSource = state.hasInputSource;
mVideoIOStatus.inputDisplayModeName = state.inputDisplayModeName;
mVideoIOStatus.outputDisplayModeName = state.outputDisplayModeName;
mVideoIOStatus.externalKeyingRequested = state.externalKeyingRequested;
mVideoIOStatus.externalKeyingActive = state.externalKeyingActive;
mVideoIOStatus.statusMessage = state.statusMessage;
mVideoIOStatus.formatStatusMessage = state.formatStatusMessage;
}
void RuntimeHost::SetPerformanceStats(double frameBudgetMilliseconds, double renderMilliseconds)
@@ -1456,61 +1476,67 @@ bool RuntimeHost::LoadConfig(std::string& error)
const double configuredValue = maxTemporalHistoryFramesValue->asNumber(static_cast<double>(mConfig.maxTemporalHistoryFrames));
mConfig.maxTemporalHistoryFrames = configuredValue <= 0.0 ? 0u : static_cast<unsigned>(configuredValue);
}
if (const JsonValue* videoBackendValue = configJson.find("videoBackend"))
{
VideoIOBackendId backendId = mConfig.videoIO.backendId;
if (videoBackendValue->isString() && ParseVideoIOBackendId(videoBackendValue->asString(), backendId))
mConfig.videoIO.backendId = backendId;
}
if (const JsonValue* enableExternalKeyingValue = configJson.find("enableExternalKeying"))
mConfig.enableExternalKeying = enableExternalKeyingValue->asBoolean(mConfig.enableExternalKeying);
mConfig.videoIO.externalKeyingEnabled = enableExternalKeyingValue->asBoolean(mConfig.videoIO.externalKeyingEnabled);
if (const JsonValue* videoFormatValue = configJson.find("videoFormat"))
{
if (videoFormatValue->isString() && !videoFormatValue->asString().empty())
{
mConfig.inputVideoFormat = videoFormatValue->asString();
mConfig.outputVideoFormat = videoFormatValue->asString();
mConfig.videoIO.inputMode.videoFormat = videoFormatValue->asString();
mConfig.videoIO.outputMode.videoFormat = videoFormatValue->asString();
}
}
if (const JsonValue* frameRateValue = configJson.find("frameRate"))
{
if (frameRateValue->isString() && !frameRateValue->asString().empty())
{
mConfig.inputFrameRate = frameRateValue->asString();
mConfig.outputFrameRate = frameRateValue->asString();
mConfig.videoIO.inputMode.frameRate = frameRateValue->asString();
mConfig.videoIO.outputMode.frameRate = frameRateValue->asString();
}
else if (frameRateValue->isNumber())
{
std::ostringstream stream;
stream << frameRateValue->asNumber();
mConfig.inputFrameRate = stream.str();
mConfig.outputFrameRate = stream.str();
mConfig.videoIO.inputMode.frameRate = stream.str();
mConfig.videoIO.outputMode.frameRate = stream.str();
}
}
if (const JsonValue* inputVideoFormatValue = configJson.find("inputVideoFormat"))
{
if (inputVideoFormatValue->isString() && !inputVideoFormatValue->asString().empty())
mConfig.inputVideoFormat = inputVideoFormatValue->asString();
mConfig.videoIO.inputMode.videoFormat = inputVideoFormatValue->asString();
}
if (const JsonValue* inputFrameRateValue = configJson.find("inputFrameRate"))
{
if (inputFrameRateValue->isString() && !inputFrameRateValue->asString().empty())
mConfig.inputFrameRate = inputFrameRateValue->asString();
mConfig.videoIO.inputMode.frameRate = inputFrameRateValue->asString();
else if (inputFrameRateValue->isNumber())
{
std::ostringstream stream;
stream << inputFrameRateValue->asNumber();
mConfig.inputFrameRate = stream.str();
mConfig.videoIO.inputMode.frameRate = stream.str();
}
}
if (const JsonValue* outputVideoFormatValue = configJson.find("outputVideoFormat"))
{
if (outputVideoFormatValue->isString() && !outputVideoFormatValue->asString().empty())
mConfig.outputVideoFormat = outputVideoFormatValue->asString();
mConfig.videoIO.outputMode.videoFormat = outputVideoFormatValue->asString();
}
if (const JsonValue* outputFrameRateValue = configJson.find("outputFrameRate"))
{
if (outputFrameRateValue->isString() && !outputFrameRateValue->asString().empty())
mConfig.outputFrameRate = outputFrameRateValue->asString();
mConfig.videoIO.outputMode.frameRate = outputFrameRateValue->asString();
else if (outputFrameRateValue->isNumber())
{
std::ostringstream stream;
stream << outputFrameRateValue->asNumber();
mConfig.outputFrameRate = stream.str();
mConfig.videoIO.outputMode.frameRate = stream.str();
}
}
@@ -1674,6 +1700,8 @@ bool RuntimeHost::ScanShaderPackages(std::string& error)
++it;
}
MarkRenderStateDirtyLocked();
return true;
}
@@ -1840,11 +1868,12 @@ JsonValue RuntimeHost::BuildStateValue() const
app.set("oscPort", JsonValue(static_cast<double>(mConfig.oscPort)));
app.set("autoReload", JsonValue(mAutoReloadEnabled));
app.set("maxTemporalHistoryFrames", JsonValue(static_cast<double>(mConfig.maxTemporalHistoryFrames)));
app.set("enableExternalKeying", JsonValue(mConfig.enableExternalKeying));
app.set("inputVideoFormat", JsonValue(mConfig.inputVideoFormat));
app.set("inputFrameRate", JsonValue(mConfig.inputFrameRate));
app.set("outputVideoFormat", JsonValue(mConfig.outputVideoFormat));
app.set("outputFrameRate", JsonValue(mConfig.outputFrameRate));
app.set("videoBackend", JsonValue(VideoIOBackendName(mConfig.videoIO.backendId)));
app.set("enableExternalKeying", JsonValue(mConfig.videoIO.externalKeyingEnabled));
app.set("inputVideoFormat", JsonValue(mConfig.videoIO.inputMode.videoFormat));
app.set("inputFrameRate", JsonValue(mConfig.videoIO.inputMode.frameRate));
app.set("outputVideoFormat", JsonValue(mConfig.videoIO.outputMode.videoFormat));
app.set("outputFrameRate", JsonValue(mConfig.videoIO.outputMode.frameRate));
root.set("app", app);
JsonValue runtime = JsonValue::MakeObject();
@@ -1860,25 +1889,22 @@ JsonValue RuntimeHost::BuildStateValue() const
video.set("modeName", JsonValue(mSignalModeName));
root.set("video", video);
JsonValue deckLink = JsonValue::MakeObject();
deckLink.set("modelName", JsonValue(mDeckLinkOutputStatus.modelName));
deckLink.set("supportsInternalKeying", JsonValue(mDeckLinkOutputStatus.supportsInternalKeying));
deckLink.set("supportsExternalKeying", JsonValue(mDeckLinkOutputStatus.supportsExternalKeying));
deckLink.set("keyerInterfaceAvailable", JsonValue(mDeckLinkOutputStatus.keyerInterfaceAvailable));
deckLink.set("externalKeyingRequested", JsonValue(mDeckLinkOutputStatus.externalKeyingRequested));
deckLink.set("externalKeyingActive", JsonValue(mDeckLinkOutputStatus.externalKeyingActive));
deckLink.set("statusMessage", JsonValue(mDeckLinkOutputStatus.statusMessage));
root.set("decklink", deckLink);
JsonValue videoIO = JsonValue::MakeObject();
videoIO.set("backend", JsonValue(mDeckLinkOutputStatus.backendName));
videoIO.set("modelName", JsonValue(mDeckLinkOutputStatus.modelName));
videoIO.set("supportsInternalKeying", JsonValue(mDeckLinkOutputStatus.supportsInternalKeying));
videoIO.set("supportsExternalKeying", JsonValue(mDeckLinkOutputStatus.supportsExternalKeying));
videoIO.set("keyerInterfaceAvailable", JsonValue(mDeckLinkOutputStatus.keyerInterfaceAvailable));
videoIO.set("externalKeyingRequested", JsonValue(mDeckLinkOutputStatus.externalKeyingRequested));
videoIO.set("externalKeyingActive", JsonValue(mDeckLinkOutputStatus.externalKeyingActive));
videoIO.set("statusMessage", JsonValue(mDeckLinkOutputStatus.statusMessage));
videoIO.set("backend", JsonValue(VideoIOBackendName(mVideoIOStatus.backendId)));
videoIO.set("deviceName", JsonValue(mVideoIOStatus.deviceName));
videoIO.set("hasInputDevice", JsonValue(mVideoIOStatus.hasInputDevice));
videoIO.set("hasInputSource", JsonValue(mVideoIOStatus.hasInputSource));
videoIO.set("inputModeName", JsonValue(mVideoIOStatus.inputDisplayModeName));
videoIO.set("outputModeName", JsonValue(mVideoIOStatus.outputDisplayModeName));
JsonValue capabilities = JsonValue::MakeObject();
capabilities.set("supportsInternalKeying", JsonValue(mVideoIOStatus.capabilities.supportsInternalKeying));
capabilities.set("supportsExternalKeying", JsonValue(mVideoIOStatus.capabilities.supportsExternalKeying));
capabilities.set("keyerInterfaceAvailable", JsonValue(mVideoIOStatus.capabilities.keyerInterfaceAvailable));
videoIO.set("capabilities", capabilities);
videoIO.set("externalKeyingRequested", JsonValue(mVideoIOStatus.externalKeyingRequested));
videoIO.set("externalKeyingActive", JsonValue(mVideoIOStatus.externalKeyingActive));
videoIO.set("statusMessage", JsonValue(mVideoIOStatus.statusMessage));
videoIO.set("formatStatusMessage", JsonValue(mVideoIOStatus.formatStatusMessage));
root.set("videoIO", videoIO);
JsonValue performance = JsonValue::MakeObject();

View File

@@ -2,6 +2,7 @@
#include "RuntimeJson.h"
#include "ShaderTypes.h"
#include "VideoIOTypes.h"
#include <atomic>
#include <chrono>
@@ -38,10 +39,7 @@ public:
void SetCompileStatus(bool succeeded, const std::string& message);
void SetSignalStatus(bool hasSignal, unsigned width, unsigned height, const std::string& modeName);
bool TrySetSignalStatus(bool hasSignal, unsigned width, unsigned height, const std::string& modeName);
void SetDeckLinkOutputStatus(const std::string& modelName, bool supportsInternalKeying, bool supportsExternalKeying,
bool keyerInterfaceAvailable, bool externalKeyingRequested, bool externalKeyingActive, const std::string& statusMessage);
void SetVideoIOStatus(const std::string& backendName, const std::string& modelName, bool supportsInternalKeying, bool supportsExternalKeying,
bool keyerInterfaceAvailable, bool externalKeyingRequested, bool externalKeyingActive, const std::string& statusMessage);
void SetVideoIOStatus(const VideoIOState& state);
void SetPerformanceStats(double frameBudgetMilliseconds, double renderMilliseconds);
bool TrySetPerformanceStats(double frameBudgetMilliseconds, double renderMilliseconds);
void SetFramePacingStats(double completionIntervalMilliseconds, double smoothedCompletionIntervalMilliseconds,
@@ -56,6 +54,7 @@ public:
bool TryGetLayerRenderStates(unsigned outputWidth, unsigned outputHeight, std::vector<RuntimeRenderState>& states) const;
void RefreshDynamicRenderStateFields(std::vector<RuntimeRenderState>& states) const;
std::string BuildStateJson() const;
uint64_t GetRenderStateVersion() const { return mRenderStateVersion.load(std::memory_order_relaxed); }
const std::filesystem::path& GetRepoRoot() const { return mRepoRoot; }
const std::filesystem::path& GetUiRoot() const { return mUiRoot; }
@@ -64,11 +63,8 @@ public:
unsigned short GetServerPort() const { return mServerPort; }
unsigned short GetOscPort() const { return mConfig.oscPort; }
unsigned GetMaxTemporalHistoryFrames() const { return mConfig.maxTemporalHistoryFrames; }
bool ExternalKeyingEnabled() const { return mConfig.enableExternalKeying; }
const std::string& GetInputVideoFormat() const { return mConfig.inputVideoFormat; }
const std::string& GetInputFrameRate() const { return mConfig.inputFrameRate; }
const std::string& GetOutputVideoFormat() const { return mConfig.outputVideoFormat; }
const std::string& GetOutputFrameRate() const { return mConfig.outputFrameRate; }
bool ExternalKeyingEnabled() const { return mConfig.videoIO.externalKeyingEnabled; }
VideoIOConfiguration GetVideoIOConfiguration() const { return mConfig.videoIO; }
void SetServerPort(unsigned short port);
bool AutoReloadEnabled() const { return mAutoReloadEnabled; }
@@ -80,23 +76,22 @@ private:
unsigned short oscPort = 9000;
bool autoReload = true;
unsigned maxTemporalHistoryFrames = 4;
bool enableExternalKeying = false;
std::string inputVideoFormat = "1080p";
std::string inputFrameRate = "59.94";
std::string outputVideoFormat = "1080p";
std::string outputFrameRate = "59.94";
VideoIOConfiguration videoIO;
};
struct DeckLinkOutputStatus
struct VideoIOStatusSnapshot
{
std::string backendName = "decklink";
std::string modelName;
bool supportsInternalKeying = false;
bool supportsExternalKeying = false;
bool keyerInterfaceAvailable = false;
VideoIOBackendId backendId = VideoIOBackendId::DeckLink;
std::string deviceName;
VideoIOCapabilities capabilities;
bool hasInputDevice = false;
bool hasInputSource = false;
std::string inputDisplayModeName = "1080p59.94";
std::string outputDisplayModeName = "1080p59.94";
bool externalKeyingRequested = false;
bool externalKeyingActive = false;
std::string statusMessage;
std::string formatStatusMessage;
};
struct LayerPersistentState
@@ -135,6 +130,7 @@ private:
const LayerPersistentState* FindLayerById(const std::string& layerId) const;
std::string GenerateLayerId();
void SetSignalStatusLocked(bool hasSignal, unsigned width, unsigned height, const std::string& modeName);
void MarkRenderStateDirtyLocked();
void SetPerformanceStatsLocked(double frameBudgetMilliseconds, double renderMilliseconds);
void SetFramePacingStatsLocked(double completionIntervalMilliseconds, double smoothedCompletionIntervalMilliseconds,
double maxCompletionIntervalMilliseconds, uint64_t lateFrameCount, uint64_t droppedFrameCount, uint64_t flushedFrameCount);
@@ -174,11 +170,12 @@ private:
uint64_t mLateFrameCount;
uint64_t mDroppedFrameCount;
uint64_t mFlushedFrameCount;
DeckLinkOutputStatus mDeckLinkOutputStatus;
VideoIOStatusSnapshot mVideoIOStatus;
unsigned short mServerPort;
bool mAutoReloadEnabled;
std::chrono::steady_clock::time_point mStartTime;
std::chrono::steady_clock::time_point mLastScanTime;
std::atomic<uint64_t> mFrameCounter;
std::atomic<uint64_t> mFrameCounter{ 0 };
std::atomic<uint64_t> mRenderStateVersion{ 0 };
uint64_t mNextLayerId;
};

View File

@@ -0,0 +1,16 @@
#include "VideoIOBackendFactory.h"
#include "DeckLinkSession.h"
#include "VideoIOTypes.h"
// Factory for the concrete VideoIODevice implementation behind |backendId|.
// Returns nullptr and fills |error| when no implementation exists for the
// requested backend.
std::unique_ptr<VideoIODevice> CreateVideoIODevice(VideoIOBackendId backendId, std::string& error)
{
    if (backendId == VideoIOBackendId::DeckLink)
        return std::make_unique<DeckLinkSession>();

    error = "Unsupported video I/O backend.";
    return nullptr;
}

View File

@@ -0,0 +1,10 @@
#pragma once
#include "VideoIOConfig.h"
#include <memory>
#include <string>
// Forward declaration; ownership of the created device is returned through
// this abstract interface.
class VideoIODevice;
// Creates the device implementation for |backendId|; returns nullptr and
// fills |error| when the backend is unsupported.
std::unique_ptr<VideoIODevice> CreateVideoIODevice(VideoIOBackendId backendId, std::string& error);

View File

@@ -0,0 +1,35 @@
#include "VideoIOConfig.h"
#include <algorithm>
#include <cctype>
namespace
{
// Returns a lower-cased copy of |value| so mode/backend tokens can be
// compared case-insensitively. The unsigned-char cast keeps std::tolower
// well-defined for bytes outside the ASCII range.
std::string NormalizeToken(std::string value)
{
    for (char& ch : value)
        ch = static_cast<char>(std::tolower(static_cast<unsigned char>(ch)));
    return value;
}
}
// Maps a backend identifier to the stable lowercase token used in config
// files and in the state JSON ("decklink"); unrecognized ids map to "unknown".
const char* VideoIOBackendName(VideoIOBackendId backendId)
{
    if (backendId == VideoIOBackendId::DeckLink)
        return "decklink";
    return "unknown";
}
// Parses a config token into a backend id. An empty token selects the
// default (DeckLink) backend. Unknown tokens leave |backendId| untouched
// and return false.
bool ParseVideoIOBackendId(const std::string& value, VideoIOBackendId& backendId)
{
    const std::string token = NormalizeToken(value);
    const bool selectsDeckLink = token.empty() || token == "decklink";
    if (!selectsDeckLink)
        return false;
    backendId = VideoIOBackendId::DeckLink;
    return true;
}

View File

@@ -0,0 +1,44 @@
#pragma once
#include <string>
// Identifies which video I/O backend implementation to instantiate.
enum class VideoIOBackendId
{
DeckLink
};
// Returns the stable lowercase token for a backend (e.g. "decklink").
const char* VideoIOBackendName(VideoIOBackendId backendId);
// Parses a token into a backend id; empty selects the default backend,
// unknown tokens return false.
bool ParseVideoIOBackendId(const std::string& value, VideoIOBackendId& backendId);
// Pixel dimensions of a video frame. A zero width or height means the size
// is unset / no frame is available.
struct FrameSize
{
    unsigned width = 0;
    unsigned height = 0;
    bool IsEmpty() const { return !(width > 0 && height > 0); }
};

// Two frame sizes compare equal when both dimensions match.
inline bool operator==(const FrameSize& left, const FrameSize& right)
{
    const bool sameWidth = left.width == right.width;
    const bool sameHeight = left.height == right.height;
    return sameWidth && sameHeight;
}

inline bool operator!=(const FrameSize& left, const FrameSize& right)
{
    return !(left == right);
}
// One direction (input or output) of the video pipeline, as read from
// config/runtime-host.json.
struct VideoIOModeConfiguration
{
// Video format token, e.g. "1080p".
std::string videoFormat = "1080p";
// Frame-rate token, e.g. "59.94"; kept as text to match the config file.
std::string frameRate = "59.94";
};
// Complete video I/O configuration assembled from the runtime-host config.
struct VideoIOConfiguration
{
// Which backend implementation to use (see VideoIOBackendId).
VideoIOBackendId backendId = VideoIOBackendId::DeckLink;
// Capture-side mode selection.
VideoIOModeConfiguration inputMode;
// Playout-side mode selection.
VideoIOModeConfiguration outputMode;
// Mirrors the "enableExternalKeying" config flag.
bool externalKeyingEnabled = false;
// Presumably prefers 10-bit pixel formats when supported — not referenced
// in this chunk; TODO confirm against the backend implementations.
bool preferTenBit = true;
};

View File

@@ -1,15 +1,17 @@
#pragma once
#include "DeckLinkDisplayMode.h"
#include "VideoIOConfig.h"
#include "VideoIOFormat.h"
#include <cstdint>
#include <functional>
#include <string>
enum class VideoIOBackend
struct VideoIOCapabilities
{
DeckLink
bool supportsInternalKeying = false;
bool supportsExternalKeying = false;
bool keyerInterfaceAvailable = false;
};
enum class VideoIOCompletionResult
@@ -21,15 +23,9 @@ enum class VideoIOCompletionResult
Unknown
};
struct VideoIOConfig
{
VideoFormatSelection videoModes;
bool externalKeyingEnabled = false;
bool preferTenBit = true;
};
struct VideoIOState
{
VideoIOBackendId backendId = VideoIOBackendId::DeckLink;
FrameSize inputFrameSize;
FrameSize outputFrameSize;
VideoIOPixelFormat inputPixelFormat = VideoIOPixelFormat::Uyvy8;
@@ -40,14 +36,13 @@ struct VideoIOState
unsigned outputPackTextureWidth = 0;
std::string inputDisplayModeName = "1080p59.94";
std::string outputDisplayModeName = "1080p59.94";
std::string outputModelName;
std::string deviceName;
std::string statusMessage;
std::string formatStatusMessage;
bool hasInputDevice = false;
bool hasInputSource = false;
bool supportsInternalKeying = false;
bool supportsExternalKeying = false;
bool keyerInterfaceAvailable = false;
VideoIOCapabilities capabilities;
bool externalKeyingRequested = false;
bool externalKeyingActive = false;
double frameBudgetMilliseconds = 0.0;
};
@@ -93,11 +88,12 @@ public:
using OutputFrameCallback = std::function<void(const VideoIOCompletion&)>;
virtual ~VideoIODevice() = default;
virtual VideoIOBackendId BackendId() const = 0;
virtual void ReleaseResources() = 0;
virtual bool DiscoverDevicesAndModes(const VideoFormatSelection& videoModes, std::string& error) = 0;
virtual bool SelectPreferredFormats(const VideoFormatSelection& videoModes, bool outputAlphaRequired, std::string& error) = 0;
virtual bool ConfigureInput(InputFrameCallback callback, const VideoFormat& inputVideoMode, std::string& error) = 0;
virtual bool ConfigureOutput(OutputFrameCallback callback, const VideoFormat& outputVideoMode, bool externalKeyingEnabled, std::string& error) = 0;
virtual bool DiscoverDevicesAndModes(const VideoIOConfiguration& config, std::string& error) = 0;
virtual bool SelectPreferredFormats(const VideoIOConfiguration& config, std::string& error) = 0;
virtual bool ConfigureInput(InputFrameCallback callback, std::string& error) = 0;
virtual bool ConfigureOutput(OutputFrameCallback callback, std::string& error) = 0;
virtual bool Start() = 0;
virtual bool Stop() = 0;
virtual const VideoIOState& State() const = 0;
@@ -126,10 +122,11 @@ public:
unsigned OutputPackTextureWidth() const { return State().outputPackTextureWidth; }
const std::string& FormatStatusMessage() const { return State().formatStatusMessage; }
const std::string& InputDisplayModeName() const { return State().inputDisplayModeName; }
const std::string& OutputModelName() const { return State().outputModelName; }
bool SupportsInternalKeying() const { return State().supportsInternalKeying; }
bool SupportsExternalKeying() const { return State().supportsExternalKeying; }
bool KeyerInterfaceAvailable() const { return State().keyerInterfaceAvailable; }
const std::string& DeviceName() const { return State().deviceName; }
bool SupportsInternalKeying() const { return State().capabilities.supportsInternalKeying; }
bool SupportsExternalKeying() const { return State().capabilities.supportsExternalKeying; }
bool KeyerInterfaceAvailable() const { return State().capabilities.keyerInterfaceAvailable; }
bool ExternalKeyingRequested() const { return State().externalKeyingRequested; }
bool ExternalKeyingActive() const { return State().externalKeyingActive; }
const std::string& StatusMessage() const { return State().statusMessage; }
double FrameBudgetMilliseconds() const { return State().frameBudgetMilliseconds; }

View File

@@ -13,10 +13,10 @@ std::string NormalizeModeToken(const std::string& value)
return normalized;
}
bool ResolveConfiguredDisplayMode(const std::string& videoFormat, const std::string& frameRate, BMDDisplayMode& displayMode, std::string& displayModeName)
bool ResolveConfiguredDeckLinkDisplayMode(const VideoIOModeConfiguration& mode, BMDDisplayMode& displayMode, std::string& displayModeName)
{
VideoFormat videoMode;
if (!ResolveConfiguredVideoFormat(videoFormat, frameRate, videoMode))
DeckLinkVideoMode videoMode;
if (!ResolveConfiguredDeckLinkVideoMode(mode, videoMode))
return false;
displayMode = videoMode.displayMode;
@@ -24,10 +24,10 @@ bool ResolveConfiguredDisplayMode(const std::string& videoFormat, const std::str
return true;
}
bool ResolveConfiguredVideoFormat(const std::string& videoFormat, const std::string& frameRate, VideoFormat& videoMode)
bool ResolveConfiguredDeckLinkVideoMode(const VideoIOModeConfiguration& mode, DeckLinkVideoMode& videoMode)
{
const std::string formatToken = NormalizeModeToken(videoFormat);
const std::string frameToken = NormalizeModeToken(frameRate);
const std::string formatToken = NormalizeModeToken(mode.videoFormat);
const std::string frameToken = NormalizeModeToken(mode.frameRate);
const std::string combinedToken = formatToken + frameToken;
struct ModeOption
@@ -98,25 +98,22 @@ bool ResolveConfiguredVideoFormat(const std::string& videoFormat, const std::str
return false;
}
bool ResolveConfiguredVideoFormats(
const std::string& inputVideoFormat,
const std::string& inputFrameRate,
const std::string& outputVideoFormat,
const std::string& outputFrameRate,
VideoFormatSelection& videoModes,
bool ResolveConfiguredDeckLinkVideoModes(
const VideoIOConfiguration& config,
DeckLinkVideoModeSelection& videoModes,
std::string& error)
{
if (!ResolveConfiguredVideoFormat(inputVideoFormat, inputFrameRate, videoModes.input))
if (!ResolveConfiguredDeckLinkVideoMode(config.inputMode, videoModes.input))
{
error = "Unsupported DeckLink inputVideoFormat/inputFrameRate in config/runtime-host.json: " +
inputVideoFormat + " / " + inputFrameRate;
error = "Unsupported DeckLink input mode in config/runtime-host.json: " +
config.inputMode.videoFormat + " / " + config.inputMode.frameRate;
return false;
}
if (!ResolveConfiguredVideoFormat(outputVideoFormat, outputFrameRate, videoModes.output))
if (!ResolveConfiguredDeckLinkVideoMode(config.outputMode, videoModes.output))
{
error = "Unsupported DeckLink outputVideoFormat/outputFrameRate in config/runtime-host.json: " +
outputVideoFormat + " / " + outputFrameRate;
error = "Unsupported DeckLink output mode in config/runtime-host.json: " +
config.outputMode.videoFormat + " / " + config.outputMode.frameRate;
return false;
}

View File

@@ -1,47 +1,27 @@
#pragma once
#include "DeckLinkAPI_h.h"
#include "VideoIOConfig.h"
#include <string>
struct FrameSize
{
unsigned width = 0;
unsigned height = 0;
bool IsEmpty() const { return width == 0 || height == 0; }
};
inline bool operator==(const FrameSize& left, const FrameSize& right)
{
return left.width == right.width && left.height == right.height;
}
inline bool operator!=(const FrameSize& left, const FrameSize& right)
{
return !(left == right);
}
struct VideoFormat
struct DeckLinkVideoMode
{
BMDDisplayMode displayMode = bmdModeHD1080p5994;
std::string displayName = "1080p59.94";
};
struct VideoFormatSelection
struct DeckLinkVideoModeSelection
{
VideoFormat input;
VideoFormat output;
DeckLinkVideoMode input;
DeckLinkVideoMode output;
};
std::string NormalizeModeToken(const std::string& value);
bool ResolveConfiguredDisplayMode(const std::string& videoFormat, const std::string& frameRate, BMDDisplayMode& displayMode, std::string& displayModeName);
bool ResolveConfiguredVideoFormat(const std::string& videoFormat, const std::string& frameRate, VideoFormat& videoMode);
bool ResolveConfiguredVideoFormats(
const std::string& inputVideoFormat,
const std::string& inputFrameRate,
const std::string& outputVideoFormat,
const std::string& outputFrameRate,
VideoFormatSelection& videoModes,
bool ResolveConfiguredDeckLinkDisplayMode(const VideoIOModeConfiguration& mode, BMDDisplayMode& displayMode, std::string& displayModeName);
bool ResolveConfiguredDeckLinkVideoMode(const VideoIOModeConfiguration& mode, DeckLinkVideoMode& videoMode);
bool ResolveConfiguredDeckLinkVideoModes(
const VideoIOConfiguration& config,
DeckLinkVideoModeSelection& videoModes,
std::string& error);
bool FindDeckLinkDisplayMode(IDeckLinkDisplayModeIterator* iterator, BMDDisplayMode targetMode, IDeckLinkDisplayMode** foundMode);

View File

@@ -92,14 +92,19 @@ void DeckLinkSession::ReleaseResources()
output.Release();
}
bool DeckLinkSession::DiscoverDevicesAndModes(const VideoFormatSelection& videoModes, std::string& error)
bool DeckLinkSession::DiscoverDevicesAndModes(const VideoIOConfiguration& config, std::string& error)
{
CComPtr<IDeckLinkIterator> deckLinkIterator;
CComPtr<IDeckLinkDisplayMode> inputMode;
CComPtr<IDeckLinkDisplayMode> outputMode;
mState.inputDisplayModeName = videoModes.input.displayName;
mState.outputDisplayModeName = videoModes.output.displayName;
mState.backendId = BackendId();
mState.externalKeyingRequested = config.externalKeyingEnabled;
if (!ResolveConfiguredDeckLinkVideoModes(config, mConfiguredModes, error))
return false;
mState.inputDisplayModeName = mConfiguredModes.input.displayName;
mState.outputDisplayModeName = mConfiguredModes.output.displayName;
HRESULT result = CoCreateInstance(CLSID_CDeckLinkIterator, nullptr, CLSCTX_ALL, IID_IDeckLinkIterator, reinterpret_cast<void**>(&deckLinkIterator));
if (FAILED(result))
@@ -151,9 +156,9 @@ bool DeckLinkSession::DiscoverDevicesAndModes(const VideoFormatSelection& videoM
output.Release();
else
{
mState.outputModelName = modelName;
mState.supportsInternalKeying = deviceSupportsInternalKeying;
mState.supportsExternalKeying = deviceSupportsExternalKeying;
mState.deviceName = modelName;
mState.capabilities.supportsInternalKeying = deviceSupportsInternalKeying;
mState.capabilities.supportsExternalKeying = deviceSupportsExternalKeying;
}
}
@@ -178,9 +183,9 @@ bool DeckLinkSession::DiscoverDevicesAndModes(const VideoFormatSelection& videoM
return false;
}
if (input && !FindDeckLinkDisplayMode(inputDisplayModeIterator, videoModes.input.displayMode, &inputMode))
if (input && !FindDeckLinkDisplayMode(inputDisplayModeIterator, mConfiguredModes.input.displayMode, &inputMode))
{
error = "Cannot get specified input BMDDisplayMode for configured mode: " + videoModes.input.displayName;
error = "Cannot get specified input BMDDisplayMode for configured mode: " + mConfiguredModes.input.displayName;
ReleaseResources();
return false;
}
@@ -194,9 +199,9 @@ bool DeckLinkSession::DiscoverDevicesAndModes(const VideoFormatSelection& videoM
return false;
}
if (!FindDeckLinkDisplayMode(outputDisplayModeIterator, videoModes.output.displayMode, &outputMode))
if (!FindDeckLinkDisplayMode(outputDisplayModeIterator, mConfiguredModes.output.displayMode, &outputMode))
{
error = "Cannot get specified output BMDDisplayMode for configured mode: " + videoModes.output.displayName;
error = "Cannot get specified output BMDDisplayMode for configured mode: " + mConfiguredModes.output.displayName;
ReleaseResources();
return false;
}
@@ -223,7 +228,7 @@ bool DeckLinkSession::DiscoverDevicesAndModes(const VideoFormatSelection& videoM
return true;
}
bool DeckLinkSession::SelectPreferredFormats(const VideoFormatSelection& videoModes, bool outputAlphaRequired, std::string& error)
bool DeckLinkSession::SelectPreferredFormats(const VideoIOConfiguration& config, std::string& error)
{
if (!output)
{
@@ -233,19 +238,19 @@ bool DeckLinkSession::SelectPreferredFormats(const VideoFormatSelection& videoMo
mState.formatStatusMessage.clear();
const bool inputTenBitSupported = input != nullptr && InputSupportsFormat(input, videoModes.input.displayMode, bmdFormat10BitYUV);
const bool inputTenBitSupported = input != nullptr && InputSupportsFormat(input, mConfiguredModes.input.displayMode, bmdFormat10BitYUV);
mState.inputPixelFormat = input != nullptr ? ChoosePreferredVideoIOFormat(inputTenBitSupported) : VideoIOPixelFormat::Uyvy8;
if (input != nullptr && !inputTenBitSupported)
mState.formatStatusMessage += "DeckLink input does not report 10-bit YUV support for the configured mode; using 8-bit capture. ";
const bool outputTenBitSupported = OutputSupportsFormat(output, videoModes.output.displayMode, bmdFormat10BitYUV);
const bool outputTenBitYuvaSupported = OutputSupportsFormat(output, videoModes.output.displayMode, bmdFormat10BitYUVA);
mState.outputPixelFormat = outputAlphaRequired
const bool outputTenBitSupported = OutputSupportsFormat(output, mConfiguredModes.output.displayMode, bmdFormat10BitYUV);
const bool outputTenBitYuvaSupported = OutputSupportsFormat(output, mConfiguredModes.output.displayMode, bmdFormat10BitYUVA);
mState.outputPixelFormat = config.externalKeyingEnabled
? (outputTenBitYuvaSupported ? VideoIOPixelFormat::Yuva10 : VideoIOPixelFormat::Bgra8)
: (outputTenBitSupported ? VideoIOPixelFormat::V210 : VideoIOPixelFormat::Bgra8);
if (outputAlphaRequired && outputTenBitYuvaSupported)
if (config.externalKeyingEnabled && outputTenBitYuvaSupported)
mState.formatStatusMessage += "External keying requires alpha; using 10-bit YUVA output. ";
else if (outputAlphaRequired)
else if (config.externalKeyingEnabled)
mState.formatStatusMessage += "External keying requires alpha, but DeckLink output does not report 10-bit YUVA support for the configured mode; using 8-bit BGRA output. ";
else if (!outputTenBitSupported)
mState.formatStatusMessage += "DeckLink output does not report 10-bit YUV support for the configured mode; using 8-bit BGRA output. ";
@@ -286,7 +291,7 @@ bool DeckLinkSession::SelectPreferredFormats(const VideoFormatSelection& videoMo
return true;
}
bool DeckLinkSession::ConfigureInput(InputFrameCallback callback, const VideoFormat& inputVideoMode, std::string& error)
bool DeckLinkSession::ConfigureInput(InputFrameCallback callback, std::string& error)
{
mInputFrameCallback = std::move(callback);
@@ -298,7 +303,7 @@ bool DeckLinkSession::ConfigureInput(InputFrameCallback callback, const VideoFor
}
const BMDPixelFormat deckLinkInputPixelFormat = DeckLinkPixelFormatForVideoIO(mState.inputPixelFormat);
if (input->EnableVideoInput(inputVideoMode.displayMode, deckLinkInputPixelFormat, bmdVideoInputFlagDefault) != S_OK)
if (input->EnableVideoInput(mConfiguredModes.input.displayMode, deckLinkInputPixelFormat, bmdVideoInputFlagDefault) != S_OK)
{
if (mState.inputPixelFormat == VideoIOPixelFormat::V210)
{
@@ -306,7 +311,7 @@ bool DeckLinkSession::ConfigureInput(InputFrameCallback callback, const VideoFor
mState.inputPixelFormat = VideoIOPixelFormat::Uyvy8;
mState.inputFrameRowBytes = mState.inputFrameSize.width * 2u;
mState.captureTextureWidth = mState.inputFrameSize.width / 2u;
if (input->EnableVideoInput(inputVideoMode.displayMode, bmdFormat8BitYUV, bmdVideoInputFlagDefault) == S_OK)
if (input->EnableVideoInput(mConfiguredModes.input.displayMode, bmdFormat8BitYUV, bmdVideoInputFlagDefault) == S_OK)
{
std::ostringstream status;
status << "DeckLink formats: capture " << VideoIOPixelFormatName(mState.inputPixelFormat)
@@ -341,26 +346,26 @@ input_enabled:
return true;
}
bool DeckLinkSession::ConfigureOutput(OutputFrameCallback callback, const VideoFormat& outputVideoMode, bool externalKeyingEnabled, std::string& error)
bool DeckLinkSession::ConfigureOutput(OutputFrameCallback callback, std::string& error)
{
mOutputFrameCallback = std::move(callback);
if (output->EnableVideoOutput(outputVideoMode.displayMode, bmdVideoOutputFlagDefault) != S_OK)
if (output->EnableVideoOutput(mConfiguredModes.output.displayMode, bmdVideoOutputFlagDefault) != S_OK)
{
error = "DeckLink output setup failed while enabling video output.";
return false;
}
if (output->QueryInterface(IID_IDeckLinkKeyer, (void**)&keyer) == S_OK && keyer != NULL)
mState.keyerInterfaceAvailable = true;
mState.capabilities.keyerInterfaceAvailable = true;
if (externalKeyingEnabled)
if (mState.externalKeyingRequested)
{
if (!mState.supportsExternalKeying)
if (!mState.capabilities.supportsExternalKeying)
{
mState.statusMessage = "External keying was requested, but the selected DeckLink output does not report external keying support.";
}
else if (!mState.keyerInterfaceAvailable)
else if (!mState.capabilities.keyerInterfaceAvailable)
{
mState.statusMessage = "External keying was requested, but the selected DeckLink output does not expose the IDeckLinkKeyer interface.";
}
@@ -374,7 +379,7 @@ bool DeckLinkSession::ConfigureOutput(OutputFrameCallback callback, const VideoF
mState.statusMessage = "External keying is active on the selected DeckLink output.";
}
}
else if (mState.supportsExternalKeying)
else if (mState.capabilities.supportsExternalKeying)
{
mState.statusMessage = "Selected DeckLink output supports external keying. Set enableExternalKeying to true in runtime-host.json to request it.";
}

View File

@@ -20,41 +20,14 @@ public:
DeckLinkSession() = default;
~DeckLinkSession();
VideoIOBackendId BackendId() const override { return VideoIOBackendId::DeckLink; }
void ReleaseResources() override;
bool DiscoverDevicesAndModes(const VideoFormatSelection& videoModes, std::string& error) override;
bool SelectPreferredFormats(const VideoFormatSelection& videoModes, bool outputAlphaRequired, std::string& error) override;
bool ConfigureInput(InputFrameCallback callback, const VideoFormat& inputVideoMode, std::string& error) override;
bool ConfigureOutput(OutputFrameCallback callback, const VideoFormat& outputVideoMode, bool externalKeyingEnabled, std::string& error) override;
bool DiscoverDevicesAndModes(const VideoIOConfiguration& config, std::string& error) override;
bool SelectPreferredFormats(const VideoIOConfiguration& config, std::string& error) override;
bool ConfigureInput(InputFrameCallback callback, std::string& error) override;
bool ConfigureOutput(OutputFrameCallback callback, std::string& error) override;
bool Start() override;
bool Stop() override;
bool HasInputDevice() const { return mState.hasInputDevice; }
bool HasInputSource() const { return mState.hasInputSource; }
void SetInputSourceMissing(bool missing) { mState.hasInputSource = !missing; }
bool InputOutputDimensionsDiffer() const { return mState.inputFrameSize != mState.outputFrameSize; }
const FrameSize& InputFrameSize() const { return mState.inputFrameSize; }
const FrameSize& OutputFrameSize() const { return mState.outputFrameSize; }
unsigned InputFrameWidth() const { return mState.inputFrameSize.width; }
unsigned InputFrameHeight() const { return mState.inputFrameSize.height; }
unsigned OutputFrameWidth() const { return mState.outputFrameSize.width; }
unsigned OutputFrameHeight() const { return mState.outputFrameSize.height; }
VideoIOPixelFormat InputPixelFormat() const { return mState.inputPixelFormat; }
VideoIOPixelFormat OutputPixelFormat() const { return mState.outputPixelFormat; }
bool InputIsTenBit() const { return VideoIOPixelFormatIsTenBit(mState.inputPixelFormat); }
bool OutputIsTenBit() const { return VideoIOPixelFormatIsTenBit(mState.outputPixelFormat); }
unsigned InputFrameRowBytes() const { return mState.inputFrameRowBytes; }
unsigned OutputFrameRowBytes() const { return mState.outputFrameRowBytes; }
unsigned CaptureTextureWidth() const { return mState.captureTextureWidth; }
unsigned OutputPackTextureWidth() const { return mState.outputPackTextureWidth; }
const std::string& FormatStatusMessage() const { return mState.formatStatusMessage; }
const std::string& InputDisplayModeName() const { return mState.inputDisplayModeName; }
const std::string& OutputModelName() const { return mState.outputModelName; }
bool SupportsInternalKeying() const { return mState.supportsInternalKeying; }
bool SupportsExternalKeying() const { return mState.supportsExternalKeying; }
bool KeyerInterfaceAvailable() const { return mState.keyerInterfaceAvailable; }
bool ExternalKeyingActive() const { return mState.externalKeyingActive; }
const std::string& StatusMessage() const { return mState.statusMessage; }
void SetStatusMessage(const std::string& message) { mState.statusMessage = message; }
const VideoIOState& State() const override { return mState; }
VideoIOState& MutableState() override { return mState; }
double FrameBudgetMilliseconds() const;
@@ -76,4 +49,5 @@ private:
VideoPlayoutScheduler mScheduler;
InputFrameCallback mInputFrameCallback;
OutputFrameCallback mOutputFrameCallback;
DeckLinkVideoModeSelection mConfiguredModes;
};

View File

@@ -2,6 +2,7 @@
"shaderLibrary": "shaders",
"serverPort": 8080,
"oscPort": 9000,
"videoBackend": "decklink",
"inputVideoFormat": "1080p",
"inputFrameRate": "59.94",
"outputVideoFormat": "1080p",

View File

@@ -6,6 +6,8 @@ float4 balatroSwirl(float2 screenSize, float2 screenCoords, float time, float se
float2 uv = (screenCoords - 0.5 * screenSize) / safeScreenLength - offset - seedOffset;
float uvLength = length(uv);
// First warp: convert to polar space and twist the angle more near the
// center, creating the large spiral motion.
float speed = spinRotation * spinEase * 0.2;
if (isRotate)
speed = time * speed;
@@ -19,6 +21,8 @@ float4 balatroSwirl(float2 screenSize, float2 screenCoords, float time, float se
speed = (time + seed * 17.0) * spinSpeed;
float2 uv2 = float2(uv.x + uv.y, uv.x + uv.y);
// Second warp: a short iterative feedback loop turns the spiral into
// painterly bands while preserving a fixed compile-time loop bound.
for (int i = 0; i < 5; ++i)
{
uv2 += float2(sin(max(uv.x, uv.y)), sin(max(uv.x, uv.y))) + uv;
@@ -32,6 +36,8 @@ float4 balatroSwirl(float2 screenSize, float2 screenCoords, float time, float se
float c1p = max(0.0, 1.0 - contrastMod * abs(1.0 - paintRes));
float c2p = max(0.0, 1.0 - contrastMod * abs(paintRes));
float c3p = 1.0 - min(1.0, c1p + c2p);
// Three soft band weights drive the palette; lighting rides on the brightest
// bands so the swirl keeps dimensional highlights.
float light = (lighting - 0.2) * max(c1p * 5.0 - 4.0, 0.0) + lighting * max(c2p * 5.0 - 4.0, 0.0);
float safeContrast = max(contrast, 0.001);

View File

@@ -0,0 +1,102 @@
{
"id": "crt-bulge",
"name": "CRT Bulge",
"description": "Warps the image like convex CRT glass, with optional rounded screen edges and vignette darkening.",
"category": "Distortion",
"entryPoint": "shadeVideo",
"parameters": [
{
"id": "bulgeAmount",
"label": "Bulge",
"type": "float",
"default": -0.04,
"min": -0.5,
"max": 0.8,
"step": 0.01,
"description": "Positive values swell the center outward; negative values pinch it inward."
},
{
"id": "zoom",
"label": "Zoom",
"type": "float",
"default": 1.04,
"min": 0.5,
"max": 2,
"step": 0.01,
"description": "Scales the source before distortion, useful for hiding warped edges."
},
{
"id": "edgeRoundness",
"label": "Edge Roundness",
"type": "float",
"default": 0.08,
"min": 0,
"max": 0.35,
"step": 0.01,
"description": "Rounds the visible screen corners like older CRT glass."
},
{
"id": "edgeFeather",
"label": "Edge Feather",
"type": "float",
"default": 2,
"min": 0,
"max": 24,
"step": 0.1,
"description": "Softens the rounded screen edge in pixels."
},
{
"id": "sourceEdgeFeather",
"label": "Source Edge Feather",
"type": "float",
"default": 1.5,
"min": 0,
"max": 16,
"step": 0.1,
"description": "Antialiases warped source edges when the distortion reveals outside-frame pixels."
},
{
"id": "vignetteAmount",
"label": "Vignette",
"type": "float",
"default": 0.18,
"min": 0,
"max": 1,
"step": 0.01,
"description": "Darkens the glass toward the screen edges."
},
{
"id": "edgeMode",
"label": "Edge Mode",
"type": "enum",
"default": "black",
"options": [
{
"value": "black",
"label": "Black"
},
{
"value": "clamp",
"label": "Clamp"
},
{
"value": "mirror",
"label": "Mirror"
}
],
"description": "Chooses how warped samples outside the source frame are filled."
},
{
"id": "outsideColor",
"label": "Outside Color",
"type": "color",
"default": [
0,
0,
0,
1
],
"description": "Color used outside the curved screen or source frame."
}
]
}

View File

@@ -0,0 +1,71 @@
float mirroredCoordinate(float coordinate)
{
float wrapped = frac(coordinate * 0.5) * 2.0;
return wrapped <= 1.0 ? wrapped : 2.0 - wrapped;
}
float roundedBoxMask(float2 point, float2 halfSize, float radius, float feather)
{
float2 distanceToEdge = abs(point) - (halfSize - radius);
float outsideDistance = length(max(distanceToEdge, float2(0.0, 0.0))) - radius;
float insideDistance = min(max(distanceToEdge.x, distanceToEdge.y), 0.0);
float signedDistance = outsideDistance + insideDistance;
return 1.0 - smoothstep(0.0, max(feather, 0.00001), signedDistance);
}
float sourceBoundsMask(float2 uv, float2 resolution)
{
float2 pixel = 1.0 / max(resolution, float2(1.0, 1.0));
float2 feather = pixel * max(sourceEdgeFeather, 0.0);
float left = smoothstep(0.0, max(feather.x, 0.00001), uv.x);
float right = 1.0 - smoothstep(1.0 - max(feather.x, 0.00001), 1.0, uv.x);
float top = smoothstep(0.0, max(feather.y, 0.00001), uv.y);
float bottom = 1.0 - smoothstep(1.0 - max(feather.y, 0.00001), 1.0, uv.y);
return saturate(left * right * top * bottom);
}
float2 applyBulge(float2 uv, float2 resolution)
{
float2 centered = uv * 2.0 - 1.0;
float aspect = resolution.x / max(resolution.y, 1.0);
float2 aspectCentered = float2(centered.x * aspect, centered.y);
float radiusSq = dot(aspectCentered, aspectCentered);
float amount = clamp(bulgeAmount, -0.95, 0.95);
float scale = 1.0 / max(1.0 + amount * radiusSq, 0.05);
return centered * scale / max(zoom, 0.001) * 0.5 + 0.5;
}
float4 sampleWarped(float2 uv, float2 resolution, out bool insideSource)
{
insideSource = uv.x >= 0.0 && uv.x <= 1.0 && uv.y >= 0.0 && uv.y <= 1.0;
if (edgeMode == 1)
return sampleVideo(clamp(uv, 0.0, 1.0));
if (edgeMode == 2)
return sampleVideo(float2(mirroredCoordinate(uv.x), mirroredCoordinate(uv.y)));
float edgeMask = sourceBoundsMask(uv, resolution);
float4 color = sampleVideo(clamp(uv, 0.0, 1.0));
return lerp(outsideColor, color, edgeMask);
}
float4 shadeVideo(ShaderContext context)
{
float2 resolution = max(context.outputResolution, float2(1.0, 1.0));
float2 sourceUv = applyBulge(context.uv, resolution);
bool insideSource = false;
float4 color = sampleWarped(sourceUv, resolution, insideSource);
float2 centered = context.uv * 2.0 - 1.0;
float feather = max(edgeFeather, 0.0) / min(resolution.x, resolution.y);
float screenMask = roundedBoxMask(centered, float2(1.0, 1.0), saturate(edgeRoundness), feather);
color = lerp(outsideColor, color, screenMask);
float2 aspectCentered = float2(centered.x * resolution.x / max(resolution.y, 1.0), centered.y);
float edgeDistance = saturate(length(aspectCentered) * 0.72);
float vignette = lerp(1.0, 1.0 - saturate(vignetteAmount), smoothstep(0.35, 1.05, edgeDistance));
color.rgb *= vignette;
return saturate(color);
}

View File

@@ -31,6 +31,8 @@ float normalizedFisheyeRadius(float theta, float halfFov)
{
float safeHalfFov = max(halfFov, 0.0001);
// Match common fisheye projection families while keeping the selected FOV
// normalized to the same source-image radius.
if (fisheyeModel == 1)
{
return sin(theta * 0.5) / max(sin(safeHalfFov * 0.5), 0.0001);
@@ -49,6 +51,7 @@ float normalizedFisheyeRadius(float theta, float halfFov)
float3 equirectangularRay(float2 uv)
{
// Convert equirectangular UVs into longitude/latitude on the unit sphere.
float longitude = (uv.x - 0.5) * TWO_PI;
float latitude = (0.5 - uv.y) * PI;
float latitudeCos = cos(latitude);
@@ -82,6 +85,8 @@ float4 sampleEdgeFilledVideo(float2 sourceUv, ShaderContext context)
float inwardLength = max(length(inward), 0.000001);
inward /= inwardLength;
// Outside the fisheye image, sample back inward from the nearest edge so the
// fill looks like stretched lens content instead of a hard color plate.
float blurDistance = max(edgeBlur, 0.0);
float4 color = sampleVideo(clampedUv) * 0.32;
color += sampleVideo(saturate(clampedUv + inward * blurDistance * 0.35)) * 0.26;
@@ -114,6 +119,7 @@ float4 shadeVideo(ShaderContext context)
float phi = atan2(ray.y, ray.x);
float fisheyeRadius = normalizedFisheyeRadius(theta, halfFov);
// Project the mirrored sphere ray back into the circular fisheye source.
float2 sourceUv = float2(
center.x + cos(phi) * fisheyeRadius * radius.x,
center.y - sin(phi) * fisheyeRadius * radius.y

View File

@@ -43,6 +43,8 @@ float normalizedFisheyeRadius(float theta, float halfFov)
{
float safeHalfFov = max(halfFov, 0.0001);
// Different fisheye lenses map angle to image radius differently. Normalize
// each model by the selected half-FOV so the outer lens edge stays at 1.0.
if (fisheyeModel == 1)
{
return sin(theta * 0.5) / max(sin(safeHalfFov * 0.5), 0.0001);
@@ -67,6 +69,8 @@ float4 shadeVideo(ShaderContext context)
float virtualFov = radiansFromDegrees(clamp(virtualFovDegrees, 1.0, 175.0));
float tanHalfFov = tan(virtualFov * 0.5);
// Build a virtual output-camera ray, then rotate it into the fisheye lens
// coordinate system before asking where that ray lands on the source image.
float3 ray = outputProjection == 1
? buildCylindricalRay(screen, outputAspect, tanHalfFov)
: buildRectilinearRay(screen, outputAspect, tanHalfFov);
@@ -86,6 +90,7 @@ float4 shadeVideo(ShaderContext context)
float phi = atan2(ray.y, ray.x);
float fisheyeRadius = normalizedFisheyeRadius(theta, halfFov);
// Polar lens coordinates become UVs inside the circular fisheye image.
float2 sourceUv = float2(
center.x + cos(phi) * fisheyeRadius * radius.x,
center.y - sin(phi) * fisheyeRadius * radius.y

View File

@@ -23,6 +23,8 @@ float3 matteSampleColor(float2 uv, ShaderContext context)
if (blur <= 0.0001)
return center;
// Pre-blur only the color used for screen comparison; the final image keeps
// its original detail and alpha is refined in a later pass.
float2 radius = pixel * blur;
float3 color = center * 0.36;
color += saturate(sampleVideo(saturate(uv + float2(radius.x, 0.0))).rgb) * 0.16;
@@ -37,6 +39,8 @@ float keyDistanceAt(float2 uv, ShaderContext context)
float3 color = matteSampleColor(uv, context);
float3 keyColor = saturate(screenColor.rgb);
float chromaDistance = distance(chroma709(color), chroma709(keyColor)) * 2.65;
// Direction distance is less sensitive to brightness, while chroma distance
// follows broadcast-style color difference; screenBalance blends the two.
float directionDistance = length(safeNormalize(max(color, float3(0.0001, 0.0001, 0.0001))) - safeNormalize(max(keyColor, float3(0.0001, 0.0001, 0.0001)))) * 0.55;
return lerp(directionDistance, chromaDistance, saturate(screenBalance));
}
@@ -65,6 +69,8 @@ float refinedAlphaFromMatte(float2 uv, ShaderContext context)
if (aaRadius > 0.0001)
{
// A small fixed kernel smooths edges and collects min/max alpha for
// black/white cleanup without needing dynamic loops or arrays.
float2 radius = pixel * aaRadius;
float2 halfRadius = radius * 0.5;
float alphaMin = centerAlpha;
@@ -126,6 +132,8 @@ float refinedAlphaFromMatte(float2 uv, ShaderContext context)
alpha = centerAlpha;
}
// Final matte shaping happens after blur/cleanup so clip and contrast affect
// the refined edge rather than the raw screen-distance estimate.
alpha = saturate((alpha - clipBlack) / max(clipWhite - clipBlack, 0.0001));
alpha = saturate((alpha - 0.5) * max(matteContrast, 0.0001) + 0.5);
alpha = pow(max(alpha, 0.0), max(matteGamma, 0.0001));
@@ -135,6 +143,8 @@ float refinedAlphaFromMatte(float2 uv, ShaderContext context)
float spillAmountForColor(float3 color)
{
float3 keyColor = saturate(screenColor.rgb);
// Measure spill as color energy aligned with the screen color minus the
// strongest opposing channel, leaving neutral highlights mostly intact.
float keyComponent = dot(color, safeNormalize(max(keyColor, float3(0.0001, 0.0001, 0.0001))));
float opposingComponent = max(max(color.r * (1.0 - keyColor.r), color.g * (1.0 - keyColor.g)), color.b * (1.0 - keyColor.b));
return saturate(keyComponent - opposingComponent + despillBias);
@@ -187,6 +197,8 @@ float4 applyKey(ShaderContext context)
float cropMask = cropMaskAt(context.uv, context);
alpha *= cropMask;
// Edge recovery is strongest around 50% alpha, where fringing usually lives,
// and fades away for solid foreground/background pixels.
float edgeAmount = saturate(1.0 - abs(alpha * 2.0 - 1.0));
despilled = lerp(despilled, despilled * saturate(edgeColor.rgb), edgeAmount * saturate(edgeRecover));

View File

@@ -36,6 +36,8 @@ float4 shadeVideo(ShaderContext context)
float4 accumulated = float4(0.0, 0.0, 0.0, 0.0);
float clampedSteps = clamp(raySteps, 1.0, 77.0);
// Ray-march a folded procedural field. distanceToSurface advances the ray,
// while inverse-distance accumulation creates the glowing filaments.
for (int i = 0; i < 77; ++i)
{
if (float(i) >= clampedSteps)
@@ -49,11 +51,14 @@ float4 shadeVideo(ShaderContext context)
position.xy = mul(rotateAroundZ(2.0 + originalPosition.z), position.xy);
position.xy = mul(happyAccidentMatrix(originalPosition, timeCos), position.xy);
// Color comes from pre-fold space so the palette varies smoothly even as
// the geometry folds into repeated cells.
float colorSeed = 0.5 * originalPosition.z + length(position - originalPosition);
float4 palette = 1.0 + sin(colorSeed + float4(0.0, 4.0, 3.0, 6.0));
palette /= 0.55 + 1.55 * dot(originalPosition.xy, originalPosition.xy);
position = abs(frac(position) - 0.5);
// Distance to a tiny box/cross primitive inside each repeated cell.
distanceToSurface = abs(min(length(position.xy) - 0.125, min(position.x, position.y) + 0.001)) + 0.001;
accumulated += palette.w * palette / distanceToSurface;
}

View File

@@ -7,6 +7,8 @@ float3 sampleLutCell(float3 index)
float g = floor(index.g + 0.5);
float b = floor(index.b + 0.5);
// The 33^3 cube is packed as blue slices laid horizontally, with red across
// each slice and green down the atlas.
float atlasWidth = LUT_SIZE * LUT_SIZE;
float2 lutUv;
lutUv.x = (r + b * LUT_SIZE + 0.5) / atlasWidth;
@@ -30,6 +32,9 @@ float3 applyLut33(float3 color)
float3 c011 = sampleLutCell(float3(baseIndex.r, nextIndex.g, nextIndex.b));
float3 c111 = sampleLutCell(float3(nextIndex.r, nextIndex.g, nextIndex.b));
// Tetrahedral interpolation chooses one of six paths through the cube.
// This avoids the muddy diagonals that simple trilinear LUT sampling can
// introduce for strong grades.
if (blend.r > blend.g)
{
if (blend.g > blend.b)
@@ -55,6 +60,8 @@ float hash12(float2 value)
float3 outputDither(float2 pixel)
{
// Subtract paired hashes to center the dither around zero, then scale to
// roughly one 8-bit code value.
float r = hash12(pixel + float2(17.0, 31.0)) - hash12(pixel + float2(83.0, 47.0));
float g = hash12(pixel + float2(29.0, 71.0)) - hash12(pixel + float2(53.0, 19.0));
float b = hash12(pixel + float2(61.0, 11.0)) - hash12(pixel + float2(7.0, 97.0));

View File

@@ -20,6 +20,8 @@ float4 shadeVideo(ShaderContext context)
float2 p = (fragCoord + fragCoord - resolution) / resolution.y / safeScale;
p -= center + float2(sin(seed * 6.2831853), cos(seed * 6.2831853)) * 0.035;
// Build a skewed coordinate system around an offset "black hole" so the
// waves pinch and stretch instead of staying radially symmetric.
float iterator = 0.2;
float2 diagonal = normalize(float2(-1.0 + seed * 0.5, 1.0 - seed * 0.35));
float2 blackholeCenter = p - iterator * diagonal;
@@ -30,6 +32,8 @@ float4 shadeVideo(ShaderContext context)
float2 v = singularitySpiral(c, time, iterator);
float2 waves = float2(0.0001, 0.0001);
// Iterative sine feedback creates the accretion texture; the iterator value
// also damps later steps to keep the pattern stable.
for (; iterator < 9.0; iterator += 1.0)
{
waves += 1.0 + sin(v);
@@ -40,6 +44,8 @@ float4 shadeVideo(ShaderContext context)
float disk = 2.0 + diskRadius * diskRadius * (0.25 * safeTightness) - diskRadius;
float centerDarkness = 0.5 + 1.0 / max(dot(c, c), 0.0001);
float rim = 0.025 + abs(length(p) - safeRingRadius) * safeTightness;
// Exponential falloff turns the accumulated wave field into bright rims and
// a darker center without hard thresholds.
float4 redBlueGradient = exp(c.x * float4(0.6, -0.4, -1.0, 0.0) * colorShift);
float4 waveColor = waves.xyyx;

View File

@@ -69,7 +69,7 @@
"id": "vignetteAmount",
"label": "Vignette",
"type": "float",
"default": 0.18,
"default": 0.3,
"min": 0,
"max": 0.6,
"step": 0.01,
@@ -154,6 +154,46 @@
"max": 6,
"step": 0.05,
"description": "Scale of the generated noise pattern."
},
{
"id": "scanlineAmount",
"label": "Scanlines",
"type": "float",
"default": 0.08,
"min": 0,
"max": 0.35,
"step": 0.005,
"description": "Subtle alternating-field luma modulation."
},
{
"id": "chromaCrawlAmount",
"label": "Chroma Crawl",
"type": "float",
"default": 0.035,
"min": 0,
"max": 0.2,
"step": 0.005,
"description": "Moving color shimmer around high-contrast edges."
},
{
"id": "generationLoss",
"label": "Generation Loss",
"type": "float",
"default": 0.18,
"min": 0,
"max": 1,
"step": 0.01,
"description": "Raises blacks, softens detail, lowers contrast, and desaturates chroma like copied tape."
},
{
"id": "sharpnessDrift",
"label": "Sharpness Drift",
"type": "float",
"default": 0.12,
"min": 0,
"max": 0.6,
"step": 0.01,
"description": "Slowly varies picture softness to mimic unstable tape focus."
}
]
}

View File

@@ -8,6 +8,8 @@ float2 jumpy(float2 uv, float framecount)
float2 look = uv;
float m = frac(framecount / 4.0);
float dy = look.y - m;
// Localize the horizontal tear to a moving scanline window instead of
// bending the whole frame equally.
float window = 1.0 / (1.0 + 80.0 * dy * dy);
look.x += 0.05 * sin(look.y * 10.0 + framecount) / 20.0 * onOff(4.0, 4.0, 0.3, framecount) * (0.5 + cos(framecount * 20.0)) * window;
float vShift = (0.1 * wiggle) * 0.4 * onOff(2.0, 3.0, 0.9, framecount) * (sin(framecount) * sin(framecount * 20.0) + (0.5 + 0.1 * sin(framecount * 200.0) * cos(framecount)));
@@ -44,11 +46,16 @@ float noiseHash(float2 p)
return frac(sin(dot(p, float2(127.1, 311.7))) * 43758.5453123);
}
// Gold Noise (c)2015 dcerisano@standard3d.com, adapted for Slang.
float goldNoise(float2 xy, float seed)
float staticHash(float2 p)
{
const float phi = 1.61803398874989484820459;
return frac(tan(distance(xy * phi, xy) * seed) * xy.x);
float3 p3 = frac(float3(p.x, p.y, p.x) * 0.1031);
p3 += dot(p3, p3.yzx + 33.33);
return frac((p3.x + p3.y) * p3.z);
}
float seededStaticHash(float2 p, float seed)
{
return staticHash(p + float2(seed * 37.13, seed * 17.71));
}
float grainScalar(float2 uv)
@@ -59,13 +66,17 @@ float grainScalar(float2 uv)
float3 animatedChromaGrain(float2 uv, float time, float2 outputResolution, float grainSize)
{
float safeGrainSize = max(grainSize, 0.001);
// Quantize the coordinates first so larger grain sizes become visible
// chroma blocks rather than simply lower-frequency smooth noise.
float2 baseUv = uv * outputResolution * float2(0.85, 0.95) / safeGrainSize;
float2 grainUv = floor(baseUv) + 0.5;
float2 drift = float2(time * 19.7, time * 23.3);
float frame = floor(time * 59.94);
float r = grainScalar(grainUv + drift + float2(13.1, 71.7));
float g = grainScalar(grainUv * float2(1.03, 0.97) + drift * 1.11 + float2(47.2, 19.4));
float b = grainScalar(grainUv * float2(0.96, 1.05) + drift * 0.91 + float2(83.6, 53.8));
// Change the grain field per frame instead of drifting it through UV space;
// continuous drift can alias into horizontal bands that march down-frame.
float r = staticHash(grainUv + float2(frame * 17.0 + 13.1, frame * 3.0 + 71.7));
float g = staticHash(grainUv * float2(1.03, 0.97) + float2(frame * 11.0 + 47.2, frame * 5.0 + 19.4));
float b = staticHash(grainUv * float2(0.96, 1.05) + float2(frame * 7.0 + 83.6, frame * 13.0 + 53.8));
return float3(r, g, b) * 2.0 - 1.0;
}
@@ -87,6 +98,8 @@ float valueNoise2(float2 p)
float tapeLineNoise(float2 uv, float time, float2 outputResolution)
{
float y = floor(uv.y * outputResolution.y);
// Combine stable per-line noise with frame-rate noise so bands have both
// slow tape wander and fast electronic shimmer.
float slowLine = valueNoise2(float2(y * 0.021, floor(time * 10.0)));
float fastLine = noiseHash(float2(y * 1.73, floor(time * 59.94)));
float line = (slowLine * 0.7 + fastLine * 0.3) * 2.0 - 1.0;
@@ -102,16 +115,19 @@ float3 analogStatic(float2 uv, float time, float2 outputResolution)
float frame = floor(time * 59.94);
float seed = frac(time);
// Several differently skewed hashes keep the snow from forming obvious
// diagonal or grid patterns at broadcast frame cadence.
float2 goldPixel = pixel + float2(0.37, 0.61) + frame;
float snowA = goldNoise(goldPixel, seed + 0.1);
float snowB = goldNoise(goldPixel * float2(0.37, 2.11) + float2(19.0, 41.0), seed + 0.2);
float snowC = goldNoise(goldPixel * float2(1.73, 0.81) + float2(53.0, 7.0), seed + 0.3);
float snowA = seededStaticHash(goldPixel, seed + 0.1);
float snowB = seededStaticHash(goldPixel * float2(0.37, 2.11) + float2(19.0, 41.0), seed + 0.2);
float snowC = seededStaticHash(goldPixel * float2(1.73, 0.81) + float2(53.0, 7.0), seed + 0.3);
float snow = (snowA * 0.72 + snowB * 0.28) * 2.0 - 1.0;
float lineNoise = tapeLineNoise(uv, time, safeResolution);
float dropoutSeed = goldNoise(float2(floor(uv.y * safeResolution.y * 0.25) + 1.0, frame + 2.0), seed + 0.4);
float dropoutSeed = seededStaticHash(float2(floor(uv.y * safeResolution.y * 0.25) + 1.0, frame + 2.0), seed + 0.4);
float dropout = smoothstep(0.965, 1.0, dropoutSeed);
float fleck = smoothstep(0.988, 1.0, snowA) - smoothstep(0.0, 0.012, snowC);
float fleckSeed = seededStaticHash(pixel + float2(frame * 13.0, -frame * 7.0), seed + 0.5);
float fleck = smoothstep(0.992, 1.0, fleckSeed) - smoothstep(0.0, 0.008, snowC);
float scan = sin(uv.y * safeResolution.y * 3.14159265);
float scanMask = 0.55 + 0.45 * scan * scan;
@@ -138,6 +154,85 @@ float3 softBloom(float2 uv, float2 outputResolution, float radius)
return sum;
}
float3 softCrossBlur(float2 uv, float2 outputResolution, float radius)
{
    // Plus-shaped 5-tap blur: one heavy center tap plus four equal taps along
    // the axes. Sample coordinates are wrapped into [0, 1) with frac().
    float2 pixel = 1.0 / max(outputResolution, float2(1.0, 1.0));
    float2 offset = pixel * radius;
    float3 sum = sampleVideo(frac(uv)).rgb * 0.40;
    float2 taps[4] =
    {
        float2(offset.x, 0.0),
        float2(-offset.x, 0.0),
        float2(0.0, offset.y),
        float2(0.0, -offset.y)
    };
    for (int tapIndex = 0; tapIndex < 4; ++tapIndex)
        sum += sampleVideo(frac(uv + taps[tapIndex])).rgb * 0.15;
    return sum;
}
float3 applyChromaCrawl(float3 color, float2 uv, float time, float2 outputResolution)
{
    // Adds a moving color shimmer along high-contrast edges (dot-crawl-style
    // chroma misregistration). Returns `color` unchanged when the parameter is
    // effectively zero.
    float amount = saturate(chromaCrawlAmount);
    if (amount <= 0.0001)
        return color;
    float2 pixel = 1.0 / max(outputResolution, float2(1.0, 1.0));
    // Edge detection: forward luma differences in x and y, scaled so even
    // moderate edges saturate the mask.
    float lumaCenter = dot(color, float3(0.299, 0.587, 0.114));
    float lumaX = dot(sampleVideo(frac(uv + float2(pixel.x, 0.0))).rgb, float3(0.299, 0.587, 0.114));
    float lumaY = dot(sampleVideo(frac(uv + float2(0.0, pixel.y))).rgb, float3(0.299, 0.587, 0.114));
    float edge = saturate((abs(lumaX - lumaCenter) + abs(lumaY - lumaCenter)) * 6.0);
    // Time-animated phase makes the fringe pattern crawl instead of sticking
    // to the edge; the offset grows with the requested amount.
    float phase = sin(uv.y * outputResolution.y * 1.35 + time * 36.0) * cos(uv.x * outputResolution.x * 0.55 - time * 21.0);
    float2 crawlOffset = float2(phase, -phase * 0.35) * pixel * (1.0 + amount * 8.0);
    float3 shiftedA = sampleVideo(frac(uv + crawlOffset)).rgb;
    float3 shiftedB = sampleVideo(frac(uv - crawlOffset * 0.75)).rgb;
    // Only red and blue are displaced, in opposite directions, so luma stays
    // put while chroma appears misaligned.
    float3 crawled = color;
    crawled.r = lerp(color.r, shiftedA.r, edge * amount);
    crawled.b = lerp(color.b, shiftedB.b, edge * amount);
    return crawled;
}
float3 applyGenerationLoss(float3 color, float2 uv, float2 outputResolution)
{
    // Approximates a re-copied tape: softened detail, partial desaturation,
    // reduced contrast, and slightly lifted, cool blacks.
    float lossAmount = saturate(generationLoss);
    if (lossAmount <= 0.0001)
        return color;
    // Soften first so the tone adjustments below act on the blurred picture.
    float3 blurred = softCrossBlur(uv, outputResolution, 0.85 + lossAmount * 2.2);
    float3 result = lerp(color, blurred, lossAmount * 0.42);
    // Partial desaturation toward Rec. 601 luma.
    float luma = dot(result, float3(0.299, 0.587, 0.114));
    result = lerp(result, float3(luma, luma, luma), lossAmount * 0.32);
    // Contrast pivot around mid-gray, then a small overall dim plus a cool
    // black-level lift.
    result = (result - 0.5) * (1.0 - lossAmount * 0.18) + 0.5;
    result = result * (1.0 - lossAmount * 0.08) + float3(0.035, 0.035, 0.04) * lossAmount;
    return result;
}
float3 applySharpnessDrift(float3 color, float2 uv, float time, float2 outputResolution)
{
    // Slowly varying softness, as if the deck keeps hunting for focus.
    float driftAmount = saturate(sharpnessDrift);
    if (driftAmount <= 0.0001)
        return color;
    // Nested sines give a wobble that never settles into an obvious period.
    float wobble = 0.5 + 0.5 * sin(time * 1.7 + sin(time * 0.37) * 2.0);
    float blurRadius = 0.35 + wobble * 2.25;
    float3 softened = softCrossBlur(uv, outputResolution, blurRadius);
    float blend = driftAmount * (0.35 + 0.65 * wobble);
    return lerp(color, softened, blend);
}
float3 applySubtleScanlines(float3 color, float2 uv, float time, float2 outputResolution)
{
    // Darkens alternating scanlines; the phase shifts half a line per frame
    // (floor(time * 59.94)) so the fields appear to alternate.
    float lineStrength = saturate(scanlineAmount);
    if (lineStrength <= 0.0001)
        return color;
    float scanPhase = (uv.y * outputResolution.y + floor(time * 59.94) * 0.5) * 3.14159265;
    float field = 0.5 + 0.5 * sin(scanPhase);
    // Scanlines read strongest in shadows, so reduce their visibility on
    // bright pixels.
    float luma = dot(color, float3(0.299, 0.587, 0.114));
    float visibility = lerp(1.0, 0.45, saturate(luma));
    float3 shaded = color * (1.0 - lineStrength * visibility * (0.35 + 0.65 * field));
    shaded += lineStrength * 0.015 * (1.0 - field);
    return shaded;
}
float3 blurVhs(float2 uv, float d, int sampleCount)
{
float3 sum = float3(0.0, 0.0, 0.0);
@@ -146,6 +241,8 @@ float3 blurVhs(float2 uv, float d, int sampleCount)
float2 pixelOffset = float2(d, 0.0);
float2 scale = 0.66 * 8.0 * pixelOffset;
// The circular tap pattern approximates soft tape smear while keeping the
// maximum loop bound fixed for shader compilation.
for (int i = 0; i < 15; ++i)
{
if (i >= sampleCount)
@@ -170,6 +267,8 @@ float4 buildTapeSmear(ShaderContext context)
float framecount = frac(time * wiggleSpeed / 7.0) * 7.0;
int sampleCount = int(clamp(blurSamples, 3.0, 15.0) + 0.5);
// Split the source into YIQ, smear each component by a different amount,
// then recombine to mimic luma/chroma bandwidth mismatch on tape.
float d = 0.1 - round(frac(time / 3.0)) * 0.1;
uv = jumpy(uv, framecount);
float s = 0.0001 * -d + 0.0001 * wiggle * sin(time * wiggleSpeed);
@@ -202,6 +301,8 @@ float4 finishVhs(ShaderContext context)
float time = distortedTapeTime(context);
float3 color = sampleVideo(context.uv).rgb;
// Radial red/blue offsets create lens and deck misregistration before the
// wider tape effects are layered in.
float2 centered = context.uv * 2.0 - 1.0;
centered.x *= context.outputResolution.x / max(context.outputResolution.y, 1.0);
float2 aberrationOffset = centered * (aberrationAmount * 0.0015);
@@ -219,16 +320,24 @@ float4 finishVhs(ShaderContext context)
float halationMask = smoothstep(0.45, 1.0, halationLuma) * halationAmount;
color += halationSource * float3(1.0, 0.38, 0.24) * halationMask * 0.35;
// Bloom and fade are applied as separate layers so highlights glow without
// flattening the full picture into the faded black level.
float3 bloomSource = softBloom(context.uv, context.outputResolution, 2.0 + smear * 2.5);
float bloomLuma = dot(bloomSource, float3(0.299, 0.587, 0.114));
float bloomMask = smoothstep(0.32, 1.0, bloomLuma) * bloomAmount;
color = lerp(color, bloomSource, bloomAmount * 0.18);
color += bloomSource * float3(1.0, 0.96, 0.92) * bloomMask * 0.24;
color = applySharpnessDrift(color, context.uv, time, context.outputResolution);
color = applyGenerationLoss(color, context.uv, context.outputResolution);
color = applyChromaCrawl(color, context.uv, time, context.outputResolution);
float3 speckle = animatedChromaGrain(context.uv, time, context.outputResolution, noiseSize);
float luma = dot(color, float3(0.299, 0.587, 0.114));
float noiseMask = lerp(0.65, 1.0, 1.0 - saturate(luma));
float chunkiness = lerp(1.0, 2.4, saturate((noiseSize - 1.0) / 5.0));
// Push darker regions harder: analog noise reads most naturally in shadows
// and avoids washing out bright highlights.
float3 chromaNoise = float3(speckle.x * 1.2, speckle.y * 0.28, speckle.z * 1.35);
color += chromaNoise * noiseAmount * noiseMask * chunkiness;
color.rg = lerp(color.rg, float2(color.r, color.g) + speckle.xy * noiseAmount * 0.2 * chunkiness, 0.35);
@@ -244,6 +353,8 @@ float4 finishVhs(ShaderContext context)
color = color * (1.0 - fadeAmount * 0.08) + float3(0.055, 0.055, 0.065) * fadeAmount;
color = lerp(color, softBloom(context.uv, context.outputResolution, 1.0 + smear), fadeAmount * 0.12);
color = applySubtleScanlines(color, context.uv, time, context.outputResolution);
float vignetteBase = context.uv.x * (1.0 - context.uv.x) * context.uv.y * (1.0 - context.uv.y);
float vignette = saturate(pow(vignetteBase * 16.0, 0.22));
color *= lerp(1.0 - vignetteAmount, 1.0, vignette);

View File

@@ -17,6 +17,8 @@ bool intersectCube(float3 rayOrigin, float3 rayDirection, float halfExtent, out
float3 boxMin = float3(-halfExtent, -halfExtent, -halfExtent);
float3 boxMax = float3(halfExtent, halfExtent, halfExtent);
// Slab intersection: find the ray interval that overlaps all three box
// axes, then keep the nearest positive hit.
float3 invDir = 1.0 / rayDirection;
float3 t0 = (boxMin - rayOrigin) * invDir;
float3 t1 = (boxMax - rayOrigin) * invDir;
@@ -43,6 +45,8 @@ float2 cubeFaceUv(float3 hitPoint, float halfExtent, float zoom)
float2 uv = float2(0.5, 0.5);
float safeZoom = max(zoom, 0.001);
// The dominant coordinate tells which face was hit; the other two axes
// become that face's local UVs.
if (face.x >= face.y && face.x >= face.z)
{
uv = hitPoint.x > 0.0
@@ -79,6 +83,8 @@ float4 shadeVideo(ShaderContext context)
float yaw = spin;
float pitch = spin * 0.61 + 0.35;
// Rotate the camera ray into cube-local space instead of rotating the cube
// geometry, which keeps the intersection math axis-aligned.
float3 localOrigin = rotateY(rotateX(rayOrigin, -pitch), -yaw);
float3 localDirection = rotateY(rotateX(rayDirection, -pitch), -yaw);
@@ -96,6 +102,8 @@ float4 shadeVideo(ShaderContext context)
float3 normal;
float3 face = abs(localHit);
// Reconstruct the face normal from the hit point so lighting follows the
// same face choice used for UV lookup.
if (face.x >= face.y && face.x >= face.z)
normal = float3(sign(localHit.x), 0.0, 0.0);
else if (face.y >= face.x && face.y >= face.z)

View File

@@ -0,0 +1,121 @@
{
"id": "video-plane-3d",
"name": "Video Plane 3D",
"description": "Places the video on a perspective 2D plane in 3D space with camera FOV, XYZ position, and pan/tilt/roll controls.",
"category": "Projection",
"entryPoint": "shadeVideo",
"parameters": [
{
"id": "fovDegrees",
"label": "FOV",
"type": "float",
"default": 45,
"min": 5,
"max": 150,
"step": 0.1,
"description": "Virtual camera vertical field of view in degrees."
},
{
"id": "positionX",
"label": "X",
"type": "float",
"default": 0,
"min": -4,
"max": 4,
"step": 0.01,
"description": "Horizontal plane position in world units."
},
{
"id": "positionY",
"label": "Y",
"type": "float",
"default": 0,
"min": -4,
"max": 4,
"step": 0.01,
"description": "Vertical plane position in world units."
},
{
"id": "positionZ",
"label": "Z",
"type": "float",
"default": 2.2,
"min": 0.1,
"max": 10,
"step": 0.01,
"description": "Depth of the plane in front of the virtual camera."
},
{
"id": "panDegrees",
"label": "Pan",
"type": "float",
"default": 0,
"min": -180,
"max": 180,
"step": 0.1,
"description": "Rotates the plane left/right around its vertical axis."
},
{
"id": "tiltDegrees",
"label": "Tilt",
"type": "float",
"default": 0,
"min": -120,
"max": 120,
"step": 0.1,
"description": "Rotates the plane up/down around its horizontal axis."
},
{
"id": "rollDegrees",
"label": "Roll",
"type": "float",
"default": 0,
"min": -180,
"max": 180,
"step": 0.1,
"description": "Rotates the plane around its face normal."
},
{
"id": "planeScale",
"label": "Plane Scale",
"type": "float",
"default": 1.4,
"min": 0.05,
"max": 6,
"step": 0.01,
"description": "Height of the video plane in world units; width follows the source aspect ratio."
},
{
"id": "edgeFeather",
"label": "Edge Feather",
"type": "float",
"default": 1.5,
"min": 0,
"max": 24,
"step": 0.1,
"description": "Softens the plane edge in source pixels."
},
{
"id": "backgroundMix",
"label": "Background Mix",
"type": "float",
"default": 0,
"min": 0,
"max": 1,
"step": 0.01,
"description": "Mixes the original video behind the projected plane."
},
{
"id": "outsideColor",
"label": "Outside Color",
"type": "color",
"default": [
0,
0,
0,
1
],
"description": "Color used where the camera ray misses the plane."
}
]
}

View File

@@ -0,0 +1,84 @@
static const float PI = 3.14159265358979323846;

// Converts an angle from degrees to radians.
float radiansFromDegrees(float angleInDegrees)
{
    return angleInDegrees * (PI / 180.0);
}
// Rotates a point/vector about the X axis by `angle` radians.
float3 rotateX(float3 point, float angle)
{
    float cosine = cos(angle);
    float sine = sin(angle);
    return float3(point.x, cosine * point.y - sine * point.z, sine * point.y + cosine * point.z);
}
// Rotates a point/vector about the Y axis by `angle` radians.
float3 rotateY(float3 point, float angle)
{
    float cosine = cos(angle);
    float sine = sin(angle);
    return float3(cosine * point.x + sine * point.z, point.y, -sine * point.x + cosine * point.z);
}
// Rotates a point/vector about the Z axis by `angle` radians.
float3 rotateZ(float3 point, float angle)
{
    float cosine = cos(angle);
    float sine = sin(angle);
    return float3(cosine * point.x - sine * point.y, sine * point.x + cosine * point.y, point.z);
}
// Applies the inverse of the plane's pan/tilt/roll orientation so that
// subsequent math happens in the plane's own axis-aligned space.
float3 rotateWorldToPlane(float3 value)
{
    float panRadians = radiansFromDegrees(panDegrees);
    float tiltRadians = radiansFromDegrees(tiltDegrees);
    float rollRadians = radiansFromDegrees(rollDegrees);
    float3 unpanned = rotateY(value, -panRadians);
    float3 untilted = rotateX(unpanned, -tiltRadians);
    return rotateZ(untilted, -rollRadians);
}
// Returns a 0..1 mask that fades to zero over `edgeFeather` source pixels on
// every side of the plane's UV rectangle.
float planeEdgeMask(float2 uv, float2 inputResolution)
{
    float2 featherUv = max(edgeFeather, 0.0) / max(inputResolution, float2(1.0, 1.0));
    // Clamp to a tiny positive width so smoothstep never sees a zero range.
    featherUv = max(featherUv, float2(0.00001, 0.00001));
    // Componentwise smoothstep handles both axes at once: ramp up from the
    // low edge, ramp down toward the high edge.
    float2 lowEdge = smoothstep(float2(0.0, 0.0), featherUv, uv);
    float2 highEdge = float2(1.0, 1.0) - smoothstep(float2(1.0, 1.0) - featherUv, float2(1.0, 1.0), uv);
    float2 axisMask = lowEdge * highEdge;
    return saturate(axisMask.x * axisMask.y);
}
float4 shadeVideo(ShaderContext context)
{
    // Ray-traces a single textured plane from a pinhole camera at the origin
    // looking down +Z. The plane is placed by positionX/Y/Z and oriented by
    // pan/tilt/roll; rays that miss the plane show the background color,
    // optionally mixed with the source video via backgroundMix.
    float2 outputResolution = max(context.outputResolution, float2(1.0, 1.0));
    float outputAspect = outputResolution.x / outputResolution.y;
    float sourceAspect = context.inputResolution.x / max(context.inputResolution.y, 1.0);
    float tanHalfFov = tan(radiansFromDegrees(clamp(fovDegrees, 5.0, 150.0)) * 0.5);
    // Screen coordinates in [-1, 1], x right and y up, centered on the frame.
    float2 screen = float2(context.uv.x * 2.0 - 1.0, 1.0 - context.uv.y * 2.0);
    float3 rayOrigin = float3(0.0, 0.0, 0.0);
    float3 rayDirection = normalize(float3(screen.x * outputAspect * tanHalfFov, screen.y * tanHalfFov, 1.0));
    float3 planePosition = float3(positionX, positionY, max(positionZ, 0.001));
    // Transform the ray into the plane's local frame so the intersection
    // reduces to a z = 0 plane test.
    float3 localOrigin = rotateWorldToPlane(rayOrigin - planePosition);
    float3 localDirection = rotateWorldToPlane(rayDirection);
    float backgroundAmount = saturate(backgroundMix);
    float4 background = float4(lerp(outsideColor.rgb, context.sourceColor.rgb, backgroundAmount), 1.0);
    // Rays (near-)parallel to the plane cannot hit it.
    if (abs(localDirection.z) < 0.00001)
        return background;
    float hitDistance = -localOrigin.z / localDirection.z;
    // Hits behind the camera also fall back to the background.
    if (hitDistance <= 0.0)
        return background;
    float3 localHit = localOrigin + localDirection * hitDistance;
    // Plane height follows planeScale; width follows the source aspect ratio.
    float halfHeight = max(planeScale, 0.001) * 0.5;
    float halfWidth = halfHeight * sourceAspect;
    float2 planeUv = float2(
        localHit.x / max(halfWidth * 2.0, 0.0001) + 0.5,
        0.5 - localHit.y / max(halfHeight * 2.0, 0.0001)
    );
    // Feathered edge mask blends the plane into the background near its rim.
    float mask = planeEdgeMask(planeUv, max(context.inputResolution, float2(1.0, 1.0)));
    float4 planeColor = sampleVideo(clamp(planeUv, 0.0, 1.0));
    return saturate(lerp(background, planeColor, mask));
}

View File

@@ -47,6 +47,35 @@
"step": 0.1,
"description": "Rotates the source image around the frame center."
},
{
"id": "cropAspect",
"label": "Crop Aspect",
"type": "enum",
"default": "none",
"options": [
{
"value": "none",
"label": "None"
},
{
"value": "4x3",
"label": "4:3"
},
{
"value": "3x2",
"label": "3:2"
},
{
"value": "1x1",
"label": "1:1"
},
{
"value": "9x16",
"label": "9:16"
}
],
"description": "Crops the visible image to a centered preset aspect ratio without squeezing the source."
},
{
"id": "edgeMode",
"label": "Edge Mode",

View File

@@ -28,8 +28,42 @@ float2 applyEdgeMode(float2 uv, out bool inside)
return uv;
}
// Maps the cropAspect enum index to a width/height ratio. Index 0 ("none")
// returns 0.0, which downstream code treats as "no crop".
float selectedCropAspect()
{
    if (cropAspect == 4)
        return 9.0 / 16.0;
    if (cropAspect == 3)
        return 1.0;
    if (cropAspect == 2)
        return 3.0 / 2.0;
    if (cropAspect == 1)
        return 4.0 / 3.0;
    return 0.0;
}
// True when `uv` lies inside a centered window whose on-screen aspect matches
// the selected crop preset; a non-positive preset aspect disables cropping.
bool insideCropWindow(float2 uv, float2 resolution)
{
    float targetAspect = selectedCropAspect();
    if (targetAspect <= 0.0)
        return true;
    float outputAspect = resolution.x / max(resolution.y, 1.0);
    // Shrink exactly one axis so the window shows the target aspect ratio.
    float2 cropSize = float2(1.0, 1.0);
    if (outputAspect > targetAspect)
        cropSize.x = targetAspect / outputAspect;
    else
        cropSize.y = outputAspect / targetAspect;
    float2 cropMin = (1.0 - cropSize) * 0.5;
    float2 cropMax = cropMin + cropSize;
    return all(uv >= cropMin) && all(uv <= cropMax);
}
float4 shadeVideo(ShaderContext context)
{
if (!insideCropWindow(context.uv, max(context.outputResolution, float2(1.0, 1.0))))
return outsideColor;
float safeZoom = max(zoom, 0.001);
float2 sourceUv = (context.uv - 0.5) / safeZoom + 0.5;
sourceUv -= pan;

View File

@@ -18,6 +18,8 @@ float4 shadeVideo(ShaderContext context)
float resolutionAspect = max(context.outputResolution.x, 1.0) / max(context.outputResolution.y, 1.0);
float width = saturate(overlayScale);
float height = width * resolutionAspect / targetAspect;
// Keep the scope in a 16:9 frame, then shrink it if the requested scale
// would push the overlay beyond the screen bounds.
float fitScale = min(1.0 / max(width, 0.001), 1.0 / max(height, 0.001));
width *= min(fitScale, 1.0);
height *= min(fitScale, 1.0);
@@ -36,6 +38,8 @@ float4 shadeVideo(ShaderContext context)
float3 bg = lerp(color.rgb, float3(0.0, 0.0, 0.0), saturate(backgroundOpacity));
float labelHeight = min(max(pad.x * 0.95, 0.048), 0.12);
// Label textures are authored in UV space, so compensate for the overlay
// and output aspect ratios to keep the glyphs from stretching.
float labelWidth = labelHeight * height * max(context.outputResolution.y, 1.0) / max(width * max(context.outputResolution.x, 1.0), 0.001);
float labelX = max(pad.x * 0.5, labelWidth * 0.55);
float y0 = pad.y;
@@ -63,6 +67,8 @@ float4 shadeVideo(ShaderContext context)
float requestedSamples = clamp(waveformSamples, 1.0, 96.0);
float density = 0.0;
// For each output pixel, march through source rows at the same X coordinate
// and accumulate hits where sampled luma lands near this pixel's Y level.
for (int sampleIndex = 0; sampleIndex < 96; sampleIndex++)
{
float samplePosition = float(sampleIndex);

View File

@@ -1,5 +1,7 @@
float boxMask(float2 point, float2 halfSize, float feather)
{
// Signed-distance box mask gives the chart and border pixel-sized feathered
// edges without branching per side.
float2 distanceToEdge = abs(point) - halfSize;
float outsideDistance = length(max(distanceToEdge, float2(0.0, 0.0)));
float insideDistance = min(max(distanceToEdge.x, distanceToEdge.y), 0.0);
@@ -31,6 +33,8 @@ float applyToneCurve(float linearLevel)
float patchBrightness(int patchIndex, int count)
{
int clampedIndex = clamp(patchIndex, 0, max(count - 1, 0));
// Each patch is one stop brighter than the previous patch until it clips at
// the requested peak level, matching the Xyla-style exposure ramp.
float linearLevel = baseLevel * exp2(float(clampedIndex));
linearLevel = min(linearLevel, peakLevel);
return applyToneCurve(linearLevel);
@@ -60,6 +64,8 @@ float4 shadeVideo(ShaderContext context)
if (reverseOrder)
patchIndex = count - 1 - patchIndex;
// Build each patch as a slot along the main axis, then mask the cross-axis
// extents so vertical and horizontal charts share the same logic.
float patchSlotCenter = (floor(patchPosition) + 0.5) / float(count);
float localAxis = abs(normalizedAxis - patchSlotCenter) * float(count) * 2.0;
float safeGapSize = saturate(gapSize);

View File

@@ -0,0 +1,67 @@
#include "RuntimeHost.h"
#include <iostream>
namespace
{
int gFailures = 0;

// Records (and prints) a failed check instead of aborting, so every check in
// this file still runs after the first failure.
void Expect(bool condition, const char* message)
{
    if (!condition)
    {
        std::cerr << "FAIL: " << message << "\n";
        ++gFailures;
    }
}
}
int main()
{
RuntimeHost runtimeHost;
std::string error;
Expect(runtimeHost.Initialize(error), "runtime host initializes");
Expect(error.empty(), "runtime host initialization does not report an error");
VideoIOState state;
state.backendId = VideoIOBackendId::DeckLink;
state.deviceName = "Test Device";
state.hasInputDevice = true;
state.hasInputSource = true;
state.inputDisplayModeName = "fake input";
state.outputDisplayModeName = "fake output";
state.capabilities.supportsInternalKeying = true;
state.capabilities.supportsExternalKeying = true;
state.capabilities.keyerInterfaceAvailable = true;
state.externalKeyingRequested = true;
state.externalKeyingActive = true;
state.statusMessage = "ready";
state.formatStatusMessage = "using fake formats";
runtimeHost.SetVideoIOStatus(state);
JsonValue root;
Expect(ParseJson(runtimeHost.BuildStateJson(), root, error), "runtime state json parses");
Expect(root.find("videoIO") != nullptr, "runtime state exposes videoIO");
Expect(root.find("decklink") == nullptr, "runtime state no longer exposes a decklink top-level block");
const JsonValue* app = root.find("app");
Expect(app != nullptr, "runtime state exposes app settings");
Expect(app != nullptr && app->find("videoBackend") != nullptr, "app settings expose videoBackend");
Expect(app != nullptr && app->find("videoBackend")->asString() == "decklink", "videoBackend serializes as decklink");
const JsonValue* videoIO = root.find("videoIO");
Expect(videoIO != nullptr && videoIO->find("backend") != nullptr, "videoIO exposes backend");
Expect(videoIO != nullptr && videoIO->find("backend")->asString() == "decklink", "videoIO backend serializes as decklink");
Expect(videoIO != nullptr && videoIO->find("deviceName") != nullptr, "videoIO exposes device name");
Expect(videoIO != nullptr && videoIO->find("deviceName")->asString() == "Test Device", "videoIO device name matches");
Expect(videoIO != nullptr && videoIO->find("capabilities") != nullptr, "videoIO exposes capabilities");
if (gFailures != 0)
{
std::cerr << gFailures << " RuntimeHost video I/O state test failure(s).\n";
return 1;
}
std::cout << "RuntimeHost video I/O state tests passed.\n";
return 0;
}

View File

@@ -0,0 +1,41 @@
#include "VideoIOBackendFactory.h"
#include "VideoIOTypes.h"
#include <iostream>
namespace
{
int gFailures = 0;

// Records (and prints) a failed check instead of aborting, so every check in
// this file still runs after the first failure.
void Expect(bool condition, const char* message)
{
    if (!condition)
    {
        std::cerr << "FAIL: " << message << "\n";
        ++gFailures;
    }
}
}
// Verifies the video I/O backend factory: a known backend id yields a device
// of the right type without an error, while an out-of-range id is rejected
// with a diagnostic. Returns 0 on success, 1 if any check failed.
int main()
{
    std::string errorMessage;

    // Happy path: the DeckLink id must produce a matching device.
    std::unique_ptr<VideoIODevice> backendDevice = CreateVideoIODevice(VideoIOBackendId::DeckLink, errorMessage);
    Expect(backendDevice != nullptr, "decklink backend factory returns a device");
    Expect(!backendDevice || backendDevice->BackendId() == VideoIOBackendId::DeckLink, "decklink backend reports decklink id");
    Expect(errorMessage.empty(), "supported backend does not produce an error");

    // Failure path: an unknown id must be refused and explained.
    errorMessage.clear();
    backendDevice = CreateVideoIODevice(static_cast<VideoIOBackendId>(999), errorMessage);
    Expect(backendDevice == nullptr, "unknown backend id is rejected");
    Expect(!errorMessage.empty(), "unknown backend reports an error");

    if (gFailures != 0)
    {
        std::cerr << gFailures << " VideoIO backend factory test failure(s).\n";
        return 1;
    }
    std::cout << "VideoIO backend factory tests passed.\n";
    return 0;
}

View File

@@ -19,20 +19,26 @@ void Expect(bool condition, const char* message)
class FakeVideoIODevice : public VideoIODevice
{
public:
VideoIOBackendId BackendId() const override { return VideoIOBackendId::DeckLink; }
void ReleaseResources() override {}
bool DiscoverDevicesAndModes(const VideoFormatSelection&, std::string&) override
bool DiscoverDevicesAndModes(const VideoIOConfiguration&, std::string&) override
{
mState.backendId = BackendId();
mState.inputFrameSize = { 1920, 1080 };
mState.outputFrameSize = { 1920, 1080 };
mState.inputDisplayModeName = "fake 1080p";
mState.outputModelName = "Fake Video IO";
mState.outputDisplayModeName = "fake 1080p";
mState.deviceName = "Fake Video IO";
mState.capabilities.supportsInternalKeying = true;
mState.capabilities.supportsExternalKeying = true;
mState.hasInputDevice = true;
return true;
}
bool SelectPreferredFormats(const VideoFormatSelection&, bool, std::string&) override
bool SelectPreferredFormats(const VideoIOConfiguration& config, std::string&) override
{
mState.externalKeyingRequested = config.externalKeyingEnabled;
mState.inputPixelFormat = VideoIOPixelFormat::Uyvy8;
mState.outputPixelFormat = VideoIOPixelFormat::Bgra8;
mState.inputFrameRowBytes = VideoIORowBytes(mState.inputPixelFormat, mState.inputFrameSize.width);
@@ -42,13 +48,13 @@ public:
return true;
}
bool ConfigureInput(InputFrameCallback callback, const VideoFormat&, std::string&) override
bool ConfigureInput(InputFrameCallback callback, std::string&) override
{
mInputCallback = callback;
return true;
}
bool ConfigureOutput(OutputFrameCallback callback, const VideoFormat&, bool, std::string&) override
bool ConfigureOutput(OutputFrameCallback callback, std::string&) override
{
mOutputCallback = callback;
return true;
@@ -114,19 +120,19 @@ private:
int main()
{
FakeVideoIODevice device;
VideoFormatSelection selection;
VideoIOConfiguration config;
std::string error;
bool inputSeen = false;
bool outputSeen = false;
Expect(device.DiscoverDevicesAndModes(selection, error), "fake discovery succeeds");
Expect(device.SelectPreferredFormats(selection, false, error), "fake format selection succeeds");
Expect(device.DiscoverDevicesAndModes(config, error), "fake discovery succeeds");
Expect(device.SelectPreferredFormats(config, error), "fake format selection succeeds");
Expect(device.ConfigureInput([&](const VideoIOFrame& frame) {
inputSeen = frame.bytes != nullptr && frame.width == 1920 && frame.pixelFormat == VideoIOPixelFormat::Uyvy8;
}, selection.input, error), "fake input config succeeds");
}, error), "fake input config succeeds");
Expect(device.ConfigureOutput([&](const VideoIOCompletion& completion) {
outputSeen = completion.result == VideoIOCompletionResult::Completed;
}, selection.output, false, error), "fake output config succeeds");
}, error), "fake output config succeeds");
Expect(device.Start(), "fake device starts");
VideoIOOutputFrame outputFrame;