This commit is contained in:
2026-05-02 15:18:14 +10:00
parent a76d37c2e8
commit ac88520f85
27 changed files with 0 additions and 2533 deletions

19
.vscode/launch.json vendored
View File

@@ -1,19 +0,0 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "(Windows) Launch",
            "type": "cppvsdbg",
            "request": "launch",
            "program": "${workspaceFolder}/build/windows-debug/Debug/video-shader.exe",
            "args": [],
            "stopAtEntry": false,
            // Run from the build output directory so the web/ and shaders/
            // folders copied next to the executable resolve from the cwd.
            // (${fileDirname} varied with whichever editor file was focused.)
            "cwd": "${workspaceFolder}/build/windows-debug/Debug",
            "environment": [],
            "console": "internalConsole"
        }
    ]
}

View File

@@ -1,101 +0,0 @@
cmake_minimum_required(VERSION 3.24)
project(video-shader VERSION 0.1.0 LANGUAGES CXX C)

set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

# Milestone 1 is Windows-only: it uses the DeckLink COM API and Direct3D 11.
if (NOT WIN32)
    message(FATAL_ERROR "Milestone 1 targets Windows x64 because it uses DeckLink COM and Direct3D 11.")
endif()

# Vendored Blackmagic SDK. The Windows API surface is generated from this IDL
# with MIDL below; the SDK must stay at this exact path (see README).
set(DECKLINK_SDK_DIR "${CMAKE_CURRENT_SOURCE_DIR}/3rdParty/Blackmagic DeckLink SDK 16.0")
set(DECKLINK_IDL "${DECKLINK_SDK_DIR}/Win/include/DeckLinkAPI.idl")
if (NOT EXISTS "${DECKLINK_IDL}")
    message(FATAL_ERROR "DeckLink SDK not found at ${DECKLINK_SDK_DIR}")
endif()

# Locate midl.exe from the newest installed Windows 10/11 SDK.
# COMPARE NATURAL sorts the version directories numerically (e.g.
# 10.0.19041.0 after 10.0.9600.0); a plain lexicographic SORT can pick an
# older SDK.
set(WINDOWS_KITS_10_DIR "$ENV{ProgramFiles\(x86\)}/Windows Kits/10")
file(GLOB WINDOWS_SDK_MIDL_DIRS
    "${WINDOWS_KITS_10_DIR}/bin/*/x64"
    "${WINDOWS_KITS_10_DIR}/bin/*/arm64"
)
list(SORT WINDOWS_SDK_MIDL_DIRS COMPARE NATURAL)
list(REVERSE WINDOWS_SDK_MIDL_DIRS) # newest SDK first
find_program(MIDL_EXECUTABLE
    NAMES midl midl.exe
    HINTS ${WINDOWS_SDK_MIDL_DIRS}
    DOC "Microsoft Interface Definition Language compiler"
    REQUIRED
)

set(GENERATED_DIR "${CMAKE_CURRENT_BINARY_DIR}/generated")
set(CONFIGURED_DIR "${CMAKE_CURRENT_BINARY_DIR}/configured")
set(DECKLINK_GENERATED_HEADER "${GENERATED_DIR}/DeckLinkAPI_h.h")
set(DECKLINK_GENERATED_IID "${GENERATED_DIR}/DeckLinkAPI_i.c")

# Compile the DeckLink IDL into the C/C++ header and the IID/CLSID
# definitions. MIDL also writes the proxy stub and type library; they are not
# consumed by the build, so they are declared as BYPRODUCTS so generators can
# track and clean them.
add_custom_command(
    OUTPUT "${DECKLINK_GENERATED_HEADER}" "${DECKLINK_GENERATED_IID}"
    BYPRODUCTS
        "${GENERATED_DIR}/DeckLinkAPI_p.c"
        "${GENERATED_DIR}/DeckLinkAPI.tlb"
    COMMAND "${CMAKE_COMMAND}" -E make_directory "${GENERATED_DIR}"
    COMMAND "${MIDL_EXECUTABLE}"
        /nologo
        /char signed
        /env x64
        /h DeckLinkAPI_h.h
        /iid DeckLinkAPI_i.c
        /proxy DeckLinkAPI_p.c
        /tlb DeckLinkAPI.tlb
        /out "${GENERATED_DIR}"
        /I "${DECKLINK_SDK_DIR}/Win/include"
        "${DECKLINK_IDL}"
    DEPENDS "${DECKLINK_IDL}"
    VERBATIM
)

# Wrapping the generated IID translation unit in a static library gives the
# custom command a consumer, so MIDL runs before the app target compiles.
add_library(decklink_api_iids STATIC "${DECKLINK_GENERATED_IID}")
target_include_directories(decklink_api_iids PUBLIC "${GENERATED_DIR}")

# Bake the source-tree content/shader paths into AppConfig.h
# (template: src/AppConfig.h.in).
set(APP_CONTENT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/web")
set(APP_SHADER_DIR "${CMAKE_CURRENT_SOURCE_DIR}/shaders")
configure_file(src/AppConfig.h.in "${CONFIGURED_DIR}/AppConfig.h" @ONLY)

add_executable(video-shader
    src/main.cpp
    src/AppState.cpp
    src/DeckLinkPipeline.cpp
    src/D3DProcessor.cpp
    src/FileUtil.cpp
    src/JsonUtil.cpp
    src/ShaderLibrary.cpp
    src/WebServer.cpp
)
target_include_directories(video-shader PRIVATE
    "${CMAKE_CURRENT_SOURCE_DIR}/src"
    "${GENERATED_DIR}"
    "${CONFIGURED_DIR}"
)
target_compile_definitions(video-shader PRIVATE
    NOMINMAX
    WIN32_LEAN_AND_MEAN
)
target_link_libraries(video-shader PRIVATE
    decklink_api_iids
    d3d11
    dxgi
    d3dcompiler
    ws2_32
    bcrypt
    ole32
    oleaut32
)

# Mirror the web UI and shader manifests next to the executable so the app
# can also run relative to its own directory, not only via the configure-time
# absolute paths.
add_custom_command(TARGET video-shader POST_BUILD
    COMMAND "${CMAKE_COMMAND}" -E copy_directory "${APP_CONTENT_DIR}" "$<TARGET_FILE_DIR:video-shader>/web"
    COMMAND "${CMAKE_COMMAND}" -E copy_directory "${APP_SHADER_DIR}" "$<TARGET_FILE_DIR:video-shader>/shaders"
)

View File

@@ -1,22 +0,0 @@
{
    "version": 6,
    "configurePresets": [
        {
            "name": "windows-debug",
            "displayName": "Windows Debug",
            "generator": "Visual Studio 17 2022",
            "architecture": "x64",
            "binaryDir": "${sourceDir}/build/windows-debug"
        }
    ],
    "buildPresets": [
        {
            "name": "windows-debug",
            "configurePreset": "windows-debug",
            "configuration": "Debug"
        }
    ]
}

View File

@@ -1,25 +0,0 @@
# video-shader
Milestone 1 is a Windows x64 native service that captures DeckLink video, processes frames as `RGBA16f` through a D3D11 HLSL shader, and outputs the processed image on the same DeckLink device.
## Build
```powershell
cmake --preset windows-debug
cmake --build --preset windows-debug
```
Run:
```powershell
.\build\windows-debug\Debug\video-shader.exe
```
Open `http://127.0.0.1:8080`.
## Notes
- The DeckLink SDK must remain at `3rdParty/Blackmagic DeckLink SDK 16.0`.
- Milestone 1 captures video only. Audio is intentionally not routed yet.
- The processing boundary is D3D11 `DXGI_FORMAT_R16G16B16A16_FLOAT`.
- The current implementation uses an isolated CPU DeckLink bridge around the D3D11 processing core. The GPU Direct/D3D11 interop point is kept behind the frame bridge boundary for a later allocator-backed implementation.

View File

@@ -1,53 +0,0 @@
// Vertex-shader output / pixel-shader input for the fullscreen pass.
struct VSOut
{
    float4 position : SV_Position;
    float2 uv : TEXCOORD0;
};

// Per-frame constants (slot b0); layout matches D3DProcessor::FrameConstants.
cbuffer FrameConstants : register(b0)
{
    float width;         // frame width in pixels
    float height;        // frame height in pixels
    float frameIndex;    // increments every processed frame
    float frameReserved; // pad to a 16-byte boundary
};

// Shader-specific parameters (slot b1); matches D3DProcessor::ParameterConstants.
cbuffer ShaderParameters : register(b1)
{
    float amount;            // grayscale blend factor, expected range [0, 1]
    float3 parameterPadding; // pad to a 16-byte boundary
};

Texture2D<float4> inputFrame : register(t0);    // RGBA16F source frame
SamplerState linearClampSampler : register(s0); // linear filter, clamp addressing
// Emits a single oversized triangle that covers the whole viewport, driven
// purely by SV_VertexID (no vertex buffer is bound). Clip-space corners reach
// (-1,3) and (3,-1) so the visible [-1,1] square is fully covered, and the
// UV table is chosen so uv (0,0) maps to the top-left of the frame.
VSOut fullscreen_vs(uint vertexId : SV_VertexID)
{
    float2 positions[3] =
    {
        float2(-1.0, -1.0),
        float2(-1.0, 3.0),
        float2( 3.0, -1.0)
    };
    float2 uvs[3] =
    {
        float2(0.0, 1.0),
        float2(0.0, -1.0),
        float2(2.0, 1.0)
    };
    VSOut output;
    output.position = float4(positions[vertexId], 0.0, 1.0);
    output.uv = uvs[vertexId];
    return output;
}
// Black & white pixel shader: blends the sampled color toward its Rec. 709
// luma by "amount" (0 = passthrough, 1 = fully grayscale). Alpha is untouched.
float4 main(VSOut input) : SV_Target
{
    float4 sampled = inputFrame.Sample(linearClampSampler, input.uv);
    const float3 lumaWeights = float3(0.2126, 0.7152, 0.0722);
    float luminance = dot(sampled.rgb, lumaWeights);
    sampled.rgb = lerp(sampled.rgb, luminance.xxx, saturate(amount));
    return sampled;
}

View File

@@ -1,17 +0,0 @@
{
"id": "builtin.black_white",
"name": "Black & White",
"version": 1,
"entry": "main",
"type": "fullscreen_pixel",
"parameters": [
{
"id": "amount",
"label": "Amount",
"type": "float",
"default": 1.0,
"min": 0.0,
"max": 1.0
}
]
}

View File

@@ -1,4 +0,0 @@
#pragma once
// Configure-time paths baked in by CMake's configure_file (see CMakeLists.txt).
// APP_CONTENT_DIR: absolute path to the source-tree web/ folder.
// APP_SHADER_DIR: absolute path to the source-tree shaders/ folder.
// NOTE(review): these point into the source tree, so a relocated/installed
// binary presumably relies on the copies placed next to the executable by the
// POST_BUILD step — confirm which path the loaders prefer.
#define APP_CONTENT_DIR "@APP_CONTENT_DIR@"
#define APP_SHADER_DIR "@APP_SHADER_DIR@"

View File

@@ -1,130 +0,0 @@
#include "AppState.h"
#include "JsonUtil.h"
#include <algorithm>
#include <sstream>
// Returns a copy of the current pipeline status (thread-safe snapshot).
PipelineStatus AppState::status() const
{
    std::scoped_lock lock(m_mutex);
    return m_status;
}

// Replaces the whole pipeline status under the lock.
void AppState::updateStatus(const PipelineStatus& status)
{
    std::scoped_lock lock(m_mutex);
    m_status = status;
}

// Records an error message in the status.
void AppState::setError(std::string error)
{
    std::scoped_lock lock(m_mutex);
    m_status.error = std::move(error);
}

// Clears any previously recorded error message.
void AppState::clearError()
{
    std::scoped_lock lock(m_mutex);
    m_status.error.clear();
}

// Replaces the shader catalogue.
void AppState::setShaders(std::vector<ShaderInfo> shaders)
{
    std::scoped_lock lock(m_mutex);
    m_shaders = std::move(shaders);
}

// Returns a copy of the shader catalogue (thread-safe snapshot).
std::vector<ShaderInfo> AppState::shaders() const
{
    std::scoped_lock lock(m_mutex);
    return m_shaders;
}
// Sets one parameter on the shader with the given id, clamping the value into
// the parameter's [min, max] range. Returns true only when both the shader
// and the parameter were found.
bool AppState::setShaderParameter(const std::string& shaderId, const std::string& parameterId, float value)
{
    std::scoped_lock lock(m_mutex);
    for (auto& shader : m_shaders)
    {
        if (shader.id != shaderId)
            continue;
        for (auto& parameter : shader.parameters)
        {
            if (parameter.id != parameterId)
                continue;
            parameter.value = std::clamp(value, parameter.minValue, parameter.maxValue);
            return true;
        }
    }
    return false;
}
// Looks up the current value of a shader parameter; returns `fallback` when
// the shader/parameter pair is unknown.
float AppState::shaderAmount(const std::string& shaderId, const std::string& parameterId, float fallback) const
{
    std::scoped_lock lock(m_mutex);
    for (const auto& shader : m_shaders)
    {
        if (shader.id != shaderId)
            continue;
        for (const auto& parameter : shader.parameters)
        {
            if (parameter.id == parameterId)
                return parameter.value;
        }
    }
    return fallback;
}
// Serializes the current status as a flat JSON object. All string fields go
// through jsonEscape; numeric fields use ostringstream's default formatting
// (doubles therefore print with 6 significant digits).
std::string AppState::statusJson() const
{
    std::scoped_lock lock(m_mutex);
    std::ostringstream json;
    json << "{"
        << "\"running\":" << (m_status.running ? "true" : "false") << ","
        << "\"deckLinkAvailable\":" << (m_status.deckLinkAvailable ? "true" : "false") << ","
        << "\"deviceName\":\"" << jsonEscape(m_status.deviceName) << "\","
        << "\"mode\":\"" << jsonEscape(m_status.mode) << "\","
        << "\"outputFormat\":\"" << jsonEscape(m_status.outputFormat) << "\","
        << "\"error\":\"" << jsonEscape(m_status.error) << "\","
        << "\"framesCaptured\":" << m_status.framesCaptured << ","
        << "\"framesOutput\":" << m_status.framesOutput << ","
        << "\"framesDropped\":" << m_status.framesDropped << ","
        << "\"frameRate\":" << m_status.frameRate
        << "}";
    return json.str();
}
// Serializes the shader catalogue as {"shaders":[...]} for the web UI.
// Strings go through jsonEscape; parameter floats use ostringstream default
// formatting. The "type" field of each parameter is hard-coded to "float",
// matching the only parameter type in ShaderParameter.
std::string AppState::shadersJson() const
{
    std::scoped_lock lock(m_mutex);
    std::ostringstream json;
    json << "{\"shaders\":[";
    for (size_t i = 0; i < m_shaders.size(); ++i)
    {
        const auto& shader = m_shaders[i];
        if (i != 0)
            json << ",";
        json << "{"
            << "\"id\":\"" << jsonEscape(shader.id) << "\","
            << "\"name\":\"" << jsonEscape(shader.name) << "\","
            << "\"version\":" << shader.version << ","
            << "\"type\":\"" << jsonEscape(shader.type) << "\","
            << "\"entry\":\"" << jsonEscape(shader.entry) << "\","
            << "\"parameters\":[";
        for (size_t p = 0; p < shader.parameters.size(); ++p)
        {
            const auto& parameter = shader.parameters[p];
            if (p != 0)
                json << ",";
            json << "{"
                << "\"id\":\"" << jsonEscape(parameter.id) << "\","
                << "\"label\":\"" << jsonEscape(parameter.label) << "\","
                << "\"type\":\"float\","
                << "\"value\":" << parameter.value << ","
                << "\"default\":" << parameter.defaultValue << ","
                << "\"min\":" << parameter.minValue << ","
                << "\"max\":" << parameter.maxValue
                << "}";
        }
        json << "]}";
    }
    json << "]}";
    return json.str();
}

View File

@@ -1,61 +0,0 @@
#pragma once
#include <cstdint>
#include <mutex>
#include <string>
#include <vector>
// One tunable float parameter of a shader (mirrors the manifest JSON).
struct ShaderParameter
{
    std::string id;            // stable identifier used by the web API
    std::string label;         // human-readable name for the UI
    float value = 0.0f;        // current value, clamped to [minValue, maxValue]
    float defaultValue = 0.0f;
    float minValue = 0.0f;
    float maxValue = 1.0f;
};

// Metadata for one shader, loaded from its JSON manifest.
struct ShaderInfo
{
    std::string id;       // e.g. "builtin.black_white"
    std::string name;     // display name
    int version = 1;
    std::string type;     // e.g. "fullscreen_pixel"
    std::string entry;    // pixel-shader entry point in the HLSL file
    std::string hlslPath; // path to the HLSL source on disk
    std::vector<ShaderParameter> parameters;
};

// Snapshot of the capture/processing/output pipeline for the status API.
struct PipelineStatus
{
    bool running = false;
    bool deckLinkAvailable = false;
    std::string deviceName = "No DeckLink device selected";
    std::string mode = "No signal";
    std::string outputFormat = "Unavailable";
    std::string error;            // empty when healthy
    uint64_t framesCaptured = 0;
    uint64_t framesOutput = 0;
    uint64_t framesDropped = 0;
    double frameRate = 0.0;
};

// Thread-safe shared state between the DeckLink pipeline and the web server.
// Every accessor takes m_mutex; getters return copies, never references.
class AppState
{
public:
    PipelineStatus status() const;
    void updateStatus(const PipelineStatus& status);
    void setError(std::string error);
    void clearError();
    void setShaders(std::vector<ShaderInfo> shaders);
    std::vector<ShaderInfo> shaders() const;
    // Returns true when both shader and parameter ids matched; clamps value.
    bool setShaderParameter(const std::string& shaderId, const std::string& parameterId, float value);
    // Returns the parameter's current value, or `fallback` if not found.
    float shaderAmount(const std::string& shaderId, const std::string& parameterId, float fallback) const;
    // JSON serializers for the web API.
    std::string statusJson() const;
    std::string shadersJson() const;
private:
    mutable std::mutex m_mutex; // guards m_status and m_shaders
    PipelineStatus m_status;
    std::vector<ShaderInfo> m_shaders;
};

View File

@@ -1,396 +0,0 @@
#include "D3DProcessor.h"

#include <DirectXPackedVector.h>
#include <d3dcompiler.h>

#include <algorithm>
#include <array>
#include <cstring>
#include <iterator>
#include <sstream>

namespace
{
namespace
{
// Builds a diagnostic string of the form "<action> failed: 0x<hex hr>".
std::string hresultMessage(const char* action, HRESULT hr)
{
    std::ostringstream formatted;
    const auto code = static_cast<unsigned long>(hr);
    formatted << action << " failed: 0x" << std::hex << code;
    return formatted.str();
}
// Acquires CPU-accessible bytes of a DeckLink frame via IDeckLinkVideoBuffer.
// On success the buffer stays in the StartAccess state and `buffer` holds a
// reference — the CALLER must later call buffer->EndAccess(access).
// On failure any successful StartAccess is balanced here and `error` is set.
bool frameBytes(IDeckLinkVideoFrame* frame, BMDBufferAccessFlags access, Microsoft::WRL::ComPtr<IDeckLinkVideoBuffer>& buffer, void** bytes, std::string& error)
{
    if (frame == nullptr)
    {
        error = "DeckLink frame is null";
        return false;
    }
    HRESULT hr = frame->QueryInterface(IID_IDeckLinkVideoBuffer, reinterpret_cast<void**>(buffer.GetAddressOf()));
    if (FAILED(hr) || !buffer)
    {
        error = hresultMessage("QueryInterface IDeckLinkVideoBuffer", hr);
        return false;
    }
    hr = buffer->StartAccess(access);
    if (FAILED(hr))
    {
        error = hresultMessage("IDeckLinkVideoBuffer::StartAccess", hr);
        return false;
    }
    hr = buffer->GetBytes(bytes);
    if (FAILED(hr) || *bytes == nullptr)
    {
        // Balance the successful StartAccess before bailing out.
        buffer->EndAccess(access);
        error = hresultMessage("IDeckLinkVideoBuffer::GetBytes", hr);
        return false;
    }
    return true;
}
// Compiles one entry point from an HLSL file on disk into `blob`.
// Debug builds compile with debug info and no optimization; release builds
// use optimization level 3. On failure the compiler's diagnostic text is
// returned in `error` when available, otherwise a formatted HRESULT.
bool compileShader(const std::wstring& path, const char* entry, const char* target, ID3DBlob** blob, std::string& error)
{
    Microsoft::WRL::ComPtr<ID3DBlob> errors;
    const UINT flags =
#if defined(_DEBUG)
        D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION;
#else
        D3DCOMPILE_OPTIMIZATION_LEVEL3;
#endif
    // D3D_COMPILE_STANDARD_FILE_INCLUDE resolves #include relative to the file.
    const HRESULT hr = D3DCompileFromFile(path.c_str(), nullptr, D3D_COMPILE_STANDARD_FILE_INCLUDE, entry, target, flags, 0, blob, errors.GetAddressOf());
    if (FAILED(hr))
    {
        if (errors)
            error.assign(static_cast<const char*>(errors->GetBufferPointer()), errors->GetBufferSize());
        else
            error = hresultMessage("D3DCompileFromFile", hr);
        return false;
    }
    return true;
}
// NOTE(review): despite the name this is a plain linear scale (v / 255);
// no sRGB transfer-function decode is applied.
float srgbByteToFloat(uint8_t value)
{
    return static_cast<float>(value) / 255.0f;
}

// Packs a tightly-packed float RGBA buffer into 8-bit ARGB or BGRA rows.
// Each channel is clamped to [0, 1] and rounded to nearest (+0.5); alpha is
// always written fully opaque (255), discarding the shader's alpha output.
void writePacked8(void* dst, const float* rgba, int width, int height, int rowBytes, BMDPixelFormat pixelFormat)
{
    for (int y = 0; y < height; ++y)
    {
        auto* row = static_cast<uint8_t*>(dst) + y * rowBytes;
        for (int x = 0; x < width; ++x)
        {
            const float* px = rgba + (static_cast<size_t>(y) * width + x) * 4;
            const uint8_t r = static_cast<uint8_t>(std::clamp(px[0], 0.0f, 1.0f) * 255.0f + 0.5f);
            const uint8_t g = static_cast<uint8_t>(std::clamp(px[1], 0.0f, 1.0f) * 255.0f + 0.5f);
            const uint8_t b = static_cast<uint8_t>(std::clamp(px[2], 0.0f, 1.0f) * 255.0f + 0.5f);
            if (pixelFormat == bmdFormat8BitARGB)
            {
                // bmdFormat8BitARGB: bytes in memory are A, R, G, B.
                row[x * 4 + 0] = 255;
                row[x * 4 + 1] = r;
                row[x * 4 + 2] = g;
                row[x * 4 + 3] = b;
            }
            else
            {
                // Any other format is written as B, G, R, A (bmdFormat8BitBGRA).
                row[x * 4 + 0] = b;
                row[x * 4 + 1] = g;
                row[x * 4 + 2] = r;
                row[x * 4 + 3] = 255;
            }
        }
    }
}
}
bool D3DProcessor::initialize(const ShaderInfo& shader, std::string& error)
{
std::scoped_lock lock(m_mutex);
m_shader = shader;
const D3D_FEATURE_LEVEL requestedLevels[] = { D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0 };
D3D_FEATURE_LEVEL actualLevel = D3D_FEATURE_LEVEL_11_0;
UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#if defined(_DEBUG)
flags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, flags, requestedLevels, 2, D3D11_SDK_VERSION, &m_device, &actualLevel, &m_context);
if (FAILED(hr))
{
#if defined(_DEBUG)
flags &= ~D3D11_CREATE_DEVICE_DEBUG;
hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, flags, requestedLevels, 2, D3D11_SDK_VERSION, &m_device, &actualLevel, &m_context);
#endif
if (FAILED(hr))
{
error = hresultMessage("D3D11CreateDevice", hr);
return false;
}
}
Microsoft::WRL::ComPtr<ID3DBlob> vsBlob;
if (!compileShader(std::filesystem::path(shader.hlslPath).wstring(), "fullscreen_vs", "vs_5_0", vsBlob.GetAddressOf(), error))
return false;
Microsoft::WRL::ComPtr<ID3DBlob> psBlob;
if (!compileShader(std::filesystem::path(shader.hlslPath).wstring(), shader.entry.c_str(), "ps_5_0", psBlob.GetAddressOf(), error))
return false;
hr = m_device->CreateVertexShader(vsBlob->GetBufferPointer(), vsBlob->GetBufferSize(), nullptr, &m_vertexShader);
if (FAILED(hr))
{
error = hresultMessage("CreateVertexShader", hr);
return false;
}
hr = m_device->CreatePixelShader(psBlob->GetBufferPointer(), psBlob->GetBufferSize(), nullptr, &m_pixelShader);
if (FAILED(hr))
{
error = hresultMessage("CreatePixelShader", hr);
return false;
}
D3D11_SAMPLER_DESC samplerDesc = {};
samplerDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
samplerDesc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
samplerDesc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
samplerDesc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
samplerDesc.MaxLOD = D3D11_FLOAT32_MAX;
hr = m_device->CreateSamplerState(&samplerDesc, &m_sampler);
if (FAILED(hr))
{
error = hresultMessage("CreateSamplerState", hr);
return false;
}
D3D11_BUFFER_DESC cbDesc = {};
cbDesc.ByteWidth = sizeof(FrameConstants);
cbDesc.Usage = D3D11_USAGE_DEFAULT;
cbDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
hr = m_device->CreateBuffer(&cbDesc, nullptr, &m_frameConstants);
if (FAILED(hr))
{
error = hresultMessage("CreateBuffer frame constants", hr);
return false;
}
cbDesc.ByteWidth = sizeof(ParameterConstants);
hr = m_device->CreateBuffer(&cbDesc, nullptr, &m_parameterConstants);
if (FAILED(hr))
{
error = hresultMessage("CreateBuffer parameter constants", hr);
return false;
}
return true;
}
// Runs one frame through the pipeline: (re)allocate size-dependent resources,
// upload the DeckLink frame as RGBA16F, render the shader pass, then read the
// result back into `outputFrame`. Serialized by m_mutex.
bool D3DProcessor::processFrame(IDeckLinkVideoFrame* inputFrame, float amount, BMDPixelFormat outputFormat, IDeckLinkMutableVideoFrame* outputFrame, std::string& error)
{
    std::scoped_lock lock(m_mutex);
    const int width = static_cast<int>(inputFrame->GetWidth());
    const int height = static_cast<int>(inputFrame->GetHeight());
    if (!ensureResources(width, height, error))
        return false;
    if (!uploadInput(inputFrame, error))
        return false;
    if (!render(amount, error))
        return false;
    return copyToOutput(outputFormat, outputFrame, error);
}
// (Re)creates the size-dependent GPU resources when the frame dimensions
// change: an RGBA16F input texture + SRV, an RGBA16F render target + RTV, and
// a CPU-readable staging texture for readback. No-op when the size matches.
bool D3DProcessor::ensureResources(int width, int height, std::string& error)
{
    if (width == m_width && height == m_height)
        return true;
    // Drop views before their textures, then all textures.
    m_inputSrv.Reset();
    m_outputRtv.Reset();
    m_inputTexture.Reset();
    m_outputTexture.Reset();
    m_readbackTexture.Reset();
    // One desc is reused for all three textures; only usage/bind flags vary.
    D3D11_TEXTURE2D_DESC desc = {};
    desc.Width = static_cast<UINT>(width);
    desc.Height = static_cast<UINT>(height);
    desc.MipLevels = 1;
    desc.ArraySize = 1;
    desc.Format = DXGI_FORMAT_R16G16B16A16_FLOAT; // the milestone's processing boundary
    desc.SampleDesc.Count = 1;
    desc.Usage = D3D11_USAGE_DEFAULT;
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
    HRESULT hr = m_device->CreateTexture2D(&desc, nullptr, &m_inputTexture);
    if (FAILED(hr))
    {
        error = hresultMessage("CreateTexture2D input", hr);
        return false;
    }
    hr = m_device->CreateShaderResourceView(m_inputTexture.Get(), nullptr, &m_inputSrv);
    if (FAILED(hr))
    {
        error = hresultMessage("CreateShaderResourceView input", hr);
        return false;
    }
    desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
    hr = m_device->CreateTexture2D(&desc, nullptr, &m_outputTexture);
    if (FAILED(hr))
    {
        error = hresultMessage("CreateTexture2D output", hr);
        return false;
    }
    hr = m_device->CreateRenderTargetView(m_outputTexture.Get(), nullptr, &m_outputRtv);
    if (FAILED(hr))
    {
        error = hresultMessage("CreateRenderTargetView output", hr);
        return false;
    }
    // Staging copy of the output so the CPU can map and read it back.
    desc.Usage = D3D11_USAGE_STAGING;
    desc.BindFlags = 0;
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
    hr = m_device->CreateTexture2D(&desc, nullptr, &m_readbackTexture);
    if (FAILED(hr))
    {
        error = hresultMessage("CreateTexture2D readback", hr);
        return false;
    }
    m_width = width;
    m_height = height;
    return true;
}
// Converts the DeckLink frame to RGBA16F on the CPU and uploads it to the
// input texture. 8-bit BGRA/ARGB frames are converted channel-exact; any
// other pixel format falls back to a grayscale approximation (see below).
// Assumes ensureResources() already ran, so m_width/m_height match the frame.
bool D3DProcessor::uploadInput(IDeckLinkVideoFrame* inputFrame, std::string& error)
{
    void* bytes = nullptr;
    Microsoft::WRL::ComPtr<IDeckLinkVideoBuffer> inputBuffer;
    if (!frameBytes(inputFrame, bmdBufferAccessRead, inputBuffer, &bytes, error))
        return false;
    const int rowBytes = static_cast<int>(inputFrame->GetRowBytes());
    const BMDPixelFormat pixelFormat = inputFrame->GetPixelFormat();
    std::vector<uint16_t> rgba16f(static_cast<size_t>(m_width) * m_height * 4);
    if (pixelFormat == bmdFormat8BitBGRA || pixelFormat == bmdFormat8BitARGB)
    {
        for (int y = 0; y < m_height; ++y)
        {
            const auto* row = static_cast<const uint8_t*>(bytes) + y * rowBytes;
            for (int x = 0; x < m_width; ++x)
            {
                // BGRA stores bytes B,G,R,A; ARGB stores A,R,G,B.
                const uint8_t b0 = row[x * 4 + 0];
                const uint8_t b1 = row[x * 4 + 1];
                const uint8_t b2 = row[x * 4 + 2];
                const uint8_t b3 = row[x * 4 + 3];
                const float r = pixelFormat == bmdFormat8BitBGRA ? srgbByteToFloat(b2) : srgbByteToFloat(b1);
                const float g = pixelFormat == bmdFormat8BitBGRA ? srgbByteToFloat(b1) : srgbByteToFloat(b2);
                const float b = pixelFormat == bmdFormat8BitBGRA ? srgbByteToFloat(b0) : srgbByteToFloat(b3);
                const float a = pixelFormat == bmdFormat8BitBGRA ? srgbByteToFloat(b3) : srgbByteToFloat(b0);
                const size_t out = (static_cast<size_t>(y) * m_width + x) * 4;
                rgba16f[out + 0] = DirectX::PackedVector::XMConvertFloatToHalf(r);
                rgba16f[out + 1] = DirectX::PackedVector::XMConvertFloatToHalf(g);
                rgba16f[out + 2] = DirectX::PackedVector::XMConvertFloatToHalf(b);
                rgba16f[out + 3] = DirectX::PackedVector::XMConvertFloatToHalf(a);
            }
        }
    }
    else
    {
        // Milestone fallback for YUV/10-bit modes: copy luma-ish bytes into gray until GPU Direct format converters land.
        for (int y = 0; y < m_height; ++y)
        {
            const auto* row = static_cast<const uint8_t*>(bytes) + y * rowBytes;
            for (int x = 0; x < m_width; ++x)
            {
                // x*2 presumably indexes the luma byte of 2-byte-per-pixel
                // packed YUV; clamped to the row — TODO confirm for 10-bit.
                const float v = srgbByteToFloat(row[std::min(x * 2, rowBytes - 1)]);
                const size_t out = (static_cast<size_t>(y) * m_width + x) * 4;
                rgba16f[out + 0] = DirectX::PackedVector::XMConvertFloatToHalf(v);
                rgba16f[out + 1] = DirectX::PackedVector::XMConvertFloatToHalf(v);
                rgba16f[out + 2] = DirectX::PackedVector::XMConvertFloatToHalf(v);
                rgba16f[out + 3] = DirectX::PackedVector::XMConvertFloatToHalf(1.0f);
            }
        }
    }
    inputBuffer->EndAccess(bmdBufferAccessRead);
    // Source pitch is tightly packed: width * 4 channels * 2 bytes per half.
    m_context->UpdateSubresource(m_inputTexture.Get(), 0, nullptr, rgba16f.data(), static_cast<UINT>(m_width * sizeof(uint16_t) * 4), 0);
    return true;
}
// Draws the fullscreen triangle with the compiled shader pair into the
// RGBA16F render target. Updates both constant buffers (b0 frame constants
// including the incrementing frame index, b1 shader parameters).
// Always returns true; the error out-parameter is unused.
bool D3DProcessor::render(float amount, std::string&)
{
    const FrameConstants frameConstants{ static_cast<float>(m_width), static_cast<float>(m_height), static_cast<float>(m_frameIndex++), 0.0f };
    const ParameterConstants parameterConstants{ amount };
    m_context->UpdateSubresource(m_frameConstants.Get(), 0, nullptr, &frameConstants, 0, 0);
    m_context->UpdateSubresource(m_parameterConstants.Get(), 0, nullptr, &parameterConstants, 0, 0);
    const D3D11_VIEWPORT viewport{ 0.0f, 0.0f, static_cast<float>(m_width), static_cast<float>(m_height), 0.0f, 1.0f };
    m_context->RSSetViewports(1, &viewport);
    ID3D11RenderTargetView* rtv = m_outputRtv.Get();
    m_context->OMSetRenderTargets(1, &rtv, nullptr);
    // No vertex/index buffers or input layout: the VS synthesizes the
    // triangle from SV_VertexID (see fullscreen_vs in the HLSL).
    m_context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
    m_context->VSSetShader(m_vertexShader.Get(), nullptr, 0);
    m_context->PSSetShader(m_pixelShader.Get(), nullptr, 0);
    ID3D11ShaderResourceView* srv = m_inputSrv.Get();
    ID3D11SamplerState* sampler = m_sampler.Get();
    ID3D11Buffer* frame = m_frameConstants.Get();
    ID3D11Buffer* parameters = m_parameterConstants.Get();
    m_context->PSSetShaderResources(0, 1, &srv);
    m_context->PSSetSamplers(0, 1, &sampler);
    m_context->PSSetConstantBuffers(0, 1, &frame);
    m_context->PSSetConstantBuffers(1, 1, &parameters);
    m_context->Draw(3, 0);
    // Unbind the SRV so the input texture can be updated next frame without
    // a simultaneous-bind hazard.
    ID3D11ShaderResourceView* nullSrv = nullptr;
    m_context->PSSetShaderResources(0, 1, &nullSrv);
    return true;
}
// Copies the rendered RGBA16F target into the staging texture, maps it, and
// packs it into the DeckLink output frame as 8-bit BGRA/ARGB. Any other
// output format is rejected up front.
bool D3DProcessor::copyToOutput(BMDPixelFormat outputFormat, IDeckLinkMutableVideoFrame* outputFrame, std::string& error)
{
    if (outputFormat != bmdFormat8BitBGRA && outputFormat != bmdFormat8BitARGB)
    {
        error = "Unsupported output pixel format selected";
        return false;
    }
    m_context->CopyResource(m_readbackTexture.Get(), m_outputTexture.Get());
    // Map blocks until the GPU copy has completed.
    D3D11_MAPPED_SUBRESOURCE mapped = {};
    HRESULT hr = m_context->Map(m_readbackTexture.Get(), 0, D3D11_MAP_READ, 0, &mapped);
    if (FAILED(hr))
    {
        error = hresultMessage("Map readback texture", hr);
        return false;
    }
    void* outBytes = nullptr;
    Microsoft::WRL::ComPtr<IDeckLinkVideoBuffer> outputBuffer;
    if (!frameBytes(outputFrame, bmdBufferAccessWrite, outputBuffer, &outBytes, error))
    {
        m_context->Unmap(m_readbackTexture.Get(), 0);
        return false;
    }
    // Always true given the guard above; kept for when more formats land.
    if (outputFormat == bmdFormat8BitBGRA || outputFormat == bmdFormat8BitARGB)
    {
        // Expand half floats to float, honoring the staging RowPitch (rows
        // may be padded), then pack to 8-bit with writePacked8.
        std::vector<float> rgba(static_cast<size_t>(m_width) * m_height * 4);
        for (int y = 0; y < m_height; ++y)
        {
            const auto* src = reinterpret_cast<const uint16_t*>(static_cast<const uint8_t*>(mapped.pData) + y * mapped.RowPitch);
            for (int x = 0; x < m_width * 4; ++x)
                rgba[static_cast<size_t>(y) * m_width * 4 + x] = DirectX::PackedVector::XMConvertHalfToFloat(src[x]);
        }
        writePacked8(outBytes, rgba.data(), m_width, m_height, static_cast<int>(outputFrame->GetRowBytes()), outputFormat);
    }
    outputBuffer->EndAccess(bmdBufferAccessWrite);
    m_context->Unmap(m_readbackTexture.Get(), 0);
    return true;
}

View File

@@ -1,58 +0,0 @@
#pragma once
#include "AppState.h"
#include "DeckLinkApi.h"
#include <d3d11.h>
#include <wrl/client.h>
#include <cstdint>
#include <filesystem>
#include <mutex>
#include <vector>
// CPU-bridged D3D11 shader processor: converts DeckLink frames to RGBA16F,
// runs a fullscreen HLSL pass, and reads the result back into a DeckLink
// output frame. initialize() and processFrame() are serialized by m_mutex.
class D3DProcessor
{
public:
    // Creates the device and compiles the shaders; must succeed before
    // processFrame() is called.
    bool initialize(const ShaderInfo& shader, std::string& error);
    // Processes one frame; `amount` feeds the ShaderParameters cbuffer (b1).
    bool processFrame(IDeckLinkVideoFrame* inputFrame, float amount, BMDPixelFormat outputFormat, IDeckLinkMutableVideoFrame* outputFrame, std::string& error);
private:
    // Mirrors cbuffer FrameConstants (register b0) in the HLSL; 16 bytes.
    struct FrameConstants
    {
        float width = 0.0f;
        float height = 0.0f;
        float frameIndex = 0.0f;
        float reserved = 0.0f; // pad to 16 bytes
    };
    // Mirrors cbuffer ShaderParameters (register b1); padded to 16 bytes.
    struct ParameterConstants
    {
        float amount = 1.0f;
        float padding[3] = {};
    };
    bool ensureResources(int width, int height, std::string& error); // size-dependent textures
    bool uploadInput(IDeckLinkVideoFrame* inputFrame, std::string& error); // CPU convert + upload
    bool render(float amount, std::string& error); // fullscreen draw
    bool copyToOutput(BMDPixelFormat outputFormat, IDeckLinkMutableVideoFrame* outputFrame, std::string& error); // readback + pack
    std::mutex m_mutex;        // serializes initialize/processFrame
    uint64_t m_frameIndex = 0; // incremented once per rendered frame
    int m_width = 0;           // current resource dimensions (0 = none yet)
    int m_height = 0;
    ShaderInfo m_shader;       // manifest of the active shader
    Microsoft::WRL::ComPtr<ID3D11Device> m_device;
    Microsoft::WRL::ComPtr<ID3D11DeviceContext> m_context;
    Microsoft::WRL::ComPtr<ID3D11VertexShader> m_vertexShader;
    Microsoft::WRL::ComPtr<ID3D11PixelShader> m_pixelShader;
    Microsoft::WRL::ComPtr<ID3D11SamplerState> m_sampler;
    Microsoft::WRL::ComPtr<ID3D11Buffer> m_frameConstants;     // b0
    Microsoft::WRL::ComPtr<ID3D11Buffer> m_parameterConstants; // b1
    Microsoft::WRL::ComPtr<ID3D11Texture2D> m_inputTexture;    // RGBA16F source
    Microsoft::WRL::ComPtr<ID3D11ShaderResourceView> m_inputSrv;
    Microsoft::WRL::ComPtr<ID3D11Texture2D> m_outputTexture;   // RGBA16F render target
    Microsoft::WRL::ComPtr<ID3D11RenderTargetView> m_outputRtv;
    Microsoft::WRL::ComPtr<ID3D11Texture2D> m_readbackTexture; // CPU staging copy
};

View File

@@ -1,6 +0,0 @@
#pragma once
#include <Unknwn.h>
#include <combaseapi.h>
#include <DeckLinkAPI_h.h>

View File

@@ -1,563 +0,0 @@
#include "DeckLinkPipeline.h"
#include <comdef.h>
#include <sstream>
namespace
{
// Builds "<action> failed: 0x<hex hr>" for status/error strings.
std::string hrText(const char* action, HRESULT hr)
{
    std::ostringstream text;
    const auto code = static_cast<unsigned long>(hr);
    text << action << " failed: 0x" << std::hex << code;
    return text.str();
}
// Human-readable label for a BMD pixel format (shown in the status UI).
std::string pixelFormatName(BMDPixelFormat format)
{
    switch (format)
    {
    case bmdFormat8BitYUV:  return "8-bit YUV";
    case bmdFormat10BitYUV: return "10-bit YUV";
    case bmdFormat8BitARGB: return "8-bit ARGB";
    case bmdFormat8BitBGRA: return "8-bit BGRA";
    case bmdFormat10BitRGB: return "10-bit RGB";
    default:                return "Unknown";
    }
}
// Maps the detection flags from VideoInputFormatChanged to the pixel format
// the input should be re-enabled with: RGB vs YUV crossed with 8/10-bit.
BMDPixelFormat detectedInputFormat(BMDDetectedVideoInputFormatFlags flags)
{
    const bool rgb = (flags & bmdDetectedVideoInputRGB444) != 0;
    if (rgb)
    {
        if (flags & bmdDetectedVideoInput10BitDepth)
            return bmdFormat10BitRGB;
        return bmdFormat8BitARGB;
    }
    if (flags & bmdDetectedVideoInput10BitDepth)
        return bmdFormat10BitYUV;
    return bmdFormat8BitYUV;
}
}
// Capture callback: forwards driver events to the owning pipeline.
DeckLinkInputCallback::DeckLinkInputCallback(DeckLinkPipeline& pipeline) : m_pipeline(pipeline) {}

// Minimal COM QueryInterface: exposes IUnknown and IDeckLinkInputCallback only.
HRESULT DeckLinkInputCallback::QueryInterface(REFIID iid, LPVOID* ppv)
{
    if (ppv == nullptr)
        return E_POINTER;
    if (iid == IID_IUnknown || iid == IID_IDeckLinkInputCallback)
    {
        *ppv = static_cast<IDeckLinkInputCallback*>(this);
        AddRef();
        return S_OK;
    }
    *ppv = nullptr;
    return E_NOINTERFACE;
}

// Manual COM ref counting over m_refCount (declared in the header, not shown
// here — presumably atomic; confirm).
ULONG DeckLinkInputCallback::AddRef()
{
    return ++m_refCount;
}

// Self-deletes when the last reference is released, per COM convention.
ULONG DeckLinkInputCallback::Release()
{
    const ULONG count = --m_refCount;
    if (count == 0)
        delete this;
    return count;
}

// Driver notification that the detected input format changed; forwarded to
// the pipeline, which reconfigures input/output.
HRESULT DeckLinkInputCallback::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents, IDeckLinkDisplayMode* mode, BMDDetectedVideoInputFormatFlags flags)
{
    m_pipeline.onFormatChanged(mode, flags);
    return S_OK;
}

// Per-frame capture notification; the audio packet is ignored (Milestone 1 is
// video-only).
HRESULT DeckLinkInputCallback::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket*)
{
    if (videoFrame)
        m_pipeline.onFrameArrived(videoFrame);
    return S_OK;
}
// Playback callback: forwards scheduled-frame completion to the pipeline.
DeckLinkOutputCallback::DeckLinkOutputCallback(DeckLinkPipeline& pipeline) : m_pipeline(pipeline) {}

// Minimal COM QueryInterface: exposes IUnknown and IDeckLinkVideoOutputCallback.
HRESULT DeckLinkOutputCallback::QueryInterface(REFIID iid, LPVOID* ppv)
{
    if (ppv == nullptr)
        return E_POINTER;
    if (iid == IID_IUnknown || iid == IID_IDeckLinkVideoOutputCallback)
    {
        *ppv = static_cast<IDeckLinkVideoOutputCallback*>(this);
        AddRef();
        return S_OK;
    }
    *ppv = nullptr;
    return E_NOINTERFACE;
}

// Manual COM ref counting over m_refCount (declared in the header, not shown
// here — presumably atomic; confirm).
ULONG DeckLinkOutputCallback::AddRef()
{
    return ++m_refCount;
}

// Self-deletes when the last reference is released, per COM convention.
ULONG DeckLinkOutputCallback::Release()
{
    const ULONG count = --m_refCount;
    if (count == 0)
        delete this;
    return count;
}

// Forwards the completion result (e.g. displayed/dropped) to the pipeline.
HRESULT DeckLinkOutputCallback::ScheduledFrameCompleted(IDeckLinkVideoFrame*, BMDOutputFrameCompletionResult result)
{
    m_pipeline.onOutputFrameCompleted(result);
    return S_OK;
}

// Nothing to do when scheduled playback stops.
HRESULT DeckLinkOutputCallback::ScheduledPlaybackHasStopped()
{
    return S_OK;
}
// The pipeline borrows (does not own) the shared app state and the processor.
DeckLinkPipeline::DeckLinkPipeline(AppState& appState, D3DProcessor& processor) : m_appState(appState), m_processor(processor) {}

// Stops streams/playback and detaches callbacks before destruction.
DeckLinkPipeline::~DeckLinkPipeline()
{
    stop();
}
// Selects a DeckLink device and wires up the input/output callbacks.
// Does not start streaming — see start().
bool DeckLinkPipeline::initialize(std::string& error)
{
    std::scoped_lock lock(m_mutex);
    if (!selectDevice(error))
        return false;
    // Attach() adopts the freshly allocated callbacks without an extra AddRef.
    m_inputCallback.Attach(new DeckLinkInputCallback(*this));
    m_outputCallback.Attach(new DeckLinkOutputCallback(*this));
    m_input->SetCallback(m_inputCallback.Get());
    m_output->SetScheduledFrameCompletionCallback(m_outputCallback.Get());
    m_status.deckLinkAvailable = true;
    m_status.deviceName = deckLinkName(m_deckLink.Get());
    updateStatusLocked();
    m_initialized = true;
    return true;
}
// Configures input/output for a startup display mode (8-bit YUV until format
// detection reports otherwise) and starts the capture streams.
// NOTE(review): the lazy `!m_initialized` path only calls selectDevice() and
// never attaches the callbacks that initialize() sets up — confirm start() is
// only ever called after initialize().
bool DeckLinkPipeline::start(std::string& error)
{
    std::scoped_lock lock(m_mutex);
    if (!m_initialized && !selectDevice(error))
        return false;
    Microsoft::WRL::ComPtr<IDeckLinkDisplayMode> startupMode;
    if (!findStartupMode(startupMode, error))
        return false;
    if (!configureForMode(startupMode.Get(), bmdFormat8BitYUV, error))
        return false;
    HRESULT hr = m_input->StartStreams();
    if (FAILED(hr))
    {
        error = hrText("StartStreams", hr);
        return false;
    }
    m_status.running = true;
    m_status.error.clear();
    updateStatusLocked();
    return true;
}
// Stops capture and playback, disables both directions, and detaches the
// callbacks. Safe to call repeatedly and when never started (null checks).
void DeckLinkPipeline::stop()
{
    std::scoped_lock lock(m_mutex);
    if (m_input)
    {
        m_input->StopStreams();
        m_input->DisableVideoInput();
        m_input->SetCallback(nullptr);
    }
    if (m_output)
    {
        m_output->StopScheduledPlayback(0, nullptr, 0);
        m_output->DisableVideoOutput();
        m_output->SetScheduledFrameCompletionCallback(nullptr);
    }
    m_streamStarted = false;
    m_totalScheduled = 0;
    m_status.running = false;
    updateStatusLocked();
}
// Handler for the driver's format-change notification: reconfigures for the
// newly detected mode/pixel format and restarts capture if we were running.
// NOTE(review): the DeckLink SDK's format-change flow typically pauses and
// flushes the streams before re-enabling input; assuming configureForMode
// (not visible here) performs that sequence — confirm.
void DeckLinkPipeline::onFormatChanged(IDeckLinkDisplayMode* mode, BMDDetectedVideoInputFormatFlags flags)
{
    std::scoped_lock lock(m_mutex);
    std::string error;
    const BMDPixelFormat inputFormat = detectedInputFormat(flags);
    if (!configureForMode(mode, inputFormat, error))
    {
        m_status.error = error;
        m_status.framesDropped++;
    }
    else if (m_status.running)
    {
        HRESULT hr = m_input->StartStreams();
        if (FAILED(hr))
        {
            m_status.error = hrText("StartStreams after format change", hr);
            m_status.framesDropped++;
        }
    }
    updateStatusLocked();
}
// Per-frame worker: normalise the captured frame to 8-bit ARGB, run the
// shader, convert to the negotiated output format if it differs, and display
// the result synchronously. Any failure records the error, counts the frame
// as dropped, and publishes status before returning.
//
// Changes from the original: the ARGB row pitch is computed once (it was
// queried twice with identical arguments), and the repeated
// error/drop/publish boilerplate is folded into one local failure path.
void DeckLinkPipeline::onFrameArrived(IDeckLinkVideoInputFrame* videoFrame)
{
    std::scoped_lock lock(m_mutex);
    if (!m_status.running || !m_output || !m_activeMode)
        return;

    // Shared failure path: record the error, count the drop, publish status.
    const auto dropFrame = [this](std::string message)
    {
        m_status.error = std::move(message);
        m_status.framesDropped++;
        updateStatusLocked();
    };

    if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
    {
        m_status.mode = "No signal";
        m_status.framesDropped++;
        updateStatusLocked();
        return;
    }
    m_status.framesCaptured++;

    constexpr BMDPixelFormat processingFormat = bmdFormat8BitARGB;
    const auto width = videoFrame->GetWidth();
    const auto height = videoFrame->GetHeight();

    // Row pitch for the ARGB processing format. Computed once and reused for
    // both the converted-input frame and the processing frame (same format,
    // same width).
    int processingRowBytes = 0;
    HRESULT hr = m_output->RowBytesForPixelFormat(processingFormat, static_cast<int>(width), &processingRowBytes);
    if (FAILED(hr))
        return dropFrame(hrText("RowBytesForPixelFormat", hr));

    // Stage 1: normalise the captured frame to the ARGB processing format.
    IDeckLinkVideoFrame* sourceFrame = videoFrame;
    Microsoft::WRL::ComPtr<IDeckLinkMutableVideoFrame> convertedInputFrame;
    if (videoFrame->GetPixelFormat() != processingFormat)
    {
        if (!m_converter)
            return dropFrame("DeckLink video converter is unavailable for input conversion");
        hr = m_output->CreateVideoFrame(width, height, processingRowBytes, processingFormat, bmdFrameFlagDefault, &convertedInputFrame);
        if (FAILED(hr))
            return dropFrame(hrText("CreateVideoFrame input conversion", hr));
        hr = m_converter->ConvertFrame(videoFrame, convertedInputFrame.Get());
        if (FAILED(hr))
            return dropFrame(hrText("ConvertFrame input", hr));
        // convertedInputFrame keeps the frame alive while sourceFrame points at it.
        sourceFrame = convertedInputFrame.Get();
    }

    // Stage 2: run the shader into a fresh ARGB frame.
    Microsoft::WRL::ComPtr<IDeckLinkMutableVideoFrame> processingFrame;
    hr = m_output->CreateVideoFrame(width, height, processingRowBytes, processingFormat, bmdFrameFlagDefault, &processingFrame);
    if (FAILED(hr))
        return dropFrame(hrText("CreateVideoFrame", hr));
    std::string error;
    const float amount = m_appState.shaderAmount("builtin.black_white", "amount", 1.0f);
    if (!m_processor.processFrame(sourceFrame, amount, processingFormat, processingFrame.Get(), error))
        return dropFrame(error);

    // Stage 3: convert to the negotiated output pixel format if it differs.
    Microsoft::WRL::ComPtr<IDeckLinkMutableVideoFrame> outputFrame = processingFrame;
    if (m_outputFormat != processingFormat)
    {
        if (!m_converter)
            return dropFrame("DeckLink video converter is unavailable for preferred output format");
        int outputRowBytes = 0;
        hr = m_output->RowBytesForPixelFormat(m_outputFormat, static_cast<int>(width), &outputRowBytes);
        if (FAILED(hr))
            return dropFrame(hrText("RowBytesForPixelFormat preferred output", hr));
        hr = m_output->CreateVideoFrame(width, height, outputRowBytes, m_outputFormat, bmdFrameFlagDefault, &outputFrame);
        if (FAILED(hr))
            return dropFrame(hrText("CreateVideoFrame preferred output", hr));
        hr = m_converter->ConvertFrame(processingFrame.Get(), outputFrame.Get());
        if (FAILED(hr))
            return dropFrame(hrText("ConvertFrame preferred output", hr));
    }

    hr = m_output->DisplayVideoFrameSync(outputFrame.Get());
    if (FAILED(hr))
        return dropFrame(hrText("DisplayVideoFrameSync", hr));
    m_status.framesOutput++;
    updateStatusLocked();
}
// Playback completion callback: anything other than a clean completion
// (late, dropped, flushed) is counted as a dropped frame.
void DeckLinkPipeline::onOutputFrameCompleted(BMDOutputFrameCompletionResult result)
{
    if (result == bmdOutputFrameCompleted)
        return;
    std::scoped_lock lock(m_mutex);
    m_status.framesDropped++;
    updateStatusLocked();
}
bool DeckLinkPipeline::selectDevice(std::string& error)
{
Microsoft::WRL::ComPtr<IDeckLinkIterator> iterator;
IDeckLinkIterator* rawIterator = nullptr;
HRESULT hr = CoCreateInstance(CLSID_CDeckLinkIterator, nullptr, CLSCTX_ALL, IID_IDeckLinkIterator, reinterpret_cast<void**>(&rawIterator));
iterator.Attach(rawIterator);
if (FAILED(hr))
{
error = "Unable to create DeckLink iterator. Install Blackmagic Desktop Video drivers.";
return false;
}
Microsoft::WRL::ComPtr<IDeckLink> candidate;
while (iterator->Next(&candidate) == S_OK)
{
Microsoft::WRL::ComPtr<IDeckLinkInput> input;
Microsoft::WRL::ComPtr<IDeckLinkOutput> output;
Microsoft::WRL::ComPtr<IDeckLinkConfiguration> configuration;
if (SUCCEEDED(candidate.As(&input)) && SUCCEEDED(candidate.As(&output)))
{
m_deckLink = candidate;
m_input = input;
m_output = output;
candidate.As(&configuration);
m_configuration = configuration;
IDeckLinkVideoConversion* rawConverter = nullptr;
if (SUCCEEDED(CoCreateInstance(CLSID_CDeckLinkVideoConversion, nullptr, CLSCTX_ALL, IID_IDeckLinkVideoConversion, reinterpret_cast<void**>(&rawConverter))))
m_converter.Attach(rawConverter);
return true;
}
candidate.Reset();
}
error = "No DeckLink device with both input and output support was found";
return false;
}
// Pick an initial input display mode before the first signal arrives:
// probe a preference-ordered list of common 1080 modes, and if none is
// supported fall back to the first mode the device enumerates.
bool DeckLinkPipeline::findStartupMode(Microsoft::WRL::ComPtr<IDeckLinkDisplayMode>& selectedMode, std::string& error)
{
    static constexpr BMDDisplayMode kPreferredModes[] = {
        bmdModeHD1080p5994,
        bmdModeHD1080p6000,
        bmdModeHD1080p2997,
        bmdModeHD1080p30,
        bmdModeHD1080i5994,
        bmdModeHD1080i6000,
    };
    for (const BMDDisplayMode candidate : kPreferredModes)
    {
        BOOL supported = FALSE;
        BMDDisplayMode actualMode = bmdModeUnknown;
        const HRESULT supportHr = m_input->DoesSupportVideoMode(bmdVideoConnectionUnspecified, candidate, bmdFormat8BitYUV, bmdNoVideoInputConversion, bmdSupportedVideoModeDefault, &actualMode, &supported);
        if (supportHr != S_OK || !supported)
            continue;
        if (m_input->GetDisplayMode(candidate, &selectedMode) == S_OK && selectedMode)
            return true;
    }
    // Fallback: first mode reported by the device.
    Microsoft::WRL::ComPtr<IDeckLinkDisplayModeIterator> iterator;
    const HRESULT hr = m_input->GetDisplayModeIterator(&iterator);
    if (FAILED(hr))
    {
        error = hrText("GetDisplayModeIterator", hr);
        return false;
    }
    if (iterator->Next(&selectedMode) != S_OK || !selectedMode)
    {
        error = "Selected DeckLink device did not report any input display modes";
        return false;
    }
    return true;
}
// Reconfigure both directions of the pipeline for a (possibly new) display
// mode: tear everything down, negotiate an output pixel format, re-enable
// output then input, and refresh the published status fields.
// Returns false with `error` set when the mode cannot be used.
bool DeckLinkPipeline::configureForMode(IDeckLinkDisplayMode* mode, BMDPixelFormat inputFormat, std::string& error)
{
    if (!mode)
    {
        error = "Cannot configure DeckLink pipeline without a display mode";
        return false;
    }
    // Stop and disable both directions before switching; return codes are
    // deliberately ignored (the streams may not have been running).
    m_input->StopStreams();
    m_input->DisableVideoInput();
    m_output->StopScheduledPlayback(0, nullptr, 0);
    m_output->DisableVideoOutput();
    m_streamStarted = false;
    m_totalScheduled = 0;
    BMDPixelFormat outputFormat = bmdFormat8BitARGB;
    if (!chooseOutputFormat(mode->GetDisplayMode(), outputFormat))
    {
        error = "Selected DeckLink device does not support 10-bit RGB, 8-bit ARGB, 10-bit YUV, or 8-bit YUV output for the detected mode";
        return false;
    }
    // Request 4:4:4 SDI output when an RGB format was chosen.
    if (m_configuration)
        m_configuration->SetFlag(bmdDeckLinkConfig444SDIVideoOutput, outputFormat == bmdFormat10BitRGB || outputFormat == bmdFormat8BitARGB);
    HRESULT hr = m_output->EnableVideoOutput(mode->GetDisplayMode(), bmdVideoOutputFlagDefault);
    if (FAILED(hr))
    {
        error = hrText("EnableVideoOutput", hr);
        return false;
    }
    // Keep automatic format detection on so future signal changes are reported.
    hr = m_input->EnableVideoInput(mode->GetDisplayMode(), inputFormat, bmdVideoInputEnableFormatDetection);
    if (FAILED(hr))
    {
        error = hrText("EnableVideoInput", hr);
        return false;
    }
    mode->GetFrameRate(&m_frameDuration, &m_frameTimescale);
    m_activeMode = mode;
    m_inputFormat = inputFormat;
    m_outputFormat = outputFormat;
    m_status.mode = displayModeName(mode);
    m_status.outputFormat = pixelFormatName(outputFormat);
    // timescale/duration gives the frame rate; interlaced modes report the
    // field rate (2x) to users.
    const double frameRate = m_frameDuration > 0 ? static_cast<double>(m_frameTimescale) / static_cast<double>(m_frameDuration) : 0.0;
    const BMDFieldDominance fieldDominance = mode->GetFieldDominance();
    m_status.frameRate = (fieldDominance == bmdUpperFieldFirst || fieldDominance == bmdLowerFieldFirst) ? frameRate * 2.0 : frameRate;
    return true;
}
// Negotiate the playback pixel format for `displayMode`, probing candidates
// in preference order (8-bit ARGB first — the processing format — then
// 10-bit YUV, 8-bit YUV, 10-bit RGB). Writes the first supported format to
// `outputFormat` and returns true; returns false when none is supported.
//
// Change from the original: the four copy-pasted probe stanzas are replaced
// by a single table-driven loop with identical probe order and arguments.
bool DeckLinkPipeline::chooseOutputFormat(BMDDisplayMode displayMode, BMDPixelFormat& outputFormat)
{
    static constexpr BMDPixelFormat kCandidates[] = {
        bmdFormat8BitARGB,
        bmdFormat10BitYUV,
        bmdFormat8BitYUV,
        bmdFormat10BitRGB,
    };
    for (const BMDPixelFormat candidate : kCandidates)
    {
        BOOL supported = FALSE;
        BMDDisplayMode actualMode = bmdModeUnknown;
        if (m_output->DoesSupportVideoMode(bmdVideoConnectionUnspecified, displayMode, candidate, bmdNoVideoOutputConversion, bmdSupportedVideoModeDefault, &actualMode, &supported) == S_OK && supported)
        {
            outputFormat = candidate;
            return true;
        }
    }
    return false;
}
// Publish the current status snapshot to the shared application state.
// The "Locked" suffix indicates the caller is expected to hold m_mutex
// (every visible call site acquires it first).
void DeckLinkPipeline::updateStatusLocked()
{
    m_appState.updateStatus(m_status);
}
// Resolve a human-readable device name, with fixed fallbacks when no device
// is selected or the device refuses to report a name.
std::string DeckLinkPipeline::deckLinkName(IDeckLink* deckLink) const
{
    if (deckLink == nullptr)
        return "No DeckLink device selected";
    BSTR name = nullptr;
    const bool haveName = deckLink->GetDisplayName(&name) == S_OK && name != nullptr;
    if (!haveName)
        return "DeckLink device";
    // _bstr_t with fCopy=false takes ownership and frees the BSTR on scope exit.
    const _bstr_t owned(name, false);
    return static_cast<const char*>(owned);
}
// Resolve the display mode's human-readable name, or "Unknown mode" when
// the mode is null or does not report one.
std::string DeckLinkPipeline::displayModeName(IDeckLinkDisplayMode* mode) const
{
    if (mode == nullptr)
        return "Unknown mode";
    BSTR name = nullptr;
    const bool haveName = mode->GetName(&name) == S_OK && name != nullptr;
    if (!haveName)
        return "Unknown mode";
    // _bstr_t with fCopy=false takes ownership and frees the BSTR on scope exit.
    const _bstr_t owned(name, false);
    return static_cast<const char*>(owned);
}

View File

@@ -1,93 +0,0 @@
#pragma once
#include "AppState.h"
#include "D3DProcessor.h"
#include "DeckLinkApi.h"
#include <wrl/client.h>
#include <atomic>
#include <mutex>
#include <string>
class DeckLinkPipeline;
// COM callback shim that forwards DeckLink capture events to the owning
// DeckLinkPipeline. Lifetime follows standard COM AddRef/Release counting,
// starting at 1 for the creating owner. The pipeline reference is
// non-owning; the pipeline is expected to outlive the callback.
class DeckLinkInputCallback final : public IDeckLinkInputCallback
{
public:
    explicit DeckLinkInputCallback(DeckLinkPipeline& pipeline);
    HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID* ppv) override;
    ULONG STDMETHODCALLTYPE AddRef() override;
    ULONG STDMETHODCALLTYPE Release() override;
    // Invoked by the driver when input format detection reports a change.
    HRESULT STDMETHODCALLTYPE VideoInputFormatChanged(BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode* mode, BMDDetectedVideoInputFormatFlags flags) override;
    // Invoked by the driver for each captured frame / audio packet.
    HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioPacket) override;
private:
    std::atomic<ULONG> m_refCount = 1;  // COM reference count
    DeckLinkPipeline& m_pipeline;       // non-owning back-reference
};
// COM callback shim that forwards DeckLink playback events to the owning
// DeckLinkPipeline. Same lifetime model as DeckLinkInputCallback: COM
// reference counting starting at 1, non-owning pipeline back-reference.
class DeckLinkOutputCallback final : public IDeckLinkVideoOutputCallback
{
public:
    explicit DeckLinkOutputCallback(DeckLinkPipeline& pipeline);
    HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID* ppv) override;
    ULONG STDMETHODCALLTYPE AddRef() override;
    ULONG STDMETHODCALLTYPE Release() override;
    // Invoked by the driver when a scheduled frame finishes (or is dropped/late).
    HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result) override;
    HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped() override;
private:
    std::atomic<ULONG> m_refCount = 1;  // COM reference count
    DeckLinkPipeline& m_pipeline;       // non-owning back-reference
};
// Owns the capture -> shader-process -> playback loop for a single DeckLink
// device. Driver callbacks and the public control API serialize on m_mutex.
class DeckLinkPipeline
{
public:
    DeckLinkPipeline(AppState& appState, D3DProcessor& processor);
    ~DeckLinkPipeline();
    // Select a device and prepare it; false + `error` when unavailable.
    bool initialize(std::string& error);
    // Begin capture/playback; false + `error` on failure.
    bool start(std::string& error);
    void stop();
    // Driver notifications, forwarded by the callback shims above.
    void onFormatChanged(IDeckLinkDisplayMode* mode, BMDDetectedVideoInputFormatFlags flags);
    void onFrameArrived(IDeckLinkVideoInputFrame* videoFrame);
    void onOutputFrameCompleted(BMDOutputFrameCompletionResult result);
private:
    friend class DeckLinkInputCallback;
    friend class DeckLinkOutputCallback;
    bool selectDevice(std::string& error);  // first device with input+output
    bool findStartupMode(Microsoft::WRL::ComPtr<IDeckLinkDisplayMode>& selectedMode, std::string& error);
    bool configureForMode(IDeckLinkDisplayMode* mode, BMDPixelFormat inputFormat, std::string& error);
    bool chooseOutputFormat(BMDDisplayMode displayMode, BMDPixelFormat& outputFormat);
    void updateStatusLocked();              // publish m_status; call with m_mutex held
    std::string deckLinkName(IDeckLink* deckLink) const;
    std::string displayModeName(IDeckLinkDisplayMode* mode) const;
    AppState& m_appState;
    D3DProcessor& m_processor;
    mutable std::recursive_mutex m_mutex;   // guards all mutable state below
    Microsoft::WRL::ComPtr<IDeckLink> m_deckLink;
    Microsoft::WRL::ComPtr<IDeckLinkInput> m_input;
    Microsoft::WRL::ComPtr<IDeckLinkOutput> m_output;
    Microsoft::WRL::ComPtr<IDeckLinkConfiguration> m_configuration;  // optional; may be null
    Microsoft::WRL::ComPtr<IDeckLinkVideoConversion> m_converter;    // optional; may be null
    Microsoft::WRL::ComPtr<IDeckLinkDisplayMode> m_activeMode;
    Microsoft::WRL::ComPtr<DeckLinkInputCallback> m_inputCallback;
    Microsoft::WRL::ComPtr<DeckLinkOutputCallback> m_outputCallback;
    PipelineStatus m_status;                              // published via updateStatusLocked
    BMDPixelFormat m_inputFormat = bmdFormat8BitYUV;      // current capture format
    BMDPixelFormat m_outputFormat = bmdFormat8BitARGB;    // negotiated playback format
    BMDTimeValue m_frameDuration = 1001;                  // with m_frameTimescale: 29.97fps default
    BMDTimeScale m_frameTimescale = 30000;
    uint64_t m_totalScheduled = 0;  // NOTE(review): appears tied to scheduled playback; confirm use
    bool m_initialized = false;
    bool m_streamStarted = false;
};

View File

@@ -1,29 +0,0 @@
#include "FileUtil.h"
#include <algorithm>
#include <cctype>
#include <fstream>
#include <sstream>
// Slurp an entire file as raw bytes (binary mode, no newline translation).
// Returns std::nullopt when the file cannot be opened.
std::optional<std::string> readTextFile(const std::filesystem::path& path)
{
    std::ifstream stream(path, std::ios::binary);
    if (!stream)
        return std::nullopt;
    std::ostringstream contents;
    contents << stream.rdbuf();
    return contents.str();
}
// Map a file extension to the HTTP Content-Type to serve it with.
// Fix over the original: the comparison is now case-insensitive, so
// e.g. "INDEX.HTML" is served as text/html instead of octet-stream.
// Unrecognised extensions are treated as opaque binary.
std::string contentTypeForPath(const std::filesystem::path& path)
{
    std::string ext = path.extension().string();
    std::transform(ext.begin(), ext.end(), ext.begin(), [](unsigned char ch) { return static_cast<char>(std::tolower(ch)); });
    if (ext == ".html")
        return "text/html; charset=utf-8";
    if (ext == ".css")
        return "text/css; charset=utf-8";
    if (ext == ".js")
        return "application/javascript; charset=utf-8";
    if (ext == ".json")
        return "application/json; charset=utf-8";
    return "application/octet-stream";
}

View File

@@ -1,8 +0,0 @@
#pragma once
#include <filesystem>
#include <optional>
#include <string>
std::optional<std::string> readTextFile(const std::filesystem::path& path);
std::string contentTypeForPath(const std::filesystem::path& path);

View File

@@ -1,48 +0,0 @@
#include "JsonUtil.h"
#include <charconv>
// Escape the characters that must not appear raw inside a JSON string
// literal: backslash, double quote, and the common control characters
// newline, carriage return, and tab. Everything else passes through.
std::string jsonEscape(const std::string& value)
{
    std::string escaped;
    escaped.reserve(value.size());
    for (const char ch : value)
    {
        if (ch == '\\')
            escaped.append("\\\\");
        else if (ch == '"')
            escaped.append("\\\"");
        else if (ch == '\n')
            escaped.append("\\n");
        else if (ch == '\r')
            escaped.append("\\r");
        else if (ch == '\t')
            escaped.append("\\t");
        else
            escaped.push_back(ch);
    }
    return escaped;
}
// Minimal scan for `"key": <number>` in a JSON-ish body. Deliberately not a
// full JSON parser: it matches the first occurrence of the quoted key
// anywhere in the text, skips whitespace after the colon, and parses the
// following run of numeric characters. Returns nullopt when the key is
// absent or no number follows it.
std::optional<float> findJsonFloat(const std::string& body, const std::string& key)
{
    const std::string quotedKey = "\"" + key + "\"";
    const size_t keyPos = body.find(quotedKey);
    if (keyPos == std::string::npos)
        return std::nullopt;
    size_t cursor = body.find(':', keyPos + quotedKey.size());
    if (cursor == std::string::npos)
        return std::nullopt;
    ++cursor;
    // Skip whitespace between the colon and the value.
    while (cursor < body.size() && (body[cursor] == ' ' || body[cursor] == '\t' || body[cursor] == '\r' || body[cursor] == '\n'))
        ++cursor;
    // Collect the span of characters that can form a decimal/scientific number.
    const auto isNumberChar = [](char ch)
    {
        return ch == '-' || ch == '+' || ch == '.' || ch == 'e' || ch == 'E' || (ch >= '0' && ch <= '9');
    };
    size_t last = cursor;
    while (last < body.size() && isNumberChar(body[last]))
        ++last;
    float parsed = 0.0f;
    const auto result = std::from_chars(body.data() + cursor, body.data() + last, parsed);
    if (result.ec != std::errc())
        return std::nullopt;
    return parsed;
}

View File

@@ -1,7 +0,0 @@
#pragma once
#include <optional>
#include <string>
std::string jsonEscape(const std::string& value);
std::optional<float> findJsonFloat(const std::string& body, const std::string& key);

View File

@@ -1,57 +0,0 @@
#include "ShaderLibrary.h"
#include "FileUtil.h"
#include "JsonUtil.h"
#include <stdexcept>
namespace
{
// Locate `"key": "value"` in a JSON-ish body and return the value, or
// `fallback` when the key (or a quoted value after it) cannot be found.
// Deliberately not a full JSON parser — sufficient for the shader metadata.
std::string findString(const std::string& body, const std::string& key, const std::string& fallback)
{
    const std::string quotedKey = "\"" + key + "\"";
    const size_t keyPos = body.find(quotedKey);
    if (keyPos == std::string::npos)
        return fallback;
    const size_t colonPos = body.find(':', keyPos + quotedKey.size());
    if (colonPos == std::string::npos)
        return fallback;
    const size_t openQuote = body.find('"', colonPos);
    if (openQuote == std::string::npos)
        return fallback;
    const size_t closeQuote = body.find('"', openQuote + 1);
    if (closeQuote == std::string::npos)
        return fallback;
    return body.substr(openQuote + 1, closeQuote - openQuote - 1);
}
}
std::vector<ShaderInfo> loadShaders(const std::filesystem::path& shaderRoot)
{
const auto metadataPath = shaderRoot / "black_white" / "shader.json";
const auto hlslPath = shaderRoot / "black_white" / "shader.hlsl";
auto metadata = readTextFile(metadataPath);
if (!metadata)
throw std::runtime_error("Missing shader metadata: " + metadataPath.string());
if (!std::filesystem::exists(hlslPath))
throw std::runtime_error("Missing shader HLSL: " + hlslPath.string());
ShaderInfo shader;
shader.id = findString(*metadata, "id", "builtin.black_white");
shader.name = findString(*metadata, "name", "Black & White");
shader.version = static_cast<int>(findJsonFloat(*metadata, "version").value_or(1.0f));
shader.entry = findString(*metadata, "entry", "main");
shader.type = findString(*metadata, "type", "fullscreen_pixel");
shader.hlslPath = hlslPath.string();
ShaderParameter amount;
amount.id = "amount";
amount.label = "Amount";
amount.defaultValue = findJsonFloat(*metadata, "default").value_or(1.0f);
amount.value = amount.defaultValue;
amount.minValue = findJsonFloat(*metadata, "min").value_or(0.0f);
amount.maxValue = findJsonFloat(*metadata, "max").value_or(1.0f);
shader.parameters.push_back(amount);
return { shader };
}

View File

@@ -1,8 +0,0 @@
#pragma once
#include "AppState.h"
#include <filesystem>
#include <vector>
std::vector<ShaderInfo> loadShaders(const std::filesystem::path& shaderRoot);

View File

@@ -1,367 +0,0 @@
#include "WebServer.h"
#include "FileUtil.h"
#include "JsonUtil.h"
#include <bcrypt.h>
#include <ws2tcpip.h>
#include <algorithm>
#include <array>
#include <cctype>
#include <chrono>
#include <cstdint>
#include <cstdlib>
#include <filesystem>
#include <sstream>
namespace
{
// Reason phrase for the handful of status codes this server emits;
// anything unrecognised falls back to "OK".
std::string reasonPhrase(int status)
{
    switch (status)
    {
    case 200: return "OK";
    case 204: return "No Content";
    case 400: return "Bad Request";
    case 404: return "Not Found";
    case 500: return "Internal Server Error";
    default: return "OK";
    }
}
// Extract a header value from the raw request text. Tries an exact-case
// match first, then falls back to a lowercased scan for case-insensitive
// header names. Returns "" when the header is absent.
// NOTE(review): matches the name anywhere in the request text, not only at
// the start of a line — a value containing "Name:" could confuse it.
std::string headerValue(const std::string& request, const std::string& name)
{
    const std::string needle = name + ":";
    size_t pos = request.find(needle);
    if (pos == std::string::npos)
    {
        std::string lowerRequest = request;
        std::string lowerNeedle = needle;
        std::transform(lowerRequest.begin(), lowerRequest.end(), lowerRequest.begin(), [](unsigned char ch) { return static_cast<char>(std::tolower(ch)); });
        std::transform(lowerNeedle.begin(), lowerNeedle.end(), lowerNeedle.begin(), [](unsigned char ch) { return static_cast<char>(std::tolower(ch)); });
        pos = lowerRequest.find(lowerNeedle);
    }
    if (pos == std::string::npos)
        return {};
    // Skip past the colon and leading spaces, then take text up to the CRLF.
    pos = request.find(':', pos);
    if (pos == std::string::npos)
        return {};
    ++pos;
    while (pos < request.size() && request[pos] == ' ')
        ++pos;
    size_t end = request.find("\r\n", pos);
    if (end == std::string::npos)
        end = request.size();
    return request.substr(pos, end - pos);
}
// RFC 4648 base64 with '=' padding (used for the WebSocket accept token).
std::string base64(const uint8_t* data, size_t size)
{
    static constexpr char table[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    std::string out;
    out.reserve(((size + 2) / 3) * 4);
    for (size_t i = 0; i < size; i += 3)
    {
        // Pack up to three input bytes into a 24-bit group, emit four digits,
        // padding with '=' when fewer than three bytes remain.
        const uint32_t b0 = data[i];
        const uint32_t b1 = i + 1 < size ? data[i + 1] : 0;
        const uint32_t b2 = i + 2 < size ? data[i + 2] : 0;
        const uint32_t v = (b0 << 16) | (b1 << 8) | b2;
        out.push_back(table[(v >> 18) & 0x3f]);
        out.push_back(table[(v >> 12) & 0x3f]);
        out.push_back(i + 1 < size ? table[(v >> 6) & 0x3f] : '=');
        out.push_back(i + 2 < size ? table[v & 0x3f] : '=');
    }
    return out;
}
// Compute the Sec-WebSocket-Accept token: base64(SHA1(key + handshake GUID)).
// Uses Windows CNG (BCrypt) for SHA-1; returns "" if the provider fails.
std::string websocketAccept(const std::string& key)
{
    const std::string source = key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
    std::array<uint8_t, 20> hash = {};  // SHA-1 digest is 20 bytes
    BCRYPT_ALG_HANDLE algorithm = nullptr;
    if (BCryptOpenAlgorithmProvider(&algorithm, BCRYPT_SHA1_ALGORITHM, nullptr, 0) != 0)
        return {};
    BCryptHash(algorithm, nullptr, 0, reinterpret_cast<PUCHAR>(const_cast<char*>(source.data())), static_cast<ULONG>(source.size()), hash.data(), static_cast<ULONG>(hash.size()));
    BCryptCloseAlgorithmProvider(algorithm, 0);
    return base64(hash.data(), hash.size());
}
// send() until the whole buffer is written; false on any socket error.
bool sendAll(SOCKET socket, const std::string& bytes)
{
    size_t sent = 0;
    while (sent < bytes.size())
    {
        const int n = send(socket, bytes.data() + sent, static_cast<int>(bytes.size() - sent), 0);
        if (n <= 0)
            return false;
        sent += static_cast<size_t>(n);
    }
    return true;
}
// Wrap a payload in a single unmasked WebSocket text frame
// (0x81 = FIN | text opcode) with the 7/16/64-bit payload-length encoding.
std::string websocketFrame(const std::string& payload)
{
    std::string frame;
    frame.push_back(static_cast<char>(0x81));
    if (payload.size() < 126)
    {
        frame.push_back(static_cast<char>(payload.size()));
    }
    else if (payload.size() <= 65535)
    {
        frame.push_back(static_cast<char>(126));
        frame.push_back(static_cast<char>((payload.size() >> 8) & 0xff));
        frame.push_back(static_cast<char>(payload.size() & 0xff));
    }
    else
    {
        frame.push_back(static_cast<char>(127));
        for (int i = 7; i >= 0; --i)
            frame.push_back(static_cast<char>((payload.size() >> (i * 8)) & 0xff));
    }
    frame += payload;
    return frame;
}
}
// Stores references and the static-content root only; no sockets or threads
// are created until start() is called.
WebServer::WebServer(AppState& appState, DeckLinkPipeline& pipeline, std::filesystem::path contentRoot)
    : m_appState(appState), m_pipeline(pipeline), m_contentRoot(std::move(contentRoot))
{
}
// Ensure sockets and worker threads are torn down even if the owner did not
// call stop() explicitly.
WebServer::~WebServer()
{
    stop();
}
bool WebServer::start(uint16_t port, std::string& error)
{
WSADATA data = {};
if (WSAStartup(MAKEWORD(2, 2), &data) != 0)
{
error = "WSAStartup failed";
return false;
}
m_listenSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (m_listenSocket == INVALID_SOCKET)
{
error = "socket failed";
return false;
}
sockaddr_in addr = {};
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
addr.sin_port = htons(port);
if (bind(m_listenSocket, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) == SOCKET_ERROR)
{
error = "bind failed on 127.0.0.1:" + std::to_string(port);
return false;
}
if (listen(m_listenSocket, SOMAXCONN) == SOCKET_ERROR)
{
error = "listen failed";
return false;
}
m_running = true;
m_acceptThread = std::thread(&WebServer::acceptLoop, this);
m_statusThread = std::thread(&WebServer::statusLoop, this);
return true;
}
// Shut the server down: clear the run flag, close all sockets (closing the
// listen socket unblocks the accept loop), join the worker threads, then
// tear down Winsock. Guards make a second call after shutdown harmless.
void WebServer::stop()
{
    m_running = false;
    if (m_listenSocket != INVALID_SOCKET)
    {
        closesocket(m_listenSocket);
        m_listenSocket = INVALID_SOCKET;
    }
    {
        // Drop every connected WebSocket client.
        std::scoped_lock lock(m_wsMutex);
        for (SOCKET ws : m_webSockets)
            closesocket(ws);
        m_webSockets.clear();
    }
    if (m_acceptThread.joinable())
        m_acceptThread.join();
    if (m_statusThread.joinable())
        m_statusThread.join();
    WSACleanup();
}
void WebServer::broadcastState()
{
const std::string payload = "{\"type\":\"state\",\"status\":" + m_appState.statusJson() + ",\"shaders\":" + m_appState.shadersJson() + "}";
const std::string frame = websocketFrame(payload);
std::scoped_lock lock(m_wsMutex);
auto it = m_webSockets.begin();
while (it != m_webSockets.end())
{
if (sendAll(*it, frame))
++it;
else
{
closesocket(*it);
it = m_webSockets.erase(it);
}
}
}
// Blocking accept loop run on m_acceptThread. Each connection is handed to
// its own detached thread; accept() failures (including the listen socket
// being closed by stop()) just re-check m_running.
void WebServer::acceptLoop()
{
    while (m_running)
    {
        SOCKET client = accept(m_listenSocket, nullptr, nullptr);
        if (client == INVALID_SOCKET)
            continue;
        std::thread(&WebServer::handleClient, this, client).detach();
    }
}
// Periodic broadcaster run on m_statusThread: pushes the current state to
// every WebSocket client twice a second until stop() clears m_running.
void WebServer::statusLoop()
{
    while (m_running)
    {
        broadcastState();
        std::this_thread::sleep_for(std::chrono::milliseconds(500));
    }
}
// Read one HTTP request from the client and dispatch it to the WebSocket or
// HTTP handler (which take ownership of the socket).
//
// Fix over the original: a single recv() call was assumed to deliver the
// whole request, so headers or bodies split across TCP segments were
// silently truncated. Now the read loops until the header terminator is
// seen, then until Content-Length bytes of body have arrived, with a size
// cap so a misbehaving client cannot grow the buffer without bound.
void WebServer::handleClient(SOCKET client)
{
    constexpr size_t kMaxRequestBytes = 1 << 20;  // 1 MiB cap
    std::string request;
    std::array<char, 8192> buffer = {};
    // Phase 1: read until the end of the header block ("\r\n\r\n").
    size_t headerEnd = std::string::npos;
    while (headerEnd == std::string::npos && request.size() < kMaxRequestBytes)
    {
        const int received = recv(client, buffer.data(), static_cast<int>(buffer.size()), 0);
        if (received <= 0)
        {
            closesocket(client);
            return;
        }
        request.append(buffer.data(), static_cast<size_t>(received));
        headerEnd = request.find("\r\n\r\n");
    }
    if (headerEnd == std::string::npos)
    {
        closesocket(client);
        return;
    }
    // Phase 2: drain the declared message body, if any.
    size_t expectedBody = 0;
    const std::string contentLength = headerValue(request.substr(0, headerEnd + 4), "Content-Length");
    if (!contentLength.empty())
        expectedBody = static_cast<size_t>(std::strtoul(contentLength.c_str(), nullptr, 10));
    while (request.size() - (headerEnd + 4) < expectedBody && request.size() < kMaxRequestBytes)
    {
        const int received = recv(client, buffer.data(), static_cast<int>(buffer.size()), 0);
        if (received <= 0)
            break;
        request.append(buffer.data(), static_cast<size_t>(received));
    }
    if (request.find("Upgrade: websocket") != std::string::npos || request.find("upgrade: websocket") != std::string::npos)
        handleWebSocket(client, request);
    else
        handleHttp(client, request);
}
// Route one parsed HTTP request: JSON API endpoints first, then a static
// file fallback for other GET targets. Always closes the socket before
// returning (Connection: close semantics).
void WebServer::handleHttp(SOCKET client, const std::string& request)
{
    // Request line: "<METHOD> <target> HTTP/1.1".
    std::istringstream firstLine(request.substr(0, request.find("\r\n")));
    std::string method;
    std::string target;
    firstLine >> method >> target;
    // Body starts after the blank line separating it from the headers.
    const size_t bodyPos = request.find("\r\n\r\n");
    const std::string body = bodyPos == std::string::npos ? std::string() : request.substr(bodyPos + 4);
    if (method == "GET" && target == "/api/status")
        sendJson(client, 200, m_appState.statusJson());
    else if (method == "GET" && target == "/api/shaders")
        sendJson(client, 200, m_appState.shadersJson());
    else if (method == "POST" && target == "/api/pipeline/start")
    {
        std::string error;
        if (m_pipeline.start(error))
            sendJson(client, 200, m_appState.statusJson());
        else
        {
            m_appState.setError(error);
            sendJson(client, 500, "{\"error\":\"" + jsonEscape(error) + "\"}");
        }
        // Push the new pipeline state to WebSocket clients immediately.
        broadcastState();
    }
    else if (method == "POST" && target == "/api/pipeline/stop")
    {
        m_pipeline.stop();
        sendJson(client, 200, m_appState.statusJson());
        broadcastState();
    }
    else if (method == "PATCH" && target == "/api/shaders/builtin.black_white/parameters")
    {
        // Expects a JSON body like {"amount": 0.5}.
        const auto amount = findJsonFloat(body, "amount");
        if (!amount || !m_appState.setShaderParameter("builtin.black_white", "amount", *amount))
            sendJson(client, 400, "{\"error\":\"Expected JSON body with numeric amount\"}");
        else
        {
            sendJson(client, 200, m_appState.shadersJson());
            broadcastState();
        }
    }
    else if (method == "GET")
    {
        // Static file fallback (index.html, styles.css, app.js, ...).
        const auto path = routeStaticPath(target);
        auto file = readTextFile(path);
        if (!file)
            sendText(client, 404, "text/plain; charset=utf-8", "Not found");
        else
            sendText(client, 200, contentTypeForPath(path), *file);
    }
    else
    {
        sendJson(client, 404, "{\"error\":\"Route not found\"}");
    }
    closesocket(client);
}
// Complete the WebSocket opening handshake (101 Switching Protocols with the
// computed Sec-WebSocket-Accept token) and register the socket for periodic
// state broadcasts. Closes the socket when the key is missing or the
// handshake response cannot be sent.
void WebServer::handleWebSocket(SOCKET client, const std::string& request)
{
    const std::string key = headerValue(request, "Sec-WebSocket-Key");
    if (key.empty())
    {
        closesocket(client);
        return;
    }
    std::string response = "HTTP/1.1 101 Switching Protocols\r\n";
    response += "Upgrade: websocket\r\n";
    response += "Connection: Upgrade\r\n";
    response += "Sec-WebSocket-Accept: " + websocketAccept(key) + "\r\n\r\n";
    if (!sendAll(client, response))
    {
        closesocket(client);
        return;
    }
    {
        std::scoped_lock lock(m_wsMutex);
        m_webSockets.push_back(client);
    }
    // Give the new client an immediate state snapshot.
    broadcastState();
}
// Convenience wrapper: send `body` as an application/json response.
void WebServer::sendJson(SOCKET client, int status, const std::string& body)
{
    sendText(client, status, "application/json; charset=utf-8", body);
}
// Emit a complete HTTP/1.1 response with an explicit Content-Length and
// close-delimited connection semantics. Send errors are ignored; the caller
// closes the socket either way.
void WebServer::sendText(SOCKET client, int status, const std::string& contentType, const std::string& body)
{
    std::string response = "HTTP/1.1 " + std::to_string(status) + " " + reasonPhrase(status) + "\r\n";
    response += "Content-Type: " + contentType + "\r\n";
    response += "Content-Length: " + std::to_string(body.size()) + "\r\n";
    response += "Connection: close\r\n\r\n";
    response += body;
    sendAll(client, response);
}
// Map a request target to a file path under the static content root.
// The query string is stripped and "/" maps to "/index.html".
//
// Fix over the original: the old code deleted every ".." substring, which
// both mangled legitimate names containing ".." (e.g. "app..min.js") and was
// a crude traversal filter. The path is now normalised with
// lexically_normal() and any request whose normalised form still contains a
// ".." component is redirected to the content root itself, where the
// subsequent file read fails and the caller answers 404.
std::string WebServer::routeStaticPath(const std::string& target) const
{
    std::string clean = target;
    const size_t query = clean.find('?');
    if (query != std::string::npos)
        clean.resize(query);
    if (clean == "/")
        clean = "/index.html";
    // Drop leading slashes so the target joins as a relative path.
    while (!clean.empty() && clean.front() == '/')
        clean.erase(clean.begin());
    const std::filesystem::path relative = std::filesystem::path(clean).lexically_normal();
    for (const auto& component : relative)
    {
        if (component == "..")
            return m_contentRoot.string();
    }
    return (m_contentRoot / relative).string();
}

View File

@@ -1,44 +0,0 @@
#pragma once
#include "AppState.h"
#include "DeckLinkPipeline.h"
#include <atomic>
#include <filesystem>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
#include <winsock2.h>
// Minimal embedded HTTP + WebSocket server that exposes the pipeline's JSON
// API and serves the static UI from m_contentRoot. Binds to loopback only
// and handles each connection on its own detached thread.
class WebServer
{
public:
    WebServer(AppState& appState, DeckLinkPipeline& pipeline, std::filesystem::path contentRoot);
    ~WebServer();
    // Start listening on 127.0.0.1:port; false + `error` on failure.
    bool start(uint16_t port, std::string& error);
    // Stop accepting, close all sockets, and join the worker threads.
    void stop();
    // Push a JSON state snapshot to every connected WebSocket client.
    void broadcastState();
private:
    void acceptLoop();                 // blocking accept loop (m_acceptThread)
    void statusLoop();                 // periodic broadcastState (m_statusThread)
    void handleClient(SOCKET client);  // read request, dispatch to HTTP or WS
    void handleHttp(SOCKET client, const std::string& request);
    void handleWebSocket(SOCKET client, const std::string& request);
    void sendJson(SOCKET client, int status, const std::string& body);
    void sendText(SOCKET client, int status, const std::string& contentType, const std::string& body);
    std::string routeStaticPath(const std::string& target) const;  // URL target -> file path
    AppState& m_appState;
    DeckLinkPipeline& m_pipeline;
    std::filesystem::path m_contentRoot;   // root directory for static files
    std::atomic<bool> m_running = false;   // cleared by stop() to end worker loops
    SOCKET m_listenSocket = INVALID_SOCKET;
    std::thread m_acceptThread;
    std::thread m_statusThread;
    std::mutex m_wsMutex;                  // guards m_webSockets
    std::vector<SOCKET> m_webSockets;      // connected WebSocket clients
};

View File

@@ -1,84 +0,0 @@
#include "AppState.h"
#include "AppConfig.h"
#include "D3DProcessor.h"
#include "DeckLinkPipeline.h"
#include "ShaderLibrary.h"
#include "WebServer.h"
#include <combaseapi.h>
#include <atomic>
#include <chrono>
#include <csignal>
#include <iostream>
#include <thread>
namespace
{
// Process-wide run flag, cleared by the signal handler to request shutdown.
std::atomic<bool> g_running = true;
// SIGINT/SIGTERM handler: only flips the atomic flag (async-signal-safe).
void signalHandler(int)
{
    g_running = false;
}
}
// Process entry point: initialise COM, load shaders and the D3D processor
// (fatal on failure), bring up the DeckLink pipeline (non-fatal — the UI
// still starts and shows the error), run the web server until a signal
// arrives, then shut everything down in reverse order.
int main()
{
    std::signal(SIGINT, signalHandler);
    std::signal(SIGTERM, signalHandler);
    // DeckLink objects are COM; initialise the multithreaded apartment.
    HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
    if (FAILED(hr))
    {
        std::cerr << "CoInitializeEx failed: 0x" << std::hex << static_cast<unsigned long>(hr) << "\n";
        return 1;
    }
    AppState appState;
    D3DProcessor processor;
    // Shader discovery and D3D setup are fatal: the pipeline cannot process
    // frames without a compiled shader.
    try
    {
        auto shaders = loadShaders(APP_SHADER_DIR);
        appState.setShaders(shaders);
        std::string error;
        if (!processor.initialize(shaders.front(), error))
        {
            std::cerr << "Shader/D3D initialization failed:\n" << error << "\n";
            CoUninitialize();
            return 1;
        }
    }
    catch (const std::exception& ex)
    {
        std::cerr << ex.what() << "\n";
        CoUninitialize();
        return 1;
    }
    DeckLinkPipeline pipeline(appState, processor);
    std::string error;
    // A missing DeckLink device is not fatal; the error is surfaced in the UI.
    if (!pipeline.initialize(error))
    {
        appState.setError(error);
        std::cerr << error << "\n";
    }
    WebServer webServer(appState, pipeline, APP_CONTENT_DIR);
    if (!webServer.start(8080, error))
    {
        std::cerr << error << "\n";
        CoUninitialize();
        return 1;
    }
    std::cout << "video-shader is running at http://127.0.0.1:8080\n";
    // Idle until a signal clears g_running, then shut down in reverse order.
    while (g_running)
        std::this_thread::sleep_for(std::chrono::milliseconds(200));
    webServer.stop();
    pipeline.stop();
    CoUninitialize();
    return 0;
}

View File

@@ -1,6 +0,0 @@
{
"name": "video-shader",
"version-string": "0.1.0",
"description": "DeckLink video IO shader processing tool.",
"dependencies": []
}

View File

@@ -1,92 +0,0 @@
// Client-side cache of the shader list last received from the server.
const state = {
  shaders: []
};

// Shorthand for document.getElementById.
function el(id) {
  return document.getElementById(id);
}
// Thin fetch wrapper for the JSON API: always sends a JSON content-type,
// parses the response as JSON, and throws (with the response text when
// available) on any non-2xx status.
async function api(path, options = {}) {
  const merged = {
    headers: { "content-type": "application/json" },
    ...options
  };
  const response = await fetch(path, merged);
  if (response.ok) {
    return response.json();
  }
  const text = await response.text();
  throw new Error(text || response.statusText);
}
// Paint one status snapshot into the summary widgets by element id.
function renderStatus(status) {
  const text = {
    device: status.deviceName || "No DeckLink device selected",
    running: status.running ? "Running" : "Stopped",
    mode: status.mode || "No signal",
    outputFormat: status.outputFormat || "Unavailable",
    frames: `${status.framesCaptured ?? 0} / ${status.framesOutput ?? 0}`,
    frameRate: Number(status.frameRate || 0).toFixed(2),
    dropped: status.framesDropped ?? 0,
    error: status.error || ""
  };
  for (const [id, value] of Object.entries(text)) {
    el(id).textContent = value;
  }
}
// Rebuild the shader cards from a /api/shaders payload.
// Fixes over the original: a shader without an "amount" parameter no longer
// crashes rendering (header-only card instead), and a failed PATCH is shown
// in the error area instead of leaving an unhandled promise rejection.
// NOTE(review): the slider bounds read amount.min/amount.max — confirm the
// server emits those keys (the C++ struct fields are minValue/maxValue).
function renderShaders(payload) {
  state.shaders = payload.shaders || [];
  const host = el("shaders");
  host.innerHTML = "";
  for (const shader of state.shaders) {
    const card = document.createElement("div");
    card.className = "shader";
    const amount = (shader.parameters || []).find((p) => p.id === "amount");
    if (!amount) {
      // No amount parameter: render the header only.
      card.innerHTML = `
        <header>
          <strong>${shader.name}</strong>
          <span>${shader.type}</span>
        </header>
      `;
      host.appendChild(card);
      continue;
    }
    card.innerHTML = `
      <header>
        <strong>${shader.name}</strong>
        <span>${shader.type}</span>
      </header>
      <label>
        <span>${amount.label}</span>
        <input type="range" min="${amount.min}" max="${amount.max}" step="0.01" value="${amount.value}">
        <output>${Number(amount.value).toFixed(2)}</output>
      </label>
    `;
    const input = card.querySelector("input");
    const output = card.querySelector("output");
    input.addEventListener("input", async () => {
      output.textContent = Number(input.value).toFixed(2);
      try {
        await api(`/api/shaders/${shader.id}/parameters`, {
          method: "PATCH",
          body: JSON.stringify({ amount: Number(input.value) })
        });
      } catch (error) {
        el("error").textContent = error.message;
      }
    });
    host.appendChild(card);
  }
}
// Pull a fresh status and shader snapshot over plain HTTP (used at startup,
// before the WebSocket channel delivers its first push).
async function refresh() {
  const status = await api("/api/status");
  renderStatus(status);
  const shaders = await api("/api/shaders");
  renderShaders(shaders);
}
// Live updates: subscribe to the server's WebSocket and re-render on every
// "state" message; reconnect one second after the socket closes.
function connectWs() {
  const ws = new WebSocket(`ws://${location.host}/ws`);
  ws.onmessage = (event) => {
    const message = JSON.parse(event.data);
    if (message.type !== "state") {
      return;
    }
    renderStatus(message.status);
    renderShaders(message.shaders);
  };
  ws.onclose = () => setTimeout(connectWs, 1000);
}
// Wire up the transport controls and kick off the initial load.
// Fix over the original: the stop handler now mirrors the start handler's
// try/catch, surfacing failures in the error area instead of leaving an
// unhandled promise rejection.
el("start").addEventListener("click", async () => {
  try {
    renderStatus(await api("/api/pipeline/start", { method: "POST" }));
  } catch (error) {
    el("error").textContent = error.message;
  }
});
el("stop").addEventListener("click", async () => {
  try {
    renderStatus(await api("/api/pipeline/stop", { method: "POST" }));
  } catch (error) {
    el("error").textContent = error.message;
  }
});
refresh();
connectWs();

View File

@@ -1,60 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>video-shader</title>
<link rel="stylesheet" href="/styles.css">
</head>
<body>
<main class="shell">
<section class="topbar">
<div>
<h1>video-shader</h1>
<p id="device">DeckLink device</p>
</div>
<div class="actions">
<button id="start">Start</button>
<button id="stop">Stop</button>
</div>
</section>
<section class="status-grid">
<article>
<span>State</span>
<strong id="running">Stopped</strong>
</article>
<article>
<span>Mode</span>
<strong id="mode">No signal</strong>
</article>
<article>
<span>Output</span>
<strong id="outputFormat">Unavailable</strong>
</article>
<article>
<span>Frames</span>
<strong id="frames">0 / 0</strong>
</article>
</section>
<section class="workbench">
<div class="shader-list">
<h2>Shader Stack</h2>
<div id="shaders"></div>
</div>
<div class="meter">
<h2>Runtime</h2>
<dl>
<dt>Frame rate</dt>
<dd id="frameRate">0.00</dd>
<dt>Dropped</dt>
<dd id="dropped">0</dd>
</dl>
<p id="error"></p>
</div>
</section>
</main>
<script src="/app.js"></script>
</body>
</html>

View File

@@ -1,175 +0,0 @@
/* Dark theme base: color scheme, typography, and page-wide resets. */
:root {
  color-scheme: dark;
  font-family: "Segoe UI", Arial, sans-serif;
  background: #141717;
  color: #eef3ef;
}
* {
  box-sizing: border-box;
}
body {
  margin: 0;
}
/* Centered page column, capped at 1120px with 16px side gutters. */
.shell {
  width: min(1120px, calc(100vw - 32px));
  margin: 0 auto;
  padding: 28px 0;
}
/* Header row: title block on the left, Start/Stop actions on the right. */
.topbar {
  display: flex;
  align-items: center;
  justify-content: space-between;
  gap: 20px;
  padding: 0 0 22px;
  border-bottom: 1px solid #33403b;
}
h1,
h2,
p {
  margin: 0;
}
h1 {
  font-size: 28px;
  font-weight: 650;
}
h2 {
  font-size: 16px;
  font-weight: 650;
  margin-bottom: 14px;
}
/* Muted secondary text: header subtitle, tile labels, definition terms. */
.topbar p,
article span,
dt {
  color: #aab8b0;
}
.actions {
  display: flex;
  gap: 10px;
}
button {
  appearance: none;
  border: 1px solid #60736a;
  border-radius: 6px;
  background: #24302b;
  color: #f3fff7;
  min-width: 84px;
  height: 38px;
  font: inherit;
  cursor: pointer;
}
button:hover {
  background: #314139;
}
/* Four status tiles in a row (drops to two columns in the narrow media
   query below). */
.status-grid {
  display: grid;
  grid-template-columns: repeat(4, minmax(0, 1fr));
  gap: 12px;
  margin: 22px 0;
}
/* Shared card chrome for status tiles and both workbench panels. */
article,
.shader-list,
.meter {
  border: 1px solid #33403b;
  border-radius: 8px;
  background: #1b211f;
}
article {
  padding: 16px;
}
article span {
  display: block;
  font-size: 12px;
  margin-bottom: 8px;
}
/* Tile value: reserve height so empty values don't collapse the tile,
   and wrap long strings (e.g. output format names). */
article strong {
  display: block;
  min-height: 26px;
  font-size: 20px;
  font-weight: 650;
  overflow-wrap: anywhere;
}
/* Main area: fluid shader stack beside a fixed 340px runtime panel. */
.workbench {
  display: grid;
  grid-template-columns: 1fr 340px;
  gap: 12px;
}
.shader-list,
.meter {
  padding: 18px;
}
/* A single shader card inside the stack. */
.shader {
  display: grid;
  gap: 14px;
  padding: 14px;
  border: 1px solid #3c4b45;
  border-radius: 6px;
  background: #202925;
}
.shader header {
  display: flex;
  justify-content: space-between;
  gap: 12px;
}
/* Parameter row layout: name | slider | value readout. */
label {
  display: grid;
  grid-template-columns: 120px 1fr 52px;
  align-items: center;
  gap: 12px;
  color: #cdd8d1;
}
input[type="range"] {
  width: 100%;
}
/* Runtime metrics: term/value pairs in two columns. */
dl {
  display: grid;
  grid-template-columns: 120px 1fr;
  gap: 10px 12px;
  margin: 0;
}
dd {
  margin: 0;
  font-weight: 650;
}
/* Error readout: reserved height so the panel doesn't jump when a
   message appears; long messages wrap. */
#error {
  min-height: 24px;
  margin-top: 18px;
  color: #ffb4a8;
  overflow-wrap: anywhere;
}
/* Narrow screens: stack topbar and workbench vertically, status tiles 2-up. */
@media (max-width: 820px) {
  .topbar,
  .workbench {
    grid-template-columns: 1fr;
    display: grid;
  }
  .status-grid {
    grid-template-columns: repeat(2, minmax(0, 1fr));
  }
}