Skip to content

Instantly share code, notes, and snippets.

@softworkz
Created February 17, 2025 13:24
FFmpeg Filtergraph Printing
{
"FilterGraphs": {
"Graphs": [
{
"Description": "[0:0]vpp_qsv@f1=format=nv12[f1_out0]",
"Filters": [
{
"Filter": {
"Description": "Quick Sync Video VPP.",
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"DestPadName": "default",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "p010le",
"Width": 1920
},
"MediaTypeId": 0,
"SampleRate": 0,
"SAR": "1:1",
"SourceName": "graph 0 video input from stream 0:0",
"SourcePadName": "default",
"Width": 1920
}
],
"Name": "vpp_qsv@f1",
"Name2": "vpp_qsv",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"DestName": "format",
"DestPadName": "default",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "nv12",
"Width": 1920
},
"MediaTypeId": 0,
"SampleRate": 0,
"SAR": "1:1",
"SourceName": "vpp_qsv@f1",
"Width": 1920
}
]
}
},
{
"Filter": {
"Description": "Buffer video frames, and make them accessible to the filterchain.",
"Inputs": [
],
"Name": "graph 0 video input from stream 0:0",
"Name2": "buffer",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"DestName": "vpp_qsv@f1",
"DestPadName": "default",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "p010le",
"Width": 1920
},
"MediaTypeId": 0,
"SampleRate": 0,
"SAR": "1:1",
"SourceName": "graph 0 video input from stream 0:0",
"Width": 1920
}
]
}
},
{
"Filter": {
"Description": "Buffer video frames, and make them available to the end of the filter graph.",
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"DestPadName": "default",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "nv12",
"Width": 1920
},
"MediaTypeId": 0,
"SampleRate": 0,
"SAR": "1:1",
"SourceName": "format",
"SourcePadName": "default",
"Width": 1920
}
],
"Name": "out_0_0",
"Name2": "buffersink",
"Outputs": [
]
}
},
{
"Filter": {
"Description": "Convert the input video to one of the specified pixel formats.",
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"DestPadName": "default",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "nv12",
"Width": 1920
},
"MediaTypeId": 0,
"SampleRate": 0,
"SAR": "1:1",
"SourceName": "vpp_qsv@f1",
"SourcePadName": "default",
"Width": 1920
}
],
"Name": "format",
"Name2": "format",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"DestName": "out_0_0",
"DestPadName": "default",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "nv12",
"Width": 1920
},
"MediaTypeId": 0,
"SampleRate": 0,
"SAR": "1:1",
"SourceName": "format",
"Width": 1920
}
]
}
}
],
"GraphIndex": 0,
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"Description": "Buffer video frames, and make them accessible to the filterchain.",
"Format": "qsv",
"Height": 1080,
"HwFramesContext": {
"HasHwFramesContext": true,
"Height": 1088,
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"HwPixelFormat": "qsv",
"SwPixelFormat": "p010le",
"Width": 1920
},
"MediaType": "video",
"MediaTypeId": 0,
"Name1": "vpp_qsv:default",
"Name2": "graph 0 video input from stream 0:0",
"Name3": "buffer",
"SampleRate": 0,
"SAR": "1:1",
"Width": 1920
}
],
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"Description": "Buffer video frames, and make them available to the end of the filter graph.",
"Format": "qsv",
"Height": 1080,
"MediaType": "video",
"MediaTypeId": 0,
"Name1": "vpp_qsv:default",
"Name2": "out_0_0",
"Name3": "buffersink",
"SampleRate": 0,
"Width": 1920
}
]
},
{
"Filters": [
{
"Filter": {
"Description": "Change input volume.",
"HwDeviceContext": {
"DeviceType": "qsv",
"HasHwDeviceContext": true
},
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "5.1(side)",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 46034,
"SourceName": "graph_1_in_0_1",
"SourcePadName": "default",
"Width": 0
}
],
"Name": "Parsed_volume_0",
"Name2": "volume",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "5.1(side)",
"DestName": "auto_aresample_0",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 46034,
"SourceName": "Parsed_volume_0",
"Width": 0
}
]
}
},
{
"Filter": {
"Description": "Buffer audio frames, and make them accessible to the filterchain.",
"Inputs": [
],
"Name": "graph_1_in_0_1",
"Name2": "abuffer",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "5.1(side)",
"DestName": "Parsed_volume_0",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 46034,
"SourceName": "graph_1_in_0_1",
"Width": 0
}
]
}
},
{
"Filter": {
"Description": "Buffer audio frames, and make them available to the end of the filter graph.",
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "stereo",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 44100,
"SourceName": "format_out_0_1",
"SourcePadName": "default",
"Width": 0
}
],
"Name": "out_0_1",
"Name2": "abuffersink",
"Outputs": [
]
}
},
{
"Filter": {
"Description": "Convert the input audio to one of the specified formats.",
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "stereo",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 44100,
"SourceName": "auto_aresample_0",
"SourcePadName": "default",
"Width": 0
}
],
"Name": "format_out_0_1",
"Name2": "aformat",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "stereo",
"DestName": "out_0_1",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 44100,
"SourceName": "format_out_0_1",
"Width": 0
}
]
}
},
{
"Filter": {
"Description": "Resample audio data.",
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "5.1(side)",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 46034,
"SourceName": "Parsed_volume_0",
"SourcePadName": "default",
"Width": 0
}
],
"Name": "auto_aresample_0",
"Name2": "aresample",
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"ChannelString": "stereo",
"DestName": "format_out_0_1",
"DestPadName": "default",
"Height": 0,
"MediaTypeId": 0,
"SampleRate": 44100,
"SourceName": "auto_aresample_0",
"Width": 0
}
]
}
}
],
"GraphIndex": 1,
"Inputs": [
{
"LinkType": "Input",
"ChannelLayout": 0,
"Channels": 0,
"Description": "Buffer audio frames, and make them accessible to the filterchain.",
"Format": "gray",
"Height": 0,
"MediaType": "audio",
"MediaTypeId": 1,
"Name2": "graph_1_in_0_1",
"Name3": "abuffer",
"SampleRate": 0,
"SAR": "0:1",
"Width": 0
}
],
"Outputs": [
{
"LinkType": "Output",
"ChannelLayout": 0,
"Channels": 0,
"Description": "Buffer audio frames, and make them available to the end of the filter graph.",
"Format": "gray",
"Height": 0,
"MediaType": "audio",
"MediaTypeId": 1,
"Name2": "out_0_1",
"Name3": "abuffersink",
"SampleRate": 0,
"Width": 0
}
]
}
]
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment