Forked from MathieuDuponchelle/mixit.py
from gi.repository import GstPbutils
from gi.repository import Gtk
from gi.repository import Gst
from gi.repository import GES
from gi.repository import GObject
import sys
import signal
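
# Stack several time-offset copies of one video clip on separate GES layers,
# blend them with increasing alpha, add an audio track, and either preview the
# result or render it to a WebM file.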
def handle_sigint(sig, frame):
    Gtk.main_quit()

def busMessageCb(unused_bus, message):
    # Quit the main loop once the pipeline reaches end-of-stream.
    if message.type == Gst.MessageType.EOS:
        print "eos"
        Gtk.main_quit()

def duration_querier(pipeline):
    # Periodically print the current position; returning True keeps the
    # GObject timeout active.
    print pipeline.query_position(Gst.Format.TIME)
    return True

def mylog(x):
    # Unused helper, kept from the original script.
    return (x / (1 + x))

def createLayers(timeline, asset):
    # Stack sys.argv[2] copies of the clip on separate layers, each offset by
    # 0.3 seconds and blended with an increasing alpha.
    step = 1.0 / int(sys.argv[2])
    alpha = step
    for i in range(int(sys.argv[2])):
        layer = timeline.append_layer()
        clip = layer.add_asset(asset, i * Gst.SECOND * 0.3, 0,
                               asset.get_duration(), GES.TrackType.UNKNOWN)
        # Find the clip's video source and set its alpha.
        for source in clip.get_children(False):
            if source.props.track_type == GES.TrackType.VIDEO:
                break
        source.set_child_property("alpha", alpha)
        alpha += step

if __name__ == "__main__":
    if len(sys.argv) < 4:
        print "usage: " + sys.argv[0] + " file:///video/uri number_of_layers file:///audio/uri [file:///output_uri]"
        print "If you specify an output uri, the pipeline will render to it"
        exit(0)

    GObject.threads_init()
    Gst.init(None)
    GES.init()

    # Build the timeline: stacked video layers plus one audio layer.
    timeline = GES.Timeline.new_audio_video()
    asset = GES.UriClipAsset.request_sync(sys.argv[1])
    audio_asset = GES.UriClipAsset.request_sync(sys.argv[3])
    createLayers(timeline, asset)
    timeline.commit()

    layer = timeline.append_layer()
    layer.add_asset(audio_asset, 0, 0, timeline.get_duration(),
                    GES.TrackType.AUDIO)

    pipeline = GES.Pipeline()
    pipeline.set_timeline(timeline)

    # WebM container with VP8 video and Vorbis audio.
    container_profile = GstPbutils.EncodingContainerProfile.new(
        "pitivi-profile", "Pitivi encoding profile",
        Gst.Caps("video/webm"), None)
    video_profile = GstPbutils.EncodingVideoProfile.new(
        Gst.Caps("video/x-vp8"), None, Gst.Caps("video/x-raw"), 0)
    container_profile.add_profile(video_profile)
    audio_profile = GstPbutils.EncodingAudioProfile.new(
        Gst.Caps("audio/x-vorbis"), None, Gst.Caps("audio/x-raw"), 0)
    container_profile.add_profile(audio_profile)

    # Render to the output uri if one was given, otherwise just preview.
    if len(sys.argv) > 4:
        pipeline.set_render_settings(sys.argv[4], container_profile)
        pipeline.set_mode(GES.PipelineFlags.RENDER)

    pipeline.set_state(Gst.State.PLAYING)
    bus = pipeline.get_bus()
    bus.add_signal_watch()
    bus.connect("message", busMessageCb)
    GObject.timeout_add(300, duration_querier, pipeline)
    signal.signal(signal.SIGINT, handle_sigint)
    Gtk.main()
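
# Example invocation (the file URIs are hypothetical): stack 4 layers of the
# video, mix in the audio track, and render the result to out.webm:
#   python mixit.py file:///tmp/video.webm 4 file:///tmp/audio.ogg file:///tmp/out.webm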