1 /* Copyright 2009-2010 Yorba Foundation
3 * This software is licensed under the GNU Lesser General Public License
4 * (version 2.1 or later). See the COPYING file in this distribution.
/**
 * States of the MediaEngine play/record/export state machine.
 * NOTE(review): this listing is an excerpt; additional members
 * (STOPPED, PLAYING, CLOSING, CLOSED) are referenced elsewhere in this
 * file but their declarations are not visible here.
 */
9 public enum PlayState {
12 PRE_RECORD_NULL, PRE_RECORD, RECORDING, POST_RECORD,
13 PRE_EXPORT, EXPORTING, CANCEL_EXPORT,
// Playback mixes stereo; recording captures mono. Used by
// get_project_audio_caps() / get_record_audio_caps() via build_audio_caps().
18 public const int CHANNELS_PER_TRACK_PLAYBACK = 2;
19 public const int CHANNELS_PER_TRACK_RECORD = 1;
/**
 * Wraps a Model.Clip as a Gnonlin "gnlsource" element inside a Gst.Bin
 * composition, keeping the element's start/media-start/duration properties
 * in sync with the model clip via signal handlers.
 * NOTE(review): excerpt — some declarations and closing braces are not
 * visible in this listing.
 */
23 class MediaClip : Object {
24 public Gst.Element file_source;
// Emitted when the underlying model clip is removed from its track.
28 public signal void clip_removed(MediaClip clip);
30 public MediaClip(Gst.Bin composition, Model.Clip clip) throws Error {
32 this.composition = composition;
33 file_source = make_element("gnlsource");
// A clip still being recorded has no stable timing yet, so only finished
// clips track model-side timing changes.
34 if (!clip.is_recording) {
35 clip.duration_changed.connect(on_duration_changed);
36 clip.media_start_changed.connect(on_media_start_changed);
37 clip.start_changed.connect(on_start_changed);
39 composition.add(file_source);
// Push the clip's current timing into the freshly created gnlsource.
41 on_start_changed(clip.start);
42 on_media_start_changed(clip.media_start);
43 on_duration_changed(clip.duration);
45 clip.removed.connect(on_clip_removed);
// NOTE(review): the following disconnects appear to belong to the
// destructor — its declaration is not visible in this excerpt. Teardown
// mirrors the constructor and drops the source element to NULL.
49 clip.removed.disconnect(on_clip_removed);
50 if (!clip.is_recording) {
51 clip.duration_changed.disconnect(on_duration_changed);
52 clip.media_start_changed.disconnect(on_media_start_changed);
53 clip.start_changed.disconnect(on_start_changed);
55 file_source.set_state(Gst.State.NULL);
// Removes the gnlsource from the composition when the model clip goes away.
58 public void on_clip_removed() {
59 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_clip_removed");
60 composition.remove((Gst.Bin)file_source);
// Mirrors the model clip's media_start into the gnlsource.
64 void on_media_start_changed(int64 media_start) {
65 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_media_start_changed");
66 file_source.set("media-start", media_start);
// Mirrors the model clip's duration into the gnlsource.
69 void on_duration_changed(int64 duration) {
70 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_duration_changed");
71 file_source.set("duration", duration);
72 // TODO: is media-duration necessary?
73 file_source.set("media-duration", duration);
// Mirrors the model clip's timeline start position into the gnlsource.
76 void on_start_changed(int64 start) {
77 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_start_changed");
78 file_source.set("start", start);
// Adds a SingleDecodeBin restricted to the given caps for the clip's file;
// if the new bin cannot sync state the clip file is marked offline.
81 protected void add_single_decode_bin(string filename, string caps) throws Error {
82 Gst.Element sbin = new SingleDecodeBin(Gst.Caps.from_string(caps),
83 "singledecoder", filename);
84 if (((Gst.Bin) file_source).add(sbin)) {
85 if (!file_source.sync_state_with_parent()) {
86 clip.clipfile.set_online(false);
// True if this media clip wraps the given model clip (identity compare).
91 public bool is_equal(Model.Clip clip) {
92 return clip == this.clip;
/**
 * MediaClip specialization that decodes audio (raw float or int).
 * A recording clip gets no decode bin — presumably because its file is
 * still being written; TODO confirm.
 */
96 class MediaAudioClip : MediaClip {
97 public MediaAudioClip(Gst.Bin composition, Model.Clip clip, string filename) throws Error {
98 base(composition, clip);
99 if (!clip.is_recording) {
100 add_single_decode_bin(filename, "audio/x-raw-float;audio/x-raw-int");
/**
 * MediaClip specialization that decodes video (raw YUV or RGB).
 */
105 class MediaVideoClip : MediaClip {
106 public MediaVideoClip(Gst.Bin composition, Model.Clip clip, string filename) throws Error {
107 base(composition, clip);
108 add_single_decode_bin(filename, "video/x-raw-yuv; video/x-raw-rgb");
/**
 * Abstract base that maps a Model.Track onto a Gnonlin "gnlcomposition"
 * inside the engine's pipeline. Owns one MediaClip per model clip plus a
 * low-priority default source (filler supplied by empty_element()) so the
 * composition covers empty space.
 * NOTE(review): excerpt — some braces and a destructor header are not
 * visible in this listing.
 */
112 public abstract class MediaTrack : Object {
113 Gee.ArrayList<MediaClip> clips;
114 protected weak MediaEngine media_engine;
115 protected Gst.Bin composition;
117 protected Gst.Element default_source;
118 protected Gst.Element sink;
120 public signal void track_removed(MediaTrack track);
121 public signal void error_occurred(string major_message, string? minor_message);
123 public MediaTrack(MediaEngine media_engine, Model.Track track) throws Error {
124 clips = new Gee.ArrayList<MediaClip>();
125 this.media_engine = media_engine;
126 track.clip_added.connect(on_clip_added);
127 track.track_removed.connect(on_track_removed);
129 media_engine.pre_export.connect(on_pre_export);
130 media_engine.post_export.connect(on_post_export);
132 composition = (Gst.Bin) make_element("gnlcomposition");
// The default source wraps the subclass-provided filler element
// (silence for audio, black frames for video).
134 default_source = make_element_with_name("gnlsource", "track_default_source");
135 Gst.Bin default_source_bin = (Gst.Bin) default_source;
136 if (!default_source_bin.add(empty_element()))
137 error("can't add empty element");
139 // If we set the priority to 0xffffffff, then Gnonlin will treat this source as
140 // a default and we won't be able to seek past the end of the last region.
141 // We want to be able to seek into empty space, so we use a fixed priority instead.
142 default_source.set("priority", 1);
143 default_source.set("start", 0 * Gst.SECOND);
144 default_source.set("duration", 1000000 * Gst.SECOND);
145 default_source.set("media-start", 0 * Gst.SECOND);
146 default_source.set("media-duration", 1000000 * Gst.SECOND);
148 if (!composition.add(default_source)) {
149 error("can't add default source");
152 media_engine.pipeline.add(composition);
153 composition.pad_added.connect(on_pad_added);
154 composition.pad_removed.connect(on_pad_removed);
// NOTE(review): teardown — the enclosing destructor declaration is not
// visible in this excerpt. Detaches the composition from the pipeline.
158 if (composition != null && !media_engine.pipeline.remove(composition)) {
159 error("couldn't remove composition");
// Subclasses supply the filler element used inside the default source.
163 protected abstract Gst.Element empty_element() throws Error;
// Subclasses return the downstream element new composition pads link to.
164 public abstract Gst.Element? get_element();
166 public abstract void link_new_pad(Gst.Pad pad, Gst.Element track_element);
167 public abstract void unlink_pad(Gst.Pad pad, Gst.Element track_element);
169 void on_clip_added(Model.Clip clip) {
170 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_clip_added");
171 clip.updated.connect(on_clip_updated);
172 on_clip_updated(clip);
// Creates the MediaClip for an online model clip, or tears down the
// wrapper when the clip's file is offline.
175 void on_clip_updated(Model.Clip clip) {
176 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_clip_updated");
177 if (clip.clipfile.is_online()) {
179 MediaClip media_clip;
180 if (clip.type == Model.MediaType.AUDIO) {
181 media_clip = new MediaAudioClip(composition, clip, clip.clipfile.filename);
183 media_clip = new MediaVideoClip(composition, clip, clip.clipfile.filename);
185 media_clip.clip_removed.connect(on_media_clip_removed);
187 clips.add(media_clip);
189 error_occurred("Could not create clip", e.message);
// Offline: drop any MediaClip currently wrapping this model clip.
192 foreach (MediaClip media_clip in clips) {
193 if (media_clip.is_equal(clip)) {
194 media_clip.on_clip_removed();
200 void on_media_clip_removed(MediaClip clip) {
201 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_media_clip_removed");
202 clip.clip_removed.disconnect(on_media_clip_removed);
206 void on_pad_added(Gst.Pad pad) {
207 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_pad_added");
208 link_new_pad(pad, get_element());
211 void on_pad_removed(Gst.Pad pad) {
212 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_pad_removed");
213 unlink_pad(pad, get_element());
216 void on_track_removed(Model.Track track) {
217 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_track_removed");
// Clamp the default source to the export length while exporting…
221 void on_pre_export(int64 length) {
222 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_pre_export");
223 default_source.set("duration", length);
224 default_source.set("media-duration", length);
// …and restore the effectively-unbounded duration afterwards.
227 void on_post_export(bool deleted) {
228 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_post_export");
229 default_source.set("duration", 1000000 * Gst.SECOND);
230 default_source.set("media-duration", 1000000 * Gst.SECOND);
/**
 * Video track: links composition pads into a shared colorspace converter
 * owned by the engine.
 */
234 public class MediaVideoTrack : MediaTrack {
235 weak Gst.Element converter;
237 public MediaVideoTrack(MediaEngine media_engine, Model.Track track,
238 Gst.Element converter) throws Error {
239 base(media_engine, track);
240 this.converter = converter;
243 public override Gst.Element? get_element() {
244 //converter shouldn't be null. since fillmore is currently not supporting
245 //video, but this is a shared track, we can't guarantee at compile time that
246 //convert is valid. This is why we have "Gst.Element?" rather than "Gst.Element"
247 assert(converter != null);
248 assert(converter.sync_state_with_parent());
// Filler for gaps: a videotestsrc producing solid black frames.
252 protected override Gst.Element empty_element() throws Error {
253 Gst.Element blackness = make_element("videotestsrc");
254 blackness.set("pattern", 2); // 2 == GST_VIDEO_TEST_SRC_BLACK
258 public override void link_new_pad(Gst.Pad pad, Gst.Element track_element) {
259 if (pad.link(track_element.get_static_pad("sink")) != Gst.PadLinkReturn.OK) {
260 error("couldn't link pad to converter");
264 public override void unlink_pad(Gst.Pad pad, Gst.Element track_element) {
265 pad.unlink(track_element.get_static_pad("sink"));
/**
 * Metronome: an audiotestsrc whose frequency and mute state are driven by
 * Gst.Controller timelines so that clicks land on beats while playing or
 * recording (when the project's click options are enabled).
 */
269 public class ClickTrack : Object {
270 Gst.Controller click_controller;
271 Gst.Controller volume_controller;
272 Gst.Element audio_source;
273 Gst.Element audio_convert;
275 weak Model.Project project;
277 public ClickTrack(MediaEngine engine, Model.Project project) throws Error {
278 this.project = project;
279 audio_source = make_element("audiotestsrc");
280 audio_convert = make_element("audioconvert");
281 volume = make_element("volume");
282 GLib.List<string> list = new GLib.List<string>();
284 click_controller = new Gst.Controller.list(audio_source, list);
285 list.remove_all("freq");
287 volume_controller = new Gst.Controller.list(volume, list);
288 engine.pipeline.add_many(audio_source, volume, audio_convert);
289 audio_source.set("volume", project.click_volume);
291 audio_source.link_many(audio_convert, volume, engine.adder);
292 engine.playstate_changed.connect(on_playstate_changed);
// Removes all scheduled click events and silences the output.
295 void clear_controllers() {
296 volume_controller.unset_all("mute");
297 click_controller.unset_all("freq");
298 volume.set("mute", true);
299 volume.set("volume", 0.0);
// Schedules or clears clicks whenever the engine's play state changes.
302 void on_playstate_changed() {
303 switch (project.media_engine.get_play_state()) {
304 case PlayState.PRE_EXPORT:
305 case PlayState.STOPPED:
308 case PlayState.PLAYING: {
309 if (project.click_during_play) {
310 setup_clicks(project.get_bpm(), project.get_time_signature());
316 case PlayState.PRE_RECORD: {
317 if (project.click_during_record) {
318 setup_clicks(project.get_bpm(), project.get_time_signature());
320 // TODO: Hack for bug #1436 Should be waiting for async message
321 // instead, spend some extra time allowing the seek to finish
322 setup_clicks(project.get_bpm(), project.get_time_signature());
// Programs the controllers: one click per beat, 880 Hz on the downbeat of
// each measure and 440 Hz otherwise; the volume element is unmuted from
// each beat until a tenth of a second after it.
330 void setup_clicks(int bpm, Fraction time_signature) {
332 volume.set("volume", project.click_volume / 10);
334 Gst.Value double_value = Gst.Value();
335 double_value.init(Type.from_name("gdouble"));
336 Gst.Value bool_value = Gst.Value();
337 bool_value.init(Type.from_name("gboolean"));
339 Gst.ClockTime time = (Gst.ClockTime)(0);
340 bool_value.set_boolean(true);
// NOTE(review): this passes a boolean GValue for the "volume" property,
// which is a double on the volume element — looks like it was meant to be
// "mute"; verify against GstController semantics.
341 volume_controller.set("volume", time, bool_value);
// One beat every (60 / bpm) seconds, in nanoseconds.
343 int64 conversion = (Gst.SECOND * 60) / bpm;
344 uint64 current_time = 0;
345 // TODO: We are playing for a hard-coded amount of time.
346 for (int i = 0; current_time < Gst.SECOND * 60 * 10; ++i) {
347 current_time = i * conversion;
// Mute a tenth of a second before the beat…
349 time = (Gst.ClockTime)(current_time - Gst.SECOND/10);
350 bool_value.set_boolean(true);
351 volume_controller.set("mute", time, bool_value);
353 time = (Gst.ClockTime)(current_time);
354 if ((i % time_signature.numerator) == 0) {
355 double_value.set_double(880.0);
357 double_value.set_double(440.0);
// …set the click pitch and unmute exactly on the beat…
359 click_controller.set("freq", time, double_value);
360 bool_value.set_boolean(false);
361 volume_controller.set("mute", time, bool_value);
// …and mute again a tenth of a second after the beat.
363 time = (Gst.ClockTime)(current_time + Gst.SECOND/10);
364 bool_value.set_boolean(true);
365 volume_controller.set("mute", time, bool_value);
/**
 * Audio track: composition -> audioconvert -> audioresample -> level ->
 * audiopanorama -> volume, mixed into the engine's adder via a requested
 * pad. Forwards level-meter readings and applies model pan/volume changes.
 */
370 public class MediaAudioTrack : MediaTrack {
371 Gst.Element audio_convert;
372 Gst.Element audio_resample;
378 public MediaAudioTrack(MediaEngine media_engine, Model.AudioTrack track) throws Error {
379 base(media_engine, track);
380 track.parameter_changed.connect(on_parameter_changed);
382 audio_convert = make_element("audioconvert");
383 audio_resample = make_element("audioresample");
384 level = make_element("level");
386 pan = make_element("audiopanorama");
387 on_parameter_changed(Model.Parameter.PAN, track.get_pan());
388 volume = make_element("volume");
389 on_parameter_changed(Model.Parameter.VOLUME, track.get_volume());
// Emit level messages roughly 30 times per second.
391 Value the_level = (uint64) (Gst.SECOND / 30);
392 level.set_property("interval", the_level);
393 Value true_value = true;
394 level.set_property("message", true_value);
396 if (!media_engine.pipeline.add(audio_convert)) {
397 error("could not add audio_convert");
400 if (!media_engine.pipeline.add(audio_resample)) {
401 error("could not add audio_resample");
404 if (!media_engine.pipeline.add(level)) {
405 error("could not add level");
408 if (!media_engine.pipeline.add(pan)) {
409 error("could not add pan");
412 if (!media_engine.pipeline.add(volume)) {
413 error("could not add volume");
415 media_engine.level_changed.connect(on_level_changed);
416 level_changed.connect(track.on_level_changed);
// NOTE(review): teardown — the destructor declaration is not visible in
// this excerpt. Stops level forwarding and pulls our elements out.
420 media_engine.level_changed.disconnect(on_level_changed);
421 media_engine.pipeline.remove_many(audio_convert, audio_resample, pan, volume, level);
// Re-emitted toward the model track with this track's meter readings.
424 public signal void level_changed(double level_left, double level_right);
// Applies model-side pan/volume changes to the corresponding elements.
426 void on_parameter_changed(Model.Parameter parameter, double new_value) {
427 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_parameter_changed");
429 case Model.Parameter.PAN:
430 pan.set_property("panorama", new_value);
432 case Model.Parameter.VOLUME:
433 volume.set_property("volume", new_value);
// Forwards only the engine level messages that came from our own meter.
438 void on_level_changed(Gst.Object source, double level_left, double level_right) {
439 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_level_changed");
440 if (source == level) {
441 level_changed(level_left, level_right);
// Gaps are filled with the engine's shared silence source.
445 protected override Gst.Element empty_element() throws Error {
446 return media_engine.get_audio_silence();
// Links a new composition pad through the processing chain and requests a
// compatible mixer pad on the adder (track_element).
449 override void link_new_pad(Gst.Pad pad, Gst.Element track_element) {
450 Gst.Bin bin = (Gst.Bin) pad.get_parent_element();
451 if (!bin.link_many(audio_convert, audio_resample, level, pan, volume)) {
452 stderr.printf("could not link_new_pad for audio track");
455 Gst.Pad volume_pad = volume.get_pad("src");
456 adder_pad = track_element.request_new_pad(
457 track_element.get_compatible_pad_template(volume_pad.get_pad_template()), null);
459 if (volume_pad.link(adder_pad) != Gst.PadLinkReturn.OK) {
460 error("could not link to adder %s->%s\n", volume.name, track_element.name);
// Inverse of link_new_pad; also releases the requested adder pad.
464 public override void unlink_pad(Gst.Pad pad, Gst.Element track_element) {
465 Gst.Bin bin = (Gst.Bin) pad.get_parent_element();
466 bin.unlink_many(audio_convert, audio_resample, level, pan, volume, track_element);
467 track_element.release_request_pad(adder_pad);
470 public override Gst.Element? get_element() {
471 return media_engine.adder;
/**
 * Abstract output/export endpoint that attaches sinks to the engine's
 * audio (adder) and/or video (converter) elements, selected by the
 * MediaTypes bitmask.
 */
475 public abstract class MediaConnector : Object {
476 public enum MediaTypes { Audio = 1, Video = 2 }
477 MediaTypes media_types;
479 // AudioIndex and VideoIndex are the order elements are passed in to connect and disconnect
480 protected int AudioIndex = 0;
481 protected int VideoIndex = 1;
483 protected MediaConnector(MediaTypes media_types) {
484 this.media_types = media_types;
487 protected bool has_audio() {
488 return (media_types & MediaTypes.Audio) == MediaTypes.Audio;
491 protected bool has_video() {
492 return (media_types & MediaTypes.Video) == MediaTypes.Video;
// "new" hides GLib.Object.connect; do_disconnect is the inverse operation.
495 public new abstract void connect(MediaEngine media_engine, Gst.Pipeline pipeline,
496 Gst.Element[] elements);
497 public abstract void do_disconnect(MediaEngine media_engine, Gst.Pipeline pipeline,
498 Gst.Element[] elements);
/**
 * On-screen video output: an xvimagesink rendered into a Gtk widget's
 * X window via the XOverlay interface.
 */
501 public class VideoOutput : MediaConnector {
503 Gtk.Widget output_widget;
505 public VideoOutput(Gtk.Widget output_widget) throws Error {
506 base(MediaTypes.Video);
507 sink = make_element("xvimagesink");
508 sink.set("force-aspect-ratio", true);
509 this.output_widget = output_widget;
512 public override void connect(MediaEngine media_engine, Gst.Pipeline pipeline,
513 Gst.Element[] elements) {
514 emit(this, Facility.GRAPH, Level.INFO, "connecting");
// Direct the sink's video overlay at the widget's X window.
516 X.ID xid = Gdk.x11_drawable_get_xid(output_widget.window);
517 Gst.XOverlay overlay = (Gst.XOverlay) sink;
518 overlay.set_xwindow_id(xid);
520 // Once we've connected our video sink to a widget, it's best to turn off GTK
521 // double buffering for the widget; otherwise the video image flickers as it's resized.
522 output_widget.unset_flags(Gtk.WidgetFlags.DOUBLE_BUFFERED);
524 if (!pipeline.add(sink)) {
525 error("could not add sink");
527 if (!elements[VideoIndex].link(sink)) {
528 error("can't link converter with video sink!");
532 public override void do_disconnect(MediaEngine media_engine, Gst.Pipeline pipeline,
533 Gst.Element[] elements) {
534 emit(this, Facility.GRAPH, Level.INFO, "disconnecting");
535 elements[VideoIndex].unlink(sink);
536 pipeline.remove(sink);
/**
 * Speaker output: capsfilter -> autoaudiosink fed from the engine's adder.
 */
540 public class AudioOutput : MediaConnector {
541 Gst.Element audio_sink;
542 Gst.Element capsfilter;
544 public AudioOutput(Gst.Caps caps) throws Error {
545 base(MediaTypes.Audio);
546 audio_sink = make_element("autoaudiosink");
547 capsfilter = make_element("capsfilter");
548 capsfilter.set("caps", caps);
551 public override void connect(MediaEngine media_engine, Gst.Pipeline pipeline,
552 Gst.Element[] elements) {
553 pipeline.add_many(capsfilter, audio_sink);
555 if (!elements[AudioIndex].link_many(capsfilter, audio_sink)) {
// NOTE(review): a link failure here is only a warning, while
// VideoOutput.connect treats the same failure as fatal error() —
// presumably playback without audio output is tolerated; confirm.
556 warning("could not link audio_sink");
560 public override void do_disconnect(MediaEngine media_engine, Gst.Pipeline pipeline,
561 Gst.Element[] elements) {
562 elements[AudioIndex].unlink_many(capsfilter, audio_sink);
563 pipeline.remove_many(capsfilter, audio_sink);
/**
 * Export endpoint writing an Ogg file: vorbisenc for the audio branch and
 * theoraenc for the video branch, both muxed by oggmux into a filesink.
 */
567 public class OggVorbisExport : MediaConnector {
568 Gst.Element capsfilter;
569 Gst.Element export_sink;
571 Gst.Element file_sink;
572 Gst.Element video_export_sink;
574 public OggVorbisExport(MediaConnector.MediaTypes media_types, string filename, Gst.Caps caps)
578 file_sink = make_element("filesink");
579 file_sink.set("location", filename);
580 mux = make_element("oggmux");
583 capsfilter = make_element("capsfilter");
584 capsfilter.set("caps", caps);
585 export_sink = make_element("vorbisenc");
589 video_export_sink = make_element("theoraenc");
// Returns the output path currently set on the filesink.
593 public string get_filename() {
595 file_sink.get("location", out filename);
599 public override void connect(MediaEngine media_engine, Gst.Pipeline pipeline,
600 Gst.Element[] elements) {
601 pipeline.add_many(mux, file_sink);
// Audio branch: adder -> capsfilter -> vorbisenc -> oggmux.
605 pipeline.add_many(capsfilter, export_sink);
606 elements[AudioIndex].link_many(capsfilter, export_sink, mux);
// Video branch: converter -> theoraenc -> oggmux.
610 pipeline.add(video_export_sink);
612 if (!elements[VideoIndex].link(video_export_sink)) {
613 error("could not link converter to video_export_sink");
616 if (!video_export_sink.link(mux)) {
617 error("could not link video_export with mux");
// Tears down whichever branches connect() built, then the mux/filesink.
622 public override void do_disconnect(MediaEngine media_engine, Gst.Pipeline pipeline,
623 Gst.Element[] elements) {
625 elements[AudioIndex].unlink_many(capsfilter, export_sink, mux);
626 pipeline.remove_many(capsfilter, export_sink);
630 elements[VideoIndex].unlink_many(video_export_sink, mux);
631 pipeline.remove(video_export_sink);
634 mux.unlink(file_sink);
635 pipeline.remove_many(mux, file_sink);
/**
 * Owns the GStreamer pipeline and drives the PlayState machine for
 * playback, recording (PulseAudio -> wavenc -> filesink) and export.
 * Tracks register their compositions here; outputs attach via
 * connect_output()/disconnect_output().
 * NOTE(review): excerpt — many interior lines and the class's closing
 * brace are not visible in this listing.
 */
639 public class MediaEngine : MultiFileProgressInterface, Object {
640 const string MIN_GNONLIN = "0.10.11";
641 const string MIN_GST_PLUGINS_GOOD = "0.10.15";
642 const string MIN_GST_PLUGINS_BASE = "0.10.15";
643 public Gst.Pipeline pipeline;
644 public Gst.Bin record_bin;
646 public Gst.Element converter;
649 public Gst.Element adder;
651 protected Gst.State gst_state;
652 protected PlayState play_state = PlayState.STOPPED;
653 public int64 position; // current play position in ns
657 public Model.AudioTrack record_track;
658 public Model.Clip record_region;
659 Gst.Element audio_in;
660 Gst.Element record_capsfilter;
661 Gst.Element wav_encoder;
662 Gst.Element record_sink;
664 weak Model.Project project;
666 public signal void playstate_changed();
667 public signal void position_changed(int64 position);
668 public signal void pre_export(int64 length);
669 public signal void post_export(bool canceled);
670 public signal void callback_pulse();
671 public signal void level_changed(Gst.Object source, double level_left, double level_right);
672 public signal void record_completed();
673 public signal void link_for_playback(Gst.Element mux);
674 public signal void link_for_export(Gst.Element mux);
675 public signal void prepare_window();
676 public signal void error_occurred(string major_message, string? minor_message);
678 Gee.ArrayList<MediaTrack> tracks;
// Builds the base pipeline (silence -> audioconvert -> adder, plus an
// ffmpegcolorspace converter for video) and hooks up bus signal handlers.
680 public MediaEngine(Model.Project project, bool include_video) throws Error {
681 tracks = new Gee.ArrayList<MediaTrack>();
682 this.project = project;
683 playstate_changed.connect(project.on_playstate_changed);
684 pipeline = new Gst.Pipeline("pipeline");
685 pipeline.set_auto_flush_bus(false);
688 converter = make_element("ffmpegcolorspace");
689 pipeline.add(converter);
692 Gst.Element silence = get_audio_silence();
694 adder = make_element("adder");
696 Gst.Element audio_convert = make_element_with_name("audioconvert", "projectconvert");
697 pipeline.add_many(silence, audio_convert, adder);
699 if (!silence.link_many(audio_convert, adder)) {
700 error("silence: couldn't link");
703 Gst.Bus bus = pipeline.get_bus();
705 bus.add_signal_watch();
706 bus.message["error"] += on_error;
707 bus.message["warning"] += on_warning;
708 bus.message["eos"] += on_eos;
709 bus.message["state-changed"] += on_state_change;
710 bus.message["element"] += on_element;
// Verifies minimum plugin versions before the app starts; throws
// MediaError.MISSING_PLUGIN via check_version() on failure.
713 public static void can_run() throws Error {
714 Gst.Registry registry = Gst.Registry.get_default();
715 check_version(registry, "adder", "gst-plugins-base", MIN_GST_PLUGINS_BASE);
716 check_version(registry, "level", "gst-plugins-good", MIN_GST_PLUGINS_GOOD);
717 check_version(registry, "gnonlin", "gnonlin", View.MediaEngine.MIN_GNONLIN);
720 static void check_version(Gst.Registry registry, string plugin_name,
721 string package_name, string min_version) throws Error {
722 Gst.Plugin plugin = registry.find_plugin(plugin_name);
723 if (plugin == null) {
724 throw new MediaError.MISSING_PLUGIN(
725 "You must install %s to use this program".printf(package_name));
728 string version = plugin.get_version();
729 if (!version_at_least(version, min_version)) {
730 throw new MediaError.MISSING_PLUGIN(
731 "You have %s version %s, but this program requires at least version %s".printf(
732 package_name, version, min_version));
// Connectors receive { adder, converter } in Audio/VideoIndex order
// (see MediaConnector).
736 public void connect_output(MediaConnector connector) {
737 connector.connect(this, pipeline, { adder, converter });
// Flushes and drops to NULL before detaching, so elements can be removed.
740 public void disconnect_output(MediaConnector connector) {
741 pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, 0);
742 pipeline.set_state(Gst.State.NULL);
743 connector.do_disconnect(this, pipeline, {adder, converter});
// An audiotestsrc producing silence, capped to the project audio format.
746 public Gst.Element get_audio_silence() throws Error {
747 Gst.Element silence = make_element("audiotestsrc");
748 silence.set("wave", 4); // 4 is silence
749 Gst.Caps audio_cap = get_project_audio_caps();
750 foreach (Gst.Pad pad in silence.pads) {
751 pad.set_caps(audio_cap);
756 public int get_sample_rate() {
760 public int get_sample_width() {
764 public int get_sample_depth() {
768 public PlayState get_play_state() {
772 public void set_play_state(PlayState play_state) {
773 this.play_state = play_state;
// Integer PCM caps string parameterized by channel count and the
// sample rate/width/depth getters above.
776 protected Gst.Caps build_audio_caps(int num_channels) {
777 string caps = "audio/x-raw-int,rate=%d,channels=%d,width=%d,depth=%d";
778 caps = caps.printf(get_sample_rate(), num_channels, get_sample_width(), get_sample_depth());
779 return Gst.Caps.from_string(caps);
782 public Gst.Caps get_project_audio_caps() {
783 return build_audio_caps(CHANNELS_PER_TRACK_PLAYBACK);
786 public Gst.Caps get_project_audio_export_caps() {
787 return Gst.Caps.from_string(
788 "audio/x-raw-float,rate=48000,channels=2,width=32");
791 void on_warning(Gst.Bus bus, Gst.Message message) {
792 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_warning");
795 message.parse_warning(out error, out text);
799 void on_error(Gst.Bus bus, Gst.Message message) {
800 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_error");
803 message.parse_error(out error, out text);
805 project.print_graph(pipeline, "bus_error");
// End-of-stream only matters while exporting: shut the pipeline down.
808 void on_eos(Gst.Bus bus, Gst.Message message) {
809 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_eos");
810 if (play_state == PlayState.EXPORTING)
811 pipeline.set_state(Gst.State.NULL);
// Extracts RMS readings from "level" element messages during playback and
// rebroadcasts them via level_changed (mono falls back to left == right).
814 void on_element(Gst.Bus bus, Gst.Message message) {
815 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_element");
816 unowned Gst.Structure structure = message.get_structure();
818 if (play_state == PlayState.PLAYING && structure.get_name().to_string() == "level") {
819 Gst.Value? rms = structure.get_value("rms");
820 uint size = rms.list_get_size();
821 Gst.Value? temp = rms.list_get_value(0);
822 double level_left = temp.get_double();
823 double level_right = level_left;
826 temp = rms.list_get_value(1);
827 level_right = temp.get_double();
829 level_changed(message.src, level_left, level_right);
// Tracks the top-level pipeline's Gst.State; ignores child elements'
// state-changed messages.
833 void on_state_change(Gst.Bus bus, Gst.Message message) {
834 if (message.src != pipeline) {
835 emit(this, Facility.GRAPH, Level.VERBOSE,
836 "on_state_change returning. message from %s".printf(message.src.get_name()));
844 message.parse_state_changed(out old_state, out new_state, out pending);
846 emit(this, Facility.GRAPH, Level.INFO,
847 "on_state_change old(%s) new(%s) pending(%s)".printf(old_state.to_string(),
848 new_state.to_string(), pending.to_string()));
849 if (new_state == gst_state)
852 gst_state = new_state;
// Advances the PlayState machine after a pipeline state change; each case
// checks that gst_state matches what the logical state expects.
856 protected bool do_state_change() {
858 switch (play_state) {
859 case PlayState.STOPPED:
860 if (gst_state != Gst.State.PAUSED) {
861 pipeline.set_state(Gst.State.PAUSED);
866 case PlayState.PRE_EXPORT:
867 if (gst_state != Gst.State.PAUSED) {
870 do_paused_state_export();
872 case PlayState.EXPORTING:
873 if (gst_state != Gst.State.NULL)
877 case PlayState.CANCEL_EXPORT:
878 if (gst_state != Gst.State.NULL)
882 case PlayState.CLOSING:
885 case PlayState.PRE_RECORD_NULL:
886 if (gst_state == Gst.State.NULL) {
888 start_record(record_region);
889 } catch (GLib.Error error) {
890 error_occurred("An error occurred starting the recording.", null);
891 warning("An error occurred starting the recording: %s", error.message);
896 case PlayState.PRE_RECORD:
897 if (gst_state == Gst.State.PAUSED) {
898 do_play(PlayState.RECORDING);
902 case PlayState.POST_RECORD:
903 if (gst_state != Gst.State.NULL) {
904 set_gst_state(Gst.State.NULL);
907 set_gst_state(Gst.State.PAUSED);
908 play_state = PlayState.STOPPED;
// Export step 1: from NULL, pause the pipeline and wait for PRE_EXPORT.
915 protected virtual void do_null_state_export(int64 length) {
917 play_state = PlayState.PRE_EXPORT;
918 pipeline.set_state(Gst.State.PAUSED);
// Export step 2: start playing and poll progress every 50 ms.
921 void do_paused_state_export() {
922 play_state = PlayState.EXPORTING;
924 if (callback_id == 0)
925 callback_id = Timeout.add(50, on_callback);
926 pipeline.set_state(Gst.State.PLAYING);
929 void end_export(bool deleted) {
930 play_state = PlayState.STOPPED;
933 post_export(deleted);
// Seeks to pos and reports the new position.
936 public void go(int64 pos) {
937 if (position == pos) {
938 pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, position);
946 // We ignore the return value of seek_simple(); sometimes it returns false even when
948 pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, position);
949 position_changed(position);
// Periodic (50 ms) pulse: queries pipeline position and dispatches
// progress notifications according to the current play state.
953 if ((play_state == PlayState.STOPPED && !playing) ||
954 (play_state == PlayState.POST_RECORD)) {
959 Gst.Format format = Gst.Format.TIME;
961 if (pipeline.query_position(ref format, out time) && format == Gst.Format.TIME) {
965 if (play_state == PlayState.PLAYING) {
// Clamp playback to the project length.
966 if (position >= project.get_length()) {
967 go(project.get_length());
970 position_changed(time);
971 } else if (play_state == PlayState.EXPORTING) {
972 if (time > project.get_length()) {
973 fraction_updated(1.0);
976 fraction_updated(time / (double) project.get_length());
977 } else if (play_state == PlayState.RECORDING) {
978 position_changed(time);
// Pausing while recording finalizes the WAV file via EOS first.
984 public virtual void pause() {
985 if (project.transport_is_recording()) {
986 record_bin.send_event(new Gst.Event.eos());
987 play_state = PlayState.POST_RECORD;
992 play_state = PlayState.STOPPED;
994 set_gst_state(Gst.State.PAUSED);
998 // TODO: don't expose Gst.State
999 public void set_gst_state(Gst.State state) {
1000 if (pipeline.set_state(state) == Gst.StateChangeReturn.FAILURE)
1001 error("can't set state");
1004 void seek(Gst.SeekFlags flags, int64 pos) {
1005 // We do *not* check the return value of seek_simple here: it will often
1006 // be false when seeking into a GnlSource which we have not yet played,
1007 // even though the seek appears to work fine in that case.
1008 pipeline.seek_simple(Gst.Format.TIME, flags, pos);
// Starts playing (or recording) from the current position and arms the
// 50 ms progress callback.
1011 public void do_play(PlayState new_state) {
1012 seek(Gst.SeekFlags.FLUSH, position);
1013 play_state = new_state;
1021 set_gst_state(Gst.State.PLAYING);
1022 if (callback_id == 0)
1023 callback_id = Timeout.add(50, on_callback);
1027 public void start_export(string filename) {
1028 file_updated(filename, 0);
1029 do_null_state_export(project.get_length());
1033 play_state = PlayState.CANCEL_EXPORT;
1034 pipeline.set_state(Gst.State.NULL);
1037 public void complete() {
1038 pipeline.set_state(Gst.State.NULL);
1041 public void on_load_complete() {
1042 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_load_complete");
1043 play_state = PlayState.STOPPED;
1044 pipeline.set_state(Gst.State.PAUSED);
// Grows the in-progress recording region as the play head advances.
1047 public void on_callback_pulse() {
1048 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_callback_pulse");
1049 if (record_region != null) {
1050 record_region.duration = position - record_region.start;
1054 public void close() {
1055 if (gst_state != Gst.State.NULL) {
1056 play_state = PlayState.CLOSING;
1057 set_gst_state(Gst.State.NULL);
1059 play_state = PlayState.CLOSED;
1061 playstate_changed();
// Dismantles the recording bin after recording stops (pipeline must be
// NULL) and returns the pipeline to PAUSED.
1064 public void post_record() {
1065 assert(gst_state == Gst.State.NULL);
1067 record_track._delete_clip(record_region);
1069 audio_in.unlink_many(record_capsfilter, wav_encoder, record_sink);
1070 record_bin.remove_many(audio_in, record_capsfilter, wav_encoder, record_sink);
1071 pipeline.remove(record_bin);
1074 record_region = null;
1075 record_track = null;
1076 audio_in = record_capsfilter = null;
1077 wav_encoder = record_sink = null;
1078 set_gst_state(Gst.State.PAUSED);
// Kicks off recording: creates the target clip/file, then drops to NULL so
// do_state_change() can call start_record() once the pipeline settles.
1081 public void record(Model.AudioTrack track) {
1082 assert(gst_state != Gst.State.NULL);
1083 play_state = PlayState.PRE_RECORD_NULL;
1084 set_gst_state(Gst.State.NULL);
1085 record_track = track;
1087 string filename = new_audio_filename(track);
1088 Model.ClipFile clip_file = new Model.ClipFile(filename);
1089 record_region = new Model.Clip(clip_file, Model.MediaType.AUDIO, "", position, 0, 1, true);
// Builds the record bin: pulsesrc -> capsfilter -> wavenc -> filesink.
1092 public void start_record(Model.Clip region) throws Error {
1093 if (project.transport_is_recording())
1096 if (project.transport_is_playing())
1097 error("can't switch from playing to recording");
1099 if (gst_state != Gst.State.NULL)
1100 error("can't record now: %s", gst_state.to_string());
1101 record_bin = new Gst.Bin("recordingbin");
1102 record_track._move(record_region, position);
1103 record_track.clip_added(record_region, true);
1104 audio_in = make_element("pulsesrc");
1105 audio_in.set_property("device", "source.hw0"); /// TODO
1106 record_capsfilter = make_element("capsfilter");
1107 record_capsfilter.set("caps", get_record_audio_caps());
1108 record_sink = make_element("filesink");
1109 record_sink.set("location", record_region.clipfile.filename);
1110 wav_encoder = make_element("wavenc");
1112 record_bin.add_many(audio_in, record_capsfilter, wav_encoder, record_sink);
1113 if (!audio_in.link_many(record_capsfilter, wav_encoder, record_sink))
1114 error("audio_in: couldn't link");
1115 pipeline.add(record_bin);
1117 play_state = PlayState.PRE_RECORD;
1118 set_gst_state(Gst.State.PAUSED); // we must advance to PAUSED before we can seek
1121 protected Gst.Caps get_record_audio_caps() {
1122 return build_audio_caps(CHANNELS_PER_TRACK_RECORD);
// Picks the first non-existing "<base>_<i>.wav" name under the project's
// audio directory.
1125 string new_audio_filename(Model.Track track) {
1127 string base_path = project.get_audio_path();
1128 GLib.DirUtils.create(base_path, 0777);
1129 string base_name = Path.build_filename(base_path, generate_base(track.display_name));
1131 string name = "%s_%d.wav".printf(base_name, i);
1132 if (!FileUtils.test(name, FileTest.EXISTS)) {
// Lowercases the track name and replaces filesystem-unfriendly characters.
// NOTE(review): canon()'s return value is discarded here — verify this
// binding mutates base_name in place rather than returning a new string.
1139 string generate_base(string name) {
1140 string base_name = name.down();
1141 base_name.canon("abcdefghijklmnopqrstuvwxyz1234567890", '_');
// Creates the matching MediaTrack when a model track appears.
1145 public void on_track_added(Model.Track track) {
1146 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_track_added");
1147 MediaTrack? media_track = null;
1149 switch (track.media_type()) {
1150 case Model.MediaType.AUDIO:
1151 media_track = create_audio_track(track);
1153 case Model.MediaType.VIDEO:
1154 media_track = new MediaVideoTrack(this, track, converter);
1157 } catch(GLib.Error error) {
1158 error_occurred("An error occurred adding the track.", null);
1159 warning("An error occurred adding the track: %s", error.message);
1163 media_track.track_removed.connect(on_track_removed);
1164 media_track.error_occurred.connect(on_error_occurred);
1166 tracks.add(media_track);
1169 MediaTrack create_audio_track(Model.Track track) throws Error {
1170 Model.AudioTrack? model_track = track as Model.AudioTrack;
1171 MediaAudioTrack? audio_track = null;
1172 if (model_track != null) {
1173 audio_track = new MediaAudioTrack(this, model_track);
1180 void on_track_removed(MediaTrack track) {
1181 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_track_removed");
1182 tracks.remove(track);
// Relays errors from individual tracks to the engine's own signal.
1185 void on_error_occurred(string major_message, string? minor_message) {
1186 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_error_occurred");
1187 error_occurred(major_message, minor_message);