1 /* Copyright 2009 Yorba Foundation
3 * This software is licensed under the GNU Lesser General Public License
4 * (version 2.1 or later). See the COPYING file in this distribution.
// Stream kind selector used throughout the ClipFile caps accessors.
// NOTE(review): enum members are elided in this excerpt; AUDIO and VIDEO
// are referenced by the code below — confirm the full member list upstream.
11 public enum MediaType {
// Gap: a span of time [start, end] used for gap/overlap arithmetic.
// NOTE(review): the class header and field declarations are elided in
// this excerpt; start/end are presumably int64 timestamps — confirm.
20 public Gap(int64 start, int64 end) {
// An empty gap presumably means start >= end (body elided) — verify.
25 public bool is_empty() {
// Intersection clamps both gaps to their common range: max of starts,
// min of ends. Non-overlapping inputs yield a gap with start >= end,
// i.e. one that is_empty() would report as empty.
29 public Gap intersect(Gap g) {
30 return new Gap(int64.max(start, g.start), int64.min(end, g.end));
// ClipFile: metadata holder for one media file on disk — filename,
// per-stream GStreamer caps, duration, online state, and a small
// thumbnail used while dragging clips in the UI.
// NOTE(review): many interior lines are elided in this excerpt; the
// comments below describe only what is visible here.
34 public class ClipFile : Object {
35 public string filename;
// Visible inside an elided length accessor: reading the length while
// the clip is offline is flagged with a warning but tolerated.
40 warning("retrieving length while clip offline");
52 public Gst.Caps video_caps; // or null if no video
53 public Gst.Caps audio_caps; // or null if no audio
54 public Gdk.Pixbuf thumbnail = null;
// Fired when this file's metadata changes; Clip subscribes to it
// (see Clip.on_clipfile_updated later in this file).
56 public signal void updated();
58 public ClipFile(string filename, int64 length = 0) {
59 this.filename = filename;
// "Online" = the media is currently available; backing field elided.
64 public bool is_online() {
68 public void set_online(bool o) {
69 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "set_online");
// Scales the incoming pixbuf to a fixed 64x44 drag thumbnail.
74 public void set_thumbnail(Gdk.Pixbuf b) {
75 // TODO: Investigate this
76 // 56x56 - 62x62 icon size does not work for some reason when
77 // we display the thumbnail while dragging the clip.
79 thumbnail = b.scale_simple(64, 44, Gdk.InterpType.BILINEAR);
// True when caps for the requested media type exist and contain at
// least one structure; guards get_caps_structure() below.
82 public bool has_caps_structure(MediaType m) {
83 if (m == MediaType.AUDIO) {
84 if (audio_caps == null || audio_caps.get_size() < 1)
86 } else if (m == MediaType.VIDEO) {
87 if (video_caps == null || video_caps.get_size() < 1)
// Non-VIDEO types fall through to the audio test — there is no
// explicit AUDIO branch here.
93 public bool is_of_type(MediaType t) {
94 if (t == MediaType.VIDEO)
95 return video_caps != null;
96 return audio_caps != null;
// Fetches structure 0 of the requested stream's caps; presumably
// returns false when the caps are absent/empty (returns elided).
99 bool get_caps_structure(MediaType m, out Gst.Structure s) {
100 if (!has_caps_structure(m))
102 if (m == MediaType.AUDIO) {
103 s = audio_caps.get_structure(0);
104 } else if (m == MediaType.VIDEO) {
105 s = video_caps.get_structure(0);
// The getters below share one shape: look up the relevant caps
// structure, then read a single field from it, returning false on
// any failure along the way.
110 public bool get_frame_rate(out Fraction rate) {
111 Gst.Structure structure;
112 if (!get_caps_structure(MediaType.VIDEO, out structure))
114 return structure.get_fraction("framerate", out rate.numerator, out rate.denominator);
117 public bool get_dimensions(out int w, out int h) {
120 if (!get_caps_structure(MediaType.VIDEO, out s))
123 return s.get_int("width", out w) && s.get_int("height", out h);
126 public bool get_sample_rate(out int rate) {
128 if (!get_caps_structure(MediaType.AUDIO, out s))
131 return s.get_int("rate", out rate);
// FourCC of the raw video format (GStreamer 0.10-era API).
134 public bool get_video_format(out uint32 fourcc) {
137 if (!get_caps_structure(MediaType.VIDEO, out s))
140 return s.get_fourcc("format", out fourcc);
143 public bool get_num_channels(out int channels) {
145 if (!get_caps_structure(MediaType.AUDIO, out s)) {
149 return s.get_int("channels", out channels);
// Human-readable channel-count label; even counts are rendered as
// "Surround N.1". NOTE(review): the mono/stereo branches are elided —
// confirm they exist before relying on this for i <= 2.
152 public bool get_num_channels_string(out string s) {
154 if (!get_num_channels(out i))
161 else if ((i % 2) == 0)
162 s = "Surround %d.1".printf(i - 1);
// Fetcher: abstract base for one-shot metadata/thumbnail pipelines.
// Owns the common GStreamer elements and the shared error path;
// subclasses implement pad hookup and state-change harvesting.
169 public abstract class Fetcher : Object {
170 protected Gst.Element filesrc;
171 protected Gst.Element decodebin;
172 protected Gst.Pipeline pipeline;
174 public ClipFile clipfile;
175 public string error_string;
177 protected abstract void on_pad_added(Gst.Pad pad);
178 protected abstract void on_state_change(Gst.Bus bus, Gst.Message message);
// Presumably emitted when the fetch completes (elided emit site) —
// subscribers receive the fetcher so they can inspect error_string.
180 public signal void ready(Fetcher fetcher);
// Records the failure message and tears the pipeline down to NULL.
182 protected void do_error(string error) {
183 error_string = error;
184 pipeline.set_state(Gst.State.NULL);
// Bus warning handler: parses the message (handling of the parsed
// values is elided in this excerpt).
187 protected void on_warning(Gst.Bus bus, Gst.Message message) {
188 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_warning");
191 message.parse_warning(out error, out text);
// Bus error handler: parses the message; presumably routed into
// do_error() (elided) so the pipeline is shut down.
195 protected void on_error(Gst.Bus bus, Gst.Message message) {
196 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_error");
199 message.parse_error(out error, out text);
// ClipFetcher: probes a file's streams and duration by briefly playing
// filesrc ! decodebin with fakesinks on every pad, then drops to NULL.
204 public class ClipFetcher : Fetcher {
// Relayed into clipfile.set_online via the connect below.
205 public signal void clipfile_online(bool online);
207 public ClipFetcher(string filename) throws Error {
208 clipfile = new ClipFile(filename);
210 clipfile_online.connect(clipfile.set_online);
212 filesrc = make_element("filesrc");
213 filesrc.set("location", filename);
// NOTE(review): result is cast to Gst.Bin although the Fetcher field is
// declared Gst.Element — confirm this compiles as intended upstream.
215 decodebin = (Gst.Bin) make_element("decodebin");
216 pipeline = new Gst.Pipeline("pipeline");
217 pipeline.set_auto_flush_bus(false);
// NOTE(review): this null check comes AFTER set_auto_flush_bus(), so a
// null pipeline would already have crashed one line earlier; the check
// should precede the first use.
218 if (pipeline == null)
219 error("can't construct pipeline");
220 pipeline.add_many(filesrc, decodebin);
222 if (!filesrc.link(decodebin))
223 error("can't link filesrc");
224 decodebin.pad_added.connect(on_pad_added);
226 Gst.Bus bus = pipeline.get_bus();
228 bus.add_signal_watch();
// NOTE(review): `+=` is the deprecated signal-connect syntax; the rest
// of the file uses .connect() — worth normalizing when editing for real.
229 bus.message["state-changed"] += on_state_change;
230 bus.message["error"] += on_error;
231 bus.message["warning"] += on_warning;
// Metadata is gathered by actually playing: on_state_change harvests
// caps and duration once PLAYING is reached, then stops the pipeline.
234 pipeline.set_state(Gst.State.PLAYING);
237 public string get_filename() { return clipfile.filename; }
// Attach a fakesink to each pad decodebin exposes so the pipeline can
// preroll; both the video and non-video branches do the same dance.
239 protected override void on_pad_added(Gst.Pad pad) {
240 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_pad_added");
242 Gst.Element fake_sink;
244 if (pad.caps.to_string().has_prefix("video")) {
245 fake_sink = make_element("fakesink");
246 pipeline.add(fake_sink);
247 fake_pad = fake_sink.get_static_pad("sink");
249 if (!fake_sink.sync_state_with_parent()) {
250 error("could not sync state with parent");
// Non-video branch: identical fakesink hookup (duplicated code — a
// candidate for extraction once the full bodies are in view).
253 fake_sink = make_element("fakesink");
254 pipeline.add(fake_sink);
255 fake_pad = fake_sink.get_static_pad("sink");
257 if (!fake_sink.sync_state_with_parent()) {
258 error("could not sync state with parent");
// First decodebin pad whose caps string starts with `prefix`, or null.
267 Gst.Pad? get_pad(string prefix) {
268 foreach(Gst.Pad pad in decodebin.pads) {
269 string caps = pad.caps.to_string();
270 if (caps.has_prefix(prefix)) {
277 protected override void on_state_change(Gst.Bus bus, Gst.Message message) {
278 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_state_change");
// Only the top-level pipeline's transitions matter here.
279 if (message.src != pipeline)
286 message.parse_state_changed(out old_state, out new_state, out pending);
287 if (new_state == old_state)
// One-shot probe: on PLAYING, record stream caps, query the duration,
// mark the clip online, then drop the pipeline back to NULL.
290 if (new_state == Gst.State.PLAYING) {
291 Gst.Pad? pad = get_pad("video");
293 clipfile.video_caps = pad.caps;
296 pad = get_pad("audio");
298 clipfile.audio_caps = pad.caps;
301 Gst.Format format = Gst.Format.TIME;
303 if (!pipeline.query_duration(ref format, out length) ||
304 format != Gst.Format.TIME) {
305 do_error("Can't fetch length");
308 clipfile.length = length;
310 clipfile_online(true);
311 pipeline.set_state(Gst.State.NULL);
312 } else if (new_state == Gst.State.NULL) {
// ThumbnailFetcher: prerolls a decode pipeline PAUSED at `time` and
// captures a single frame via ThumbnailSink, storing it on the ClipFile.
318 public class ThumbnailFetcher : Fetcher {
319 ThumbnailSink thumbnail_sink;
320 Gst.Element colorspace;
325 public ThumbnailFetcher(ClipFile f, int64 time) throws Error {
327 seek_position = time;
// Restrict decoding to raw RGB/YUV video; other streams are ignored.
329 SingleDecodeBin single_bin = new SingleDecodeBin (
330 Gst.Caps.from_string ("video/x-raw-rgb; video/x-raw-yuv"),
331 "singledecoder", f.filename);
333 pipeline = new Gst.Pipeline("pipeline");
334 pipeline.set_auto_flush_bus(false);
336 thumbnail_sink = new ThumbnailSink();
337 thumbnail_sink.have_thumbnail.connect(on_have_thumbnail);
// ffmpegcolorspace converts whatever raw format arrives into what the
// sink needs (GStreamer 0.10-era element).
339 colorspace = make_element("ffmpegcolorspace");
341 pipeline.add_many(single_bin, thumbnail_sink, colorspace);
// Video pads are linked into colorspace lazily in on_pad_added below.
343 single_bin.pad_added.connect(on_pad_added);
345 colorspace.link(thumbnail_sink);
347 Gst.Bus bus = pipeline.get_bus();
349 bus.add_signal_watch();
// NOTE(review): deprecated `+=` signal-connect syntax, as in ClipFetcher.
350 bus.message["state-changed"] += on_state_change;
351 bus.message["error"] += on_error;
352 bus.message["warning"] += on_warning;
354 have_thumbnail = false;
// PAUSED is enough to preroll one frame; the actual seek happens when
// PAUSED is reached (see on_state_change below).
356 pipeline.set_state(Gst.State.PAUSED);
359 void on_have_thumbnail(Gdk.Pixbuf buf) {
360 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_have_thumbnail");
362 have_thumbnail = true;
363 clipfile.set_thumbnail(buf);
// Only video pads get linked into the converter; others are dropped.
367 protected override void on_pad_added(Gst.Pad pad) {
368 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_pad_added");
369 Gst.Caps c = pad.get_caps();
371 if (c.to_string().has_prefix("video")) {
372 pad.link(colorspace.get_static_pad("sink"));
376 protected override void on_state_change(Gst.Bus bus, Gst.Message message) {
377 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_state_change");
378 if (message.src != pipeline)
383 Gst.State pending_state;
385 message.parse_state_changed (out old_state, out new_state, out pending_state);
// Ignore same-state notifications EXCEPT re-entering PAUSED, which is
// how the post-seek preroll announces itself.
386 if (new_state == old_state &&
387 new_state != Gst.State.PAUSED)
// On PAUSED: flush-seek to the requested frame; once the thumbnail has
// been captured the pipeline is shut down (branch detail elided).
390 if (new_state == Gst.State.PAUSED) {
393 pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_position);
396 pipeline.set_state(Gst.State.NULL);
398 } else if (new_state == Gst.State.NULL) {
// Clip: one placed instance of a ClipFile on the timeline. Tracks its
// timeline position (start), its offset into the media (media_start),
// and its duration, clamping edits against the file's length.
// NOTE(review): many interior lines are elided; comments describe only
// what is visible here.
404 public class Clip : Object {
405 public ClipFile clipfile;
406 public MediaType type;
407 // TODO: If a clip is being recorded, we don't want to set duration in the MediaClip file.
408 // Address when handling multiple track recording. This is an ugly hack.
409 public bool is_recording;
// Visible inside the (elided) start property setter: notify listeners.
420 start_changed(_start);
427 public int64 media_start {
434 public int64 duration {
441 // saturating the duration
// Duration setter clamps so media_start + duration never exceeds the
// source file's length.
446 if (value + _media_start > clipfile.length) {
447 // saturating the duration
448 value = clipfile.length - media_start;
454 duration_changed(_duration);
// end = start + duration (exclusive end point on the timeline).
463 get { return start + duration; }
466 public signal void moved(Clip clip);
467 public signal void updated(Clip clip);
468 public signal void media_start_changed(int64 media_start);
469 public signal void duration_changed(int64 duration);
470 public signal void start_changed(int64 start);
471 public signal void removed(Clip clip);
473 public Clip(ClipFile clipfile, MediaType t, string name,
474 int64 start, int64 media_start, int64 duration, bool is_recording) {
475 this.is_recording = is_recording;
476 this.clipfile = clipfile;
// A clip starts connected only if its media is currently online.
479 this.connected = clipfile.is_online();
480 this.set_media_start_duration(media_start, duration);
482 clipfile.updated.connect(on_clipfile_updated);
485 public void gnonlin_connect() { connected = true; }
486 public void gnonlin_disconnect() { connected = false; }
488 void on_clipfile_updated(ClipFile f) {
489 emit(this, Facility.SIGNAL_HANDLERS, Level.INFO, "on_clipfile_updated");
493 // TODO: Assigning to oneself has the side-effect of firing signals.
494 // fire signals directly. Make certain that loading a file still works
495 // properly in this case.
// Re-applies current values so the clamping + change signals re-run
// against the (possibly changed) file length.
496 set_media_start_duration(media_start, duration);
// True when [start, start+length) overlaps this clip's timeline span.
508 public bool overlap_pos(int64 start, int64 length) {
509 return start < this.start + this.duration &&
510 this.start < start + length;
// Snap this clip's position to whichever of other's edges lies within
// `pad` of one of our edges; the elided fallthrough presumably returns
// the unsnapped start.
513 public int64 snap(Clip other, int64 pad) {
514 if (time_in_range(start, other.start, pad)) {
516 } else if (time_in_range(start, other.end, pad)) {
518 } else if (time_in_range(end, other.start, pad)) {
519 return other.start - duration;
520 } else if (time_in_range(end, other.end, pad)) {
521 return other.end - duration;
// Snap a free coordinate `s` to this clip's start or end when within
// `span`; returns whether a snap occurred.
526 public bool snap_coord(out int64 s, int64 span) {
527 if (time_in_range(s, start, span)) {
530 } else if (time_in_range(s, end, span)) {
// Visible inside an (elided) copy method: duplicates everything except
// the recording flag, which is always false on the copy.
538 return new Clip(clipfile, type, name, start, media_start, duration, false);
// A clip is "trimmed" when it shows less than the whole file. Offline
// files can't be compared (length unknown), hence the early branch.
541 public bool is_trimmed() {
542 if (!clipfile.is_online())
544 return duration != clipfile.length;
// Trim from one edge: WEST moves media_start and shrinks/grows the
// duration in tandem, clamped at both the media start and zero length.
547 public void trim(int64 delta, Gdk.WindowEdge edge) {
549 case Gdk.WindowEdge.WEST:
550 if (media_start + delta < 0) {
551 delta = -media_start;
554 if (duration - delta < 0) {
559 set_media_start_duration(media_start + delta, duration - delta);
561 case Gdk.WindowEdge.EAST:
// Central clamped setter: fixes negative media_start (elided branch)
// and saturates against the file length, then commits both backing
// fields before firing the change signals.
567 public void set_media_start_duration(int64 media_start, int64 duration) {
568 if (media_start < 0) {
576 if (clipfile.is_online() && media_start + duration > clipfile.length) {
577 // We are saturating the value
578 media_start = clipfile.length - duration;
581 _media_start = media_start;
582 _duration = duration;
585 media_start_changed(_media_start);
586 duration_changed(_duration);
// Serializes this clip as one XML element into the project file.
592 public void save(FileStream f, int id) {
594 " <clip id=\"%d\" name=\"%s\" start=\"%" + int64.FORMAT + "\" " +
595 "media-start=\"%" + int64.FORMAT + "\" duration=\"%" + int64.FORMAT + "\"/>\n",
596 id, name, start, media_start, duration);
// FetcherCompletion: base callback object invoked when a Fetcher
// finishes; subclasses override complete() to consume the result.
600 public class FetcherCompletion {
601 public FetcherCompletion() {
// Default implementation body is elided in this excerpt — presumably
// a no-op that subclasses override.
604 public virtual void complete(Fetcher fetcher) {