sbGStreamerVideoTranscode.cpp
Go to the documentation of this file.
1 /*
2  *=BEGIN SONGBIRD GPL
3  *
4  * This file is part of the Songbird web player.
5  *
6  * Copyright(c) 2005-2010 POTI, Inc.
7  * http://www.songbirdnest.com
8  *
9  * This file may be licensed under the terms of of the
10  * GNU General Public License Version 2 (the ``GPL'').
11  *
12  * Software distributed under the License is distributed
13  * on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
14  * express or implied. See the GPL for the specific language
15  * governing rights and limitations.
16  *
17  * You should have received a copy of the GPL along with this
18  * program. If not, go to http://www.gnu.org/licenses/gpl.html
19  * or write to the Free Software Foundation, Inc.,
20  * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
21  *
22  *=END SONGBIRD GPL
23  */
24 
26 
27 #include <sbIGStreamerService.h>
28 
29 #include <sbFileUtils.h>
31 #include <sbStringUtils.h>
32 #include <sbClassInfoUtils.h>
34 #include <sbMemoryUtils.h>
35 #include <sbStringBundle.h>
36 #include <sbTranscodeUtils.h>
37 
38 #include <nsServiceManagerUtils.h>
39 #include <nsThreadUtils.h>
40 #include <nsStringAPI.h>
41 #include <nsUnicharUtils.h>
42 #include <nsArrayUtils.h>
43 #include <nsNetUtil.h>
44 
45 #include <nsIFile.h>
46 #include <nsIURI.h>
47 #include <nsIFileURL.h>
48 #include <nsIBinaryInputStream.h>
49 #include <sbIMediaItem.h>
50 #include <nsIProperty.h>
51 
52 #include <gst/tag/tag.h>
53 
54 #include <prlog.h>
55 
/* How often (in milliseconds) the progress timer fires while a transcode
   is running; see StartProgressReporting()/Notify(). */
#define PROGRESS_INTERVAL 200 /* milliseconds */

/* NSPR logging: LOG() at warning level, TRACE() at debug level. Both
   compile to nothing in builds without PR_LOGGING. */
#ifdef PR_LOGGING
static PRLogModuleInfo* gGStreamerVideoTranscode =
  PR_NewLogModule("sbGStreamerVideoTranscode");
#define LOG(args) PR_LOG(gGStreamerVideoTranscode, PR_LOG_WARNING, args)
#define TRACE(args) PR_LOG(gGStreamerVideoTranscode, PR_LOG_DEBUG, args)
#else /* PR_LOGGING */
#define LOG(args) /* nothing */
#define TRACE(args) /* nothing */
#endif /* PR_LOGGING */
71 
81 
89 
// Thread-safe nsIClassInfo boilerplate for sbGStreamerVideoTranscoder.
NS_DECL_CLASSINFO(sbGStreamerVideoTranscoder);
NS_IMPL_THREADSAFE_CI(sbGStreamerVideoTranscoder);
92 
/* Constructor: initializes job state and allocates the build lock.
   NOTE(review): one initializer line (original line 94, immediately after
   the ':') was lost in extraction — confirm against the repository copy. */
sbGStreamerVideoTranscoder::sbGStreamerVideoTranscoder() :
  mStatus(sbIJobProgress::STATUS_RUNNING), // There is no NOT_STARTED
  mPipelineBuilt(PR_FALSE),
  mWaitingForCaps(PR_FALSE),
  mAudioSrc(NULL),
  mVideoSrc(NULL),
  mAudioQueueSrc(NULL),
  mVideoQueueSrc(NULL),
  mUseAudio(PR_FALSE),
  mUseVideo(PR_FALSE),
  mUseMuxer(PR_FALSE)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  // If lock creation fails, mBuildLock stays null and we bail out here.
  mBuildLock = nsAutoLock::NewLock("VideoTranscoder lock");
  NS_ENSURE_TRUE (mBuildLock, /* void */);
}
111 
112 sbGStreamerVideoTranscoder::~sbGStreamerVideoTranscoder()
113 {
114  TRACE(("%s[%p]", __FUNCTION__, this));
115 
116  nsresult rv = CleanupPipeline();
117  NS_ENSURE_SUCCESS (rv, /* void */);
118 
119  if (mBuildLock) {
120  nsAutoLock::DestroyLock(mBuildLock);
121  }
122 }
123 
124 /* nsITimerCallback interface implementation */
125 
126 NS_IMETHODIMP
127 sbGStreamerVideoTranscoder::Notify(nsITimer *aTimer)
128 {
129  TRACE(("%s[%p]", __FUNCTION__, this));
130 
131  NS_ENSURE_ARG_POINTER(aTimer);
132 
133  OnJobProgress();
134 
135  return NS_OK;
136 }
137 
138 /* sbITranscodeJob interface implementation */
139 
140 NS_IMETHODIMP
141 sbGStreamerVideoTranscoder::SetConfigurator(
142  sbITranscodingConfigurator *aConfigurator)
143 {
144  TRACE(("%s[%p]", __FUNCTION__, this));
145 
146  mConfigurator = aConfigurator;
147  return NS_OK;
148 }
149 
150 NS_IMETHODIMP
151 sbGStreamerVideoTranscoder::GetConfigurator(
152  sbITranscodingConfigurator **aConfigurator)
153 {
154  TRACE(("%s[%p]", __FUNCTION__, this));
155  NS_ENSURE_ARG_POINTER(aConfigurator);
156 
157  NS_IF_ADDREF (*aConfigurator = mConfigurator);
158 
159  return NS_OK;
160 }
161 
162 NS_IMETHODIMP
163 sbGStreamerVideoTranscoder::SetSourceURI(const nsAString& aSourceURI)
164 {
165  TRACE(("%s[%p]", __FUNCTION__, this));
166 
167  // Can only set this while we don't have a pipeline built/running.
168  NS_ENSURE_STATE (!mPipelineBuilt);
169 
170  mSourceURI = aSourceURI;
171 
172  // Use the source URI as the resource name too.
173  mResourceDisplayName = mSourceURI;
174  return NS_OK;
175 }
176 
177 NS_IMETHODIMP
178 sbGStreamerVideoTranscoder::GetSourceURI(nsAString& aSourceURI)
179 {
180  TRACE(("%s[%p]", __FUNCTION__, this));
181 
182  aSourceURI = mSourceURI;
183  return NS_OK;
184 }
185 
186 NS_IMETHODIMP
187 sbGStreamerVideoTranscoder::SetDestURI(const nsAString& aDestURI)
188 {
189  TRACE(("%s[%p]", __FUNCTION__, this));
190 
191  // Can only set this while we don't have a pipeline built/running.
192  NS_ENSURE_STATE (!mPipelineBuilt);
193 
194  mDestURI = aDestURI;
195  return NS_OK;
196 }
197 
198 NS_IMETHODIMP
199 sbGStreamerVideoTranscoder::GetDestURI(nsAString& aDestURI)
200 {
201  TRACE(("%s[%p]", __FUNCTION__, this));
202 
203  aDestURI = mDestURI;
204  return NS_OK;
205 }
206 
207 NS_IMETHODIMP
208 sbGStreamerVideoTranscoder::GetDestStream(nsIOutputStream **aStream)
209 {
210  TRACE(("%s[%p]", __FUNCTION__, this));
211 
212  NS_ENSURE_ARG_POINTER(aStream);
213 
214  NS_IF_ADDREF(*aStream = mDestStream);
215  return NS_OK;
216 }
217 
218 NS_IMETHODIMP
219 sbGStreamerVideoTranscoder::SetDestStream(nsIOutputStream *aStream)
220 {
221  TRACE(("%s[%p]", __FUNCTION__, this));
222 
223  // Can only set this while we don't have a pipeline built/running.
224  NS_ENSURE_STATE (!mPipelineBuilt);
225 
226  mDestStream = aStream;
227 
228  return NS_OK;
229 }
230 
231 NS_IMETHODIMP
232 sbGStreamerVideoTranscoder::GetMetadata(sbIPropertyArray **aMetadata)
233 {
234  TRACE(("%s[%p]", __FUNCTION__, this));
235 
236  NS_ENSURE_ARG_POINTER(aMetadata);
237 
238  NS_IF_ADDREF(*aMetadata = mMetadata);
239  return NS_OK;
240 }
241 
242 NS_IMETHODIMP
243 sbGStreamerVideoTranscoder::SetMetadata(sbIPropertyArray *aMetadata)
244 {
245  TRACE(("%s[%p]", __FUNCTION__, this));
246 
247  // Can only set this while we don't have a pipeline built/running.
248  NS_ENSURE_STATE (!mPipelineBuilt);
249 
250  mMetadata = aMetadata;
251  return NS_OK;
252 }
253 
254 NS_IMETHODIMP
255 sbGStreamerVideoTranscoder::GetMetadataImage(nsIInputStream **aImageStream)
256 {
257  TRACE(("%s[%p]", __FUNCTION__, this));
258 
259  NS_ENSURE_ARG_POINTER(aImageStream);
260 
261  // Can only set this while we don't have a pipeline built/running.
262  NS_ENSURE_STATE (!mPipelineBuilt);
263 
264  NS_IF_ADDREF(*aImageStream = mImageStream);
265  return NS_OK;
266 }
267 
268 NS_IMETHODIMP
269 sbGStreamerVideoTranscoder::SetMetadataImage(nsIInputStream *aImageStream)
270 {
271  TRACE(("%s[%p]", __FUNCTION__, this));
272 
273  // Can only set this while we don't have a pipeline built/running.
274  NS_ENSURE_STATE (!mPipelineBuilt);
275 
276  mImageStream = aImageStream;
277  return NS_OK;
278 }
279 
280 NS_IMETHODIMP
281 sbGStreamerVideoTranscoder::Vote(sbIMediaItem *aMediaItem, PRInt32 *aVote)
282 {
283  TRACE(("%s[%p]", __FUNCTION__, this));
284 
285  NS_ENSURE_ARG_POINTER(aVote);
286 
287  nsString contentType;
288  nsresult rv = aMediaItem->GetContentType(contentType);
289  NS_ENSURE_SUCCESS(rv, rv);
290 
291  /* For now just vote 1 for anything audio or video */
292  if (contentType.EqualsLiteral("video") ||
293  contentType.EqualsLiteral("audio"))
294  {
295  *aVote = 1;
296  }
297  else {
298  *aVote = 0;
299  }
300 
301  return NS_OK;
302 }
303 
/* Construct the transcoding pipeline (no state change to PLAYING here).
   NOTE(review): the method-name line (original line 305) was lost in
   extraction — from context this is the pipeline-construction override
   (likely sbGStreamerVideoTranscoder::BuildPipeline()); another line
   (original 314) is missing before the return and presumably recorded
   the built state (e.g. setting mPipelineBuilt). Confirm both against
   the repository copy. */
nsresult
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  // Building twice without an intervening cleanup is a caller error.
  NS_ENSURE_STATE (!mPipelineBuilt);

  nsresult rv = BuildTranscodePipeline ("transcode-pipeline");
  NS_ENSURE_SUCCESS (rv, rv);

  return NS_OK;
}
318 
319 NS_IMETHODIMP
320 sbGStreamerVideoTranscoder::Transcode()
321 {
322  TRACE(("%s[%p]", __FUNCTION__, this));
323 
324  nsresult rv = ClearStatus();
325  NS_ENSURE_SUCCESS (rv, rv);
326 
327  // PlayPipeline builds and then starts the pipeline; assert that we haven't
328  // been called with an already running pipeline.
329  NS_ENSURE_STATE (!mPipelineBuilt);
330 
331  return PlayPipeline();
332 }
333 
334 // Override base class to start the progress reporter
NS_IMETHODIMP
/* NOTE(review): the method-name line (original line 336) was lost in
   extraction — from the "start the progress reporter" comment above and
   the base-class call below this is
   sbGStreamerVideoTranscoder::PlayPipeline(); confirm against the
   repository copy. */
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  nsresult rv;

  // Let the base class build/start the pipeline...
  rv = sbGStreamerPipeline::PlayPipeline();
  NS_ENSURE_SUCCESS (rv, rv);

  // ...then begin periodic progress notifications.
  rv = StartProgressReporting();
  NS_ENSURE_SUCCESS (rv, rv);

  return NS_OK;
}
350 
351 // Override base class to stop the progress reporter
NS_IMETHODIMP
/* NOTE(review): the method-name line (original line 353) was lost in
   extraction — from the "stop the progress reporter" comment above and
   the base-class call below this is
   sbGStreamerVideoTranscoder::StopPipeline(); confirm against the
   repository copy. */
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  nsresult rv;

  rv = sbGStreamerPipeline::StopPipeline();
  NS_ENSURE_SUCCESS (rv, rv);

  rv = StopProgressReporting();
  NS_ENSURE_SUCCESS (rv, rv);

  // Clean up any remnants of this pipeline so that a new transcoding
  // attempt is possible
  // NOTE(review): this rv is overwritten below without being checked, so
  // a CleanupPipeline failure is silently ignored — possibly deliberate
  // (best-effort cleanup), but worth confirming.
  rv = CleanupPipeline();

  // Inform listeners of new job status
  rv = OnJobProgress();
  NS_ENSURE_SUCCESS (rv, rv);

  return NS_OK;
}
375 
376 /* sbIJobCancelable interface implementation */
377 
378 NS_IMETHODIMP
379 sbGStreamerVideoTranscoder::GetCanCancel(PRBool *aCanCancel)
380 {
381  TRACE(("%s[%p]", __FUNCTION__, this));
382 
383  NS_ENSURE_ARG_POINTER(aCanCancel);
384 
385  *aCanCancel = PR_TRUE;
386  return NS_OK;
387 }
388 
389 NS_IMETHODIMP
390 sbGStreamerVideoTranscoder::Cancel()
391 {
392  TRACE(("%s[%p]", __FUNCTION__, this));
393 
394  mStatus = sbIJobProgress::STATUS_FAILED; // We don't have a 'cancelled' state.
395 
396  nsresult rv = StopPipeline();
397  NS_ENSURE_SUCCESS (rv, rv);
398 
399  return NS_OK;
400 }
401 
402 /* sbIJobProgress interface implementation */
403 
404 NS_IMETHODIMP
405 sbGStreamerVideoTranscoder::GetElapsedTime(PRUint32 *aElapsedTime)
406 {
407  TRACE(("%s[%p]", __FUNCTION__, this));
408 
409  NS_ENSURE_ARG_POINTER(aElapsedTime);
410 
411  /* Get the running time, and convert to milliseconds */
412  *aElapsedTime = static_cast<PRUint32>(GetRunningTime() / GST_MSECOND);
413 
414  return NS_OK;
415 }
416 
417 NS_IMETHODIMP
418 sbGStreamerVideoTranscoder::GetRemainingTime(PRUint32 *aRemainingTime)
419 {
420  TRACE(("%s[%p]", __FUNCTION__, this));
421 
422  GstClockTime duration = QueryDuration();
423  GstClockTime position = QueryPosition();
424  GstClockTime elapsed = GetRunningTime();
425 
426  if (duration == GST_CLOCK_TIME_NONE || position == GST_CLOCK_TIME_NONE ||
427  elapsed == GST_CLOCK_TIME_NONE)
428  {
429  /* Unknown, so set to -1 */
430  *aRemainingTime = (PRUint32)-1;
431  }
432  else {
433  GstClockTime totalTime = gst_util_uint64_scale (elapsed, duration,
434  position);
435  /* Convert to milliseconds */
436  *aRemainingTime =
437  static_cast<PRUint32>((totalTime - elapsed) / GST_MSECOND);
438  }
439 
440  return NS_OK;
441 }
442 
443 NS_IMETHODIMP
444 sbGStreamerVideoTranscoder::GetStatus(PRUint16 *aStatus)
445 {
446  NS_ENSURE_ARG_POINTER(aStatus);
447 
448  *aStatus = mStatus;
449 
450  return NS_OK;
451 }
452 
453 NS_IMETHODIMP
454 sbGStreamerVideoTranscoder::GetBlocked(PRBool *aBlocked)
455 {
456  NS_ENSURE_ARG_POINTER(aBlocked);
457 
458  *aBlocked = PR_FALSE;
459 
460  return NS_OK;
461 }
462 
/* sbIJobProgress: localized, human-readable description of the current
   status. */
NS_IMETHODIMP
sbGStreamerVideoTranscoder::GetStatusText(nsAString& aText)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  nsresult rv = NS_ERROR_FAILURE;

  /* NOTE(review): the `case` labels of this switch (original lines
     471/475/479) were lost in extraction. From the string-bundle keys
     they were, in order, presumably sbIJobProgress::STATUS_FAILED,
     STATUS_SUCCEEDED and STATUS_RUNNING — confirm against the
     repository copy. */
  switch (mStatus) {
  rv = SBGetLocalizedString(aText,
          NS_LITERAL_STRING("mediacore.gstreamer.transcode.failed"));
  break;
  rv = SBGetLocalizedString(aText,
          NS_LITERAL_STRING("mediacore.gstreamer.transcode.succeeded"));
  break;
  rv = SBGetLocalizedString(aText,
          NS_LITERAL_STRING("mediacore.gstreamer.transcode.running"));
  break;
  default:
    NS_NOTREACHED("Status is invalid");
  }

  return rv;
}
489 
490 NS_IMETHODIMP
491 sbGStreamerVideoTranscoder::GetTitleText(nsAString& aText)
492 {
493  TRACE(("%s[%p]", __FUNCTION__, this));
494 
495  return SBGetLocalizedString(aText,
496  NS_LITERAL_STRING("mediacore.gstreamer.transcode.title"));
497 }
498 
499 NS_IMETHODIMP
500 sbGStreamerVideoTranscoder::GetProgress(PRUint32* aProgress)
501 {
502  TRACE(("%s[%p]", __FUNCTION__, this));
503 
504  NS_ENSURE_ARG_POINTER(aProgress);
505 
506  GstClockTime duration = QueryDuration();
507  GstClockTime position = QueryPosition();
508 
509  if (duration != GST_CLOCK_TIME_NONE && position != GST_CLOCK_TIME_NONE &&
510  duration != 0)
511  {
512  // Scale to [0-1000], see comment in GetTotal for why we do this.
513  *aProgress = (PRUint32)gst_util_uint64_scale (position, 1000, duration);
514  }
515  else {
516  *aProgress = 0; // Unknown
517  }
518 
519  return NS_OK;
520 }
521 
522 NS_IMETHODIMP
523 sbGStreamerVideoTranscoder::GetTotal(PRUint32* aTotal)
524 {
525  TRACE(("%s[%p]", __FUNCTION__, this));
526 
527  NS_ENSURE_ARG_POINTER(aTotal);
528 
529  GstClockTime duration = QueryDuration();
530 
531  // The job progress stuff doesn't like overly large numbers, so we artifically
532  // fix it to a max of 1000.
533 
534  if (duration != GST_CLOCK_TIME_NONE) {
535  *aTotal = 1000;
536  }
537  else {
538  *aTotal = 0;
539  }
540 
541  return NS_OK;
542 }
543 
544 // Note that you can also get errors reported via the mediacore listener
545 // interfaces.
546 NS_IMETHODIMP
547 sbGStreamerVideoTranscoder::GetErrorCount(PRUint32* aErrorCount)
548 {
549  TRACE(("%s[%p]", __FUNCTION__, this));
550 
551  NS_ENSURE_ARG_POINTER(aErrorCount);
552  NS_ASSERTION(NS_IsMainThread(),
553  "sbIJobProgress::GetErrorCount is main thread only!");
554 
555  *aErrorCount = mErrors.Length();
556 
557  return NS_OK;
558 }
559 
560 NS_IMETHODIMP
561 sbGStreamerVideoTranscoder::GetErrorMessages(nsIStringEnumerator** aMessages)
562 {
563  TRACE(("%s[%p]", __FUNCTION__, this));
564 
565  NS_ENSURE_ARG_POINTER(aMessages);
566  NS_ASSERTION(NS_IsMainThread(),
567  "sbIJobProgress::GetProgress is main thread only!");
568 
569  nsresult rv;
570 
571  *aMessages = nsnull;
572 
573  nsRefPtr<sbJobErrorEnumerator<sbITranscodeError> > enumerator =
574  new sbJobErrorEnumerator<sbITranscodeError>(mErrors);
575  rv = CallQueryInterface(enumerator.get(), aMessages);
576  NS_ENSURE_SUCCESS(rv, rv);
577 
578  return NS_OK;
579 }
580 
581 NS_IMETHODIMP
582 sbGStreamerVideoTranscoder::AddJobProgressListener(sbIJobProgressListener *aListener)
583 {
584  TRACE(("%s[%p]", __FUNCTION__, this));
585 
586  NS_ENSURE_ARG_POINTER(aListener);
587  NS_ASSERTION(NS_IsMainThread(), \
588  "sbGStreamerVideoTranscoder::AddJobProgressListener is main thread only!");
589 
590  PRInt32 index = mProgressListeners.IndexOf(aListener);
591  if (index >= 0) {
592  // the listener already exists, do not re-add
593  return NS_SUCCESS_LOSS_OF_INSIGNIFICANT_DATA;
594  }
595  PRBool succeeded = mProgressListeners.AppendObject(aListener);
596  NS_ENSURE_TRUE(succeeded, NS_ERROR_FAILURE);
597 
598  return NS_OK;
599 }
600 
601 NS_IMETHODIMP
602 sbGStreamerVideoTranscoder::RemoveJobProgressListener(
603  sbIJobProgressListener* aListener)
604 {
605  TRACE(("%s[%p]", __FUNCTION__, this));
606 
607  NS_ENSURE_ARG_POINTER(aListener);
608  NS_ASSERTION(NS_IsMainThread(), \
609  "sbGStreamerVideoTranscoder::RemoveJobProgressListener is main thread only!");
610 
611  PRInt32 indexToRemove = mProgressListeners.IndexOf(aListener);
612  if (indexToRemove < 0) {
613  // No such listener, don't try to remove. This is OK.
614  return NS_OK;
615  }
616 
617  // remove the listener
618  PRBool succeeded = mProgressListeners.RemoveObjectAt(indexToRemove);
619  NS_ENSURE_TRUE(succeeded, NS_ERROR_FAILURE);
620 
621  return NS_OK;
622 }
623 
624 // Call all job progress listeners
625 nsresult
626 sbGStreamerVideoTranscoder::OnJobProgress()
627 {
628  TRACE(("%s[%p]", __FUNCTION__, this));
629 
630  NS_ASSERTION(NS_IsMainThread(), \
631  "sbGStreamerVideoTranscoder::OnJobProgress is main thread only!");
632 
633  // Announce our status to the world
634  for (PRInt32 i = mProgressListeners.Count() - 1; i >= 0; --i) {
635  // Ignore any errors from listeners
636  mProgressListeners[i]->OnJobProgress(this);
637  }
638  return NS_OK;
639 }
640 
/* Handle a GST_MESSAGE_ERROR from the bus: record the error for
   sbIJobProgress consumers, then stop the pipeline. */
void sbGStreamerVideoTranscoder::HandleErrorMessage(GstMessage *message)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  GError *gerror = NULL;
  gchar *debug = NULL;
  nsresult rv;

  /* NOTE(review): one line (original 649) was lost in extraction here —
     likely an assertion on the message type; confirm against the repo. */

  gst_message_parse_error(message, &gerror, &debug);

  // The GStreamer message text serves as both the short and long
  // descriptions of the transcode error.
  nsCOMPtr<sbITranscodeError> errorObj;
  rv = SB_NewTranscodeError(NS_ConvertUTF8toUTF16(gerror->message),
                            NS_ConvertUTF8toUTF16(gerror->message),
                            SBVoidString(),
                            mSourceURI,
                            nsnull,
                            getter_AddRefs(errorObj));
  if (NS_SUCCEEDED(rv)) {
    mErrors.AppendElement(errorObj);
  }

  g_error_free (gerror);
  g_free(debug);

  // This will stop the pipeline and update listeners
  /* NOTE(review): the statement that did so (original line 668) was lost
     in extraction — presumably a base-class error handler call or an
     async StopPipeline dispatch; confirm against the repo. */
}
670 
/* Handle GST_MESSAGE_EOS from the bus: the transcode is complete. */
void sbGStreamerVideoTranscoder::HandleEOSMessage(GstMessage *message)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  /* NOTE(review): the statements of this handler (original lines 675 and
     678) were lost in extraction. Per the surviving comment below, the
     missing code stopped the pipeline and notified listeners — likely
     after marking the job as succeeded; confirm against the repo. */

  // This will stop the pipeline and update listeners
}
680 
681 GstClockTime sbGStreamerVideoTranscoder::QueryPosition()
682 {
683  TRACE(("%s[%p]", __FUNCTION__, this));
684 
685  GstQuery *query;
686  gint64 position = GST_CLOCK_TIME_NONE;
687 
688  if (!mPipeline)
689  return position;
690 
691  query = gst_query_new_position(GST_FORMAT_TIME);
692 
693  if (gst_element_query(mPipeline, query))
694  gst_query_parse_position(query, NULL, &position);
695 
696  gst_query_unref (query);
697 
698  return position;
699 }
700 
701 GstClockTime sbGStreamerVideoTranscoder::QueryDuration()
702 {
703  TRACE(("%s[%p]", __FUNCTION__, this));
704 
705  GstQuery *query;
706  gint64 duration = GST_CLOCK_TIME_NONE;
707 
708  if (!mPipeline)
709  return duration;
710 
711  query = gst_query_new_duration(GST_FORMAT_TIME);
712 
713  if (gst_element_query(mPipeline, query))
714  gst_query_parse_duration(query, NULL, &duration);
715 
716  gst_query_unref (query);
717 
718  return duration;
719 }
720 
721 nsresult
722 sbGStreamerVideoTranscoder::StartProgressReporting()
723 {
724  TRACE(("%s[%p]", __FUNCTION__, this));
725 
726  NS_ENSURE_STATE(!mProgressTimer);
727 
728  nsresult rv;
729  mProgressTimer =
730  do_CreateInstance("@mozilla.org/timer;1", &rv);
731  NS_ENSURE_SUCCESS(rv, rv);
732 
733  mProgressTimer->InitWithCallback(this,
734  PROGRESS_INTERVAL, nsITimer::TYPE_REPEATING_SLACK);
735 
736  return NS_OK;
737 }
738 
739 nsresult
740 sbGStreamerVideoTranscoder::StopProgressReporting()
741 {
742  TRACE(("%s[%p]", __FUNCTION__, this));
743 
744  if (mProgressTimer) {
745  mProgressTimer->Cancel();
746  mProgressTimer = nsnull;
747  }
748 
749  return NS_OK;
750 }
751 
/* Main-thread runnable target used by TranscodingFatalError() to shut the
   pipeline down asynchronously. */
void
sbGStreamerVideoTranscoder::AsyncStopPipeline()
{
  nsresult rv = StopPipeline();
  NS_ENSURE_SUCCESS (rv, /* void */);
}
758 
/* Report an unrecoverable transcode failure identified by a localized
   string-bundle key: records an sbITranscodeError, raises a mediacore
   error event, and asynchronously stops the pipeline (this may be called
   from a non-main thread). */
void
sbGStreamerVideoTranscoder::TranscodingFatalError (const char *errorName)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  nsresult rv;
  /* NOTE(review): the declaration of `bundle` (original line 765) was
     lost in extraction — presumably `sbStringBundle bundle;`; confirm
     against the repo. */
  nsString message = bundle.Get(errorName);

  // Add an error message for users of sbIJobProgress interface.
  nsCOMPtr<sbITranscodeError> errorObj;
  rv = SB_NewTranscodeError(message, message, SBVoidString(),
                            mSourceURI,
                            nsnull,
                            getter_AddRefs(errorObj));
  if (NS_SUCCEEDED(rv)) {
    mErrors.AppendElement(errorObj);
  }

  nsRefPtr<sbMediacoreError> error;

  NS_NEWXPCOM (error, sbMediacoreError);
  NS_ENSURE_TRUE (error, /* void */);

  error->Init (sbIMediacoreError::FAILED, message);

  // Dispatch event so that listeners can act directly on that.
  /* NOTE(review): the dispatch statement (original line 786) was lost in
     extraction — presumably a DispatchMediacoreEvent(...) call using
     `error`; confirm against the repo. */

  /* Stop the pipeline. We might be calling this from a non-main thread, so
     dispatch the shutdown asynchronously. */
  nsCOMPtr<nsIRunnable> event = NS_NEW_RUNNABLE_METHOD (
      sbGStreamerVideoTranscoder, this, AsyncStopPipeline);

  rv = NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  NS_ENSURE_SUCCESS (rv, /* void */);
}
796 
797 nsresult
798 sbGStreamerVideoTranscoder::AddImageToTagList(GstTagList *aTags,
799  nsIInputStream *aStream)
800 {
801  TRACE(("%s[%p]", __FUNCTION__, this));
802 
803  PRUint32 imageDataLen;
804  PRUint8 *imageData;
805  nsresult rv;
806 
807  nsCOMPtr<nsIBinaryInputStream> stream =
808  do_CreateInstance("@mozilla.org/binaryinputstream;1", &rv);
809  NS_ENSURE_SUCCESS(rv, rv);
810 
811  rv = stream->SetInputStream(aStream);
812  NS_ENSURE_SUCCESS(rv, rv);
813 
814  rv = aStream->Available(&imageDataLen);
815  NS_ENSURE_SUCCESS(rv, rv);
816 
817  rv = stream->ReadByteArray(imageDataLen, &imageData);
818  NS_ENSURE_SUCCESS(rv, rv);
819 
820  sbAutoNSMemPtr imageDataDestroy(imageData);
821 
822  GstBuffer *imagebuf = gst_tag_image_data_to_image_buffer (
823  imageData, imageDataLen, GST_TAG_IMAGE_TYPE_FRONT_COVER);
824  if (!imagebuf)
825  return NS_ERROR_FAILURE;
826 
827  gst_tag_list_add (aTags, GST_TAG_MERGE_REPLACE, GST_TAG_IMAGE,
828  imagebuf, NULL);
829  gst_buffer_unref (imagebuf);
830 
831  return NS_OK;
832 }
833 
834 
835 nsresult
836 sbGStreamerVideoTranscoder::SetMetadataOnTagSetters()
837 {
838  TRACE(("%s[%p]", __FUNCTION__, this));
839 
840  GstTagList *tags = ConvertPropertyArrayToTagList(mMetadata);
841 
842  if (mImageStream) {
843  if (!tags) {
844  tags = gst_tag_list_new();
845  }
846 
847  // Ignore return value here, failure is not critical.
848  AddImageToTagList (tags, mImageStream);
849  }
850 
851  if (tags) {
852  // Find all the tag setters in the pipeline
853  GstIterator *it = gst_bin_iterate_all_by_interface (
854  (GstBin *)mPipeline, GST_TYPE_TAG_SETTER);
855  GstElement *element;
856 
857  while (gst_iterator_next (it, (void **)&element) == GST_ITERATOR_OK) {
858  GstTagSetter *setter = GST_TAG_SETTER (element);
859 
860  /* Use MERGE_REPLACE: preserves existing tag where we don't have one
861  * in our taglist */
862  gst_tag_setter_merge_tags (setter, tags, GST_TAG_MERGE_REPLACE);
863  g_object_unref (element);
864  }
865  gst_iterator_free (it);
866  gst_tag_list_free (tags);
867  }
868 
869  return NS_OK;
870 }
871 
/* Reset per-run job state; called directly before building a pipeline
   (see Transcode()). */
nsresult
sbGStreamerVideoTranscoder::ClearStatus()
{
  /* Cleanup done directly _before_ creating a pipeline */
  /* NOTE(review): one line (original 876) was lost in extraction — given
     the constructor's initializer it presumably reset mStatus to
     sbIJobProgress::STATUS_RUNNING; confirm against the repo. */
  mErrors.Clear();

  return NS_OK;
}
881 
882 void
883 sbGStreamerVideoTranscoder::CleanupPads()
884 {
885  if (mAudioSrc) {
886  g_object_unref (mAudioSrc);
887  mAudioSrc = NULL;
888  }
889 
890  if (mVideoSrc) {
891  g_object_unref (mVideoSrc);
892  mVideoSrc = NULL;
893  }
894 
895  if (mAudioQueueSrc) {
896  g_object_unref (mAudioQueueSrc);
897  mAudioQueueSrc = NULL;
898  }
899 
900  if (mVideoQueueSrc) {
901  g_object_unref (mVideoQueueSrc);
902  mVideoQueueSrc = NULL;
903  }
904 }
905 
906 nsresult
907 sbGStreamerVideoTranscoder::CleanupPipeline()
908 {
909  /* Cleanup done _after_ destroying a pipeline. Note that we don't clean up
910  error messages/status, as things might want to query those */
911  CleanupPads();
912 
913  mPipelineBuilt = PR_FALSE;
914  mWaitingForCaps = PR_FALSE;
915 
916  return NS_OK;
917 }
918 
/* GObject signal trampoline: forwards decodebin's 'pad-added' to the
   member function DecoderPadAdded(). */
/* static */ void
sbGStreamerVideoTranscoder::decodebin_pad_added_cb (GstElement * uridecodebin,
        GstPad * pad, sbGStreamerVideoTranscoder *transcoder)
{
  nsresult rv = transcoder->DecoderPadAdded(uridecodebin, pad);
  NS_ENSURE_SUCCESS (rv, /* void */);
}
926 
/* GObject signal trampoline: forwards decodebin's 'no-more-pads' to the
   member function DecoderNoMorePads(). */
/* static */ void
sbGStreamerVideoTranscoder::decodebin_no_more_pads_cb (
        GstElement * uridecodebin, sbGStreamerVideoTranscoder *transcoder)
{
  nsresult rv = transcoder->DecoderNoMorePads(uridecodebin);
  NS_ENSURE_SUCCESS (rv, /* void */);
}
934 
/* GStreamer pad-block callback trampoline: forwards to the member
   function PadBlocked(). */
/* static */ void
sbGStreamerVideoTranscoder::pad_blocked_cb (GstPad * pad, gboolean blocked,
        sbGStreamerVideoTranscoder *transcoder)
{
  nsresult rv = transcoder->PadBlocked(pad, blocked);
  NS_ENSURE_SUCCESS (rv, /* void */);
}
942 
/* GObject 'notify::caps' trampoline: forwards to the member function
   PadNotifyCaps() with the pad whose caps changed. */
/* static */ void
sbGStreamerVideoTranscoder::pad_notify_caps_cb (GObject *obj, GParamSpec *pspec,
        sbGStreamerVideoTranscoder *transcoder)
{
  nsresult rv = transcoder->PadNotifyCaps (GST_PAD (obj));
  NS_ENSURE_SUCCESS (rv, /* void */);
}
950 
951 nsresult
952 sbGStreamerVideoTranscoder::DecoderPadAdded (GstElement *uridecodebin,
953  GstPad *pad)
954 {
955  TRACE(("%s[%p]", __FUNCTION__, this));
956 
957  // A new decoded pad has been added from the decodebin.
958  // At this point, we're able to look at the template caps (via
959  // gst_pad_get_caps()), which is sufficient to figure out if we have an audio
960  // or video pad, but not sufficient to actually get all the details of
961  // the audio/video stream.
962  // Decide whether we want to use it at all. If we do, add it to the list
963  // of pending pads. We don't actually build anything until later, once we
964  // have all the pads, and caps on all pads.
965  if (mPipelineBuilt) {
966  LOG(("pad-added after pipeline fully constructed; cannot use"));
967  return NS_ERROR_FAILURE;
968  }
969 
970  GstCaps *caps = gst_pad_get_caps (pad);
971  GstStructure *structure = gst_caps_get_structure (caps, 0);
972  const gchar *name = gst_structure_get_name (structure);
973  bool isVideo = g_str_has_prefix (name, "video/");
974  bool isAudio = g_str_has_prefix (name, "audio/");
975 
976  gst_caps_unref (caps);
977 
978  if (isAudio) {
979  if (mAudioSrc) {
980  LOG(("Multiple audio streams: ignoring subsequent ones"));
981  return NS_OK;
982  }
983 
984 #if 0
985  // TODO: Some sort of API like this will be needed once we're using this
986  // for audio-only transcoding as well.
987  if (!mTranscoderConfigurator->SupportsAudio()) {
988  LOG(("Transcoder not configured to support audio, ignoring stream"));
989  return NS_OK;
990  }
991 #endif
992 
993  LOG(("Using audio pad %s:%s for audio stream", GST_DEBUG_PAD_NAME (pad)));
994  gst_object_ref (pad);
995  mAudioSrc = pad;
996  }
997  else if (isVideo) {
998  if (mVideoSrc) {
999  LOG(("Multiple video streams: ignoring subsequent ones"));
1000  return NS_OK;
1001  }
1002 #if 0
1003  // TODO: Some sort of API like this will be needed once we're using this
1004  // for audio-only transcoding as well.
1005  if (!mTranscoderConfigurator->SupportsVideo()) {
1006  LOG(("Transcoder not configured to support video, ignoring stream"));
1007  return NS_OK;
1008  }
1009 #endif
1010 
1011  LOG(("Using video pad %s:%s for video stream", GST_DEBUG_PAD_NAME (pad)));
1012  gst_object_ref (pad);
1013  mVideoSrc = pad;
1014  }
1015  else {
1016  LOG(("Ignoring non-audio, non-video stream"));
1017  return NS_OK;
1018  }
1019 
1020  return NS_OK;
1021 }
1022 
/* Configure a videobox element to letterbox/pillarbox the input so its
   display aspect ratio (DAR) matches the output frame without distortion.
   The comparison is done on DARs: imageDAR = width*parN : height*parD.
   If the input is wider than the output frame, padding goes top/bottom;
   if narrower, left/right; if equal, no padding. Negative videobox values
   mean "add padding" (positive would crop). */
void
sbGStreamerVideoTranscoder::ConfigureVideoBox (GstElement *videobox,
        GstCaps *aInputVideoCaps, gint outputWidth, gint outputHeight,
        gint outputParN, gint outputParD)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  gint imageWidth, imageHeight, imageParN, imageParD;
  gboolean ret;
  GstStructure *structure = gst_caps_get_structure (aInputVideoCaps, 0);

  /* Ignore failures here, they aren't possible: if these weren't here, we'd
     have rejected this stream previously */
  ret = gst_structure_get_int (structure, "width", &imageWidth);
  NS_ASSERTION (ret, "Invalid image caps, no width");
  ret = gst_structure_get_int (structure, "height", &imageHeight);
  NS_ASSERTION (ret, "Invalid image caps, no height");

  const GValue* par = gst_structure_get_value (structure,
          "pixel-aspect-ratio");
  if (par) {
    imageParN = gst_value_get_fraction_numerator(par);
    imageParD = gst_value_get_fraction_denominator(par);
  }
  else {
    // Default to square pixels.
    imageParN = imageParD = 1;
  }

  // DARs expressed as (unreduced) fractions: image is imageDarN/imageDarD,
  // output frame is outputDarN/outputDarD.
  gint imageDarN = imageWidth * imageParN;
  gint imageDarD = imageHeight * imageParD;

  gint outputDarN = outputWidth * outputParN;
  gint outputDarD = outputHeight * outputParD;

  LOG(("Determining output geometry. Output image is %dx%d (PAR %d:%d). "
       "Input image is %dx%d (PAR %d:%d)", outputWidth, outputHeight,
       outputParN, outputParD, imageWidth, imageHeight, imageParN, imageParD));

  // Ok, we have our basic variables. Now we want to check if the image DAR
  // is less than, equal to, or greater than, the output DAR.
  // (Cross-multiplication compares the fractions without division.)
  // TODO: check if any of these variables might plausibly get large enough
  // that this could overflow.
  if (imageDarN * outputDarD > outputDarN * imageDarD) {
    // Image DAR is greater than output DAR. So, we use the full width of the
    // image, and add padding at the top and bottom.
    // padding = outputHeight - (outputWidth / imageDAR * outputPAR);
    gint outputImageHeight = outputWidth *
        (imageDarD * outputParN) / (imageDarN * outputParD);
    gint padding = outputHeight - outputImageHeight;
    // Because we're usually dealing with chroma-subsampled video formats,
    // adding an odd amount of padding is a bad idea. For example, if we need
    // to add 2 pixels of padding, it's best to have both on one side, rather
    // than one on each.
    // The %4 test (not %2) makes each side's share even whenever possible:
    // padding divisible by 4 splits evenly; otherwise the bottom takes the
    // extra so the top stays even.
    gint paddingBottom, paddingTop;
    if (padding % 4 == 0) {
      paddingBottom = padding / 2;
    }
    else {
      paddingBottom = padding / 2 + 1;
    }
    paddingTop = padding - paddingBottom;

    LOG(("Padding %d pixels at top, %d pixels at bottom", paddingTop,
         paddingBottom));

    // Negative values indicate adding padding (rather than cropping)
    g_object_set (videobox, "top", -paddingTop, "bottom", -paddingBottom, NULL);
  }
  else if (imageDarN * outputDarD < outputDarN * imageDarD) {
    // Image DAR is less than output DAR. So, we use the full height of the
    // image, and add padding at the left and right.
    // padding = outputWidth - (outputHeight * imageDAR / outputPAR);
    gint outputImageWidth = outputHeight *
        (imageDarN * outputParD) / (imageDarD * outputParN);
    gint padding = outputWidth - outputImageWidth;
    // Because we're usually dealing with chroma-subsampled video formats,
    // adding an odd amount of padding is a bad idea. For example, if we need
    // to add 2 pixels of padding, it's best to have both on one side, rather
    // than one on each.
    // Same even-per-side %4 split as the vertical case above.
    gint paddingRight, paddingLeft;
    if (padding % 4 == 0) {
      paddingRight = padding / 2;
    }
    else {
      paddingRight = padding / 2 + 1;
    }
    paddingLeft = padding - paddingRight;

    LOG(("Padding %d pixels at left, %d pixels at right", paddingLeft,
         paddingRight));

    // Negative values indicate adding padding (rather than cropping)
    g_object_set (videobox, "left", -paddingLeft, "right", -paddingRight, NULL);
  }
  else {
    LOG(("No padding required"));
  }
}
1122 
/**
 * Build the video processing and encoding bin:
 *   videorate ! ffmpegcolorspace ! videoscale ! videobox ! capsfilter
 *   ! <configured encoder>
 * wrapped in a GstBin with "sink" and "src" ghost pads.
 *
 * Unlike BuildAudioBin, a video encoder is mandatory here: an empty encoder
 * name from the configurator is treated as an error.
 *
 * @param aInputVideoCaps Caps of the decoded input stream; used only to
 *                        compute letterbox padding (via ConfigureVideoBox).
 * @param aVideoBin       Out: the newly-built bin. Caller assumes ownership.
 * @return NS_OK on success; NS_ERROR_FAILURE if a required element or the
 *         configured encoder cannot be created, or if the configurator
 *         returned invalid output parameters.
 */
nsresult
sbGStreamerVideoTranscoder::BuildVideoBin(GstCaps *aInputVideoCaps,
                                          GstElement **aVideoBin)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  // See the comment/ascii-art in BuildTranscodePipeline for details about what
  // this does
  nsresult rv;
  GstBin *bin = NULL;
  GstElement *videorate = NULL;
  GstElement *colorspace = NULL;
  GstElement *videoscale = NULL;
  GstElement *videobox = NULL;
  GstElement *capsfilter = NULL;
  GstElement *encoder = NULL;
  nsCOMPtr<nsIPropertyBag> encoderProperties;

  PRInt32 outputWidth, outputHeight;
  PRUint32 outputParN, outputParD;
  PRUint32 outputFramerateN, outputFramerateD;

  // Ask the configurator for what format we should feed into the encoder
  nsCOMPtr<sbIMediaFormatVideo> videoFormat;
  rv = mConfigurator->GetVideoFormat (getter_AddRefs(videoFormat));
  NS_ENSURE_SUCCESS (rv, rv);

  rv = videoFormat->GetVideoWidth(&outputWidth);
  NS_ENSURE_SUCCESS (rv, rv);
  rv = videoFormat->GetVideoHeight(&outputHeight);
  NS_ENSURE_SUCCESS (rv, rv);
  rv = videoFormat->GetVideoPAR(&outputParN, &outputParD);
  NS_ENSURE_SUCCESS (rv, rv);
  rv = videoFormat->GetVideoFrameRate(&outputFramerateN, &outputFramerateD);
  NS_ENSURE_SUCCESS (rv, rv);

  /* Ensure the configurator didn't give us bogus data for any of these, and
     just fail if it did. */
  NS_ENSURE_TRUE(outputWidth > 0, NS_ERROR_FAILURE);
  NS_ENSURE_TRUE(outputHeight > 0, NS_ERROR_FAILURE);
  NS_ENSURE_TRUE(outputParN > 0, NS_ERROR_FAILURE);
  NS_ENSURE_TRUE(outputParD > 0, NS_ERROR_FAILURE);
  NS_ENSURE_TRUE(outputFramerateN > 0, NS_ERROR_FAILURE);
  NS_ENSURE_TRUE(outputFramerateD > 0, NS_ERROR_FAILURE);

  // Ask the configurator what encoder (if any) we should use.
  nsString encoderName;
  rv = mConfigurator->GetVideoEncoder(encoderName);
  NS_ENSURE_SUCCESS (rv, rv);

  GstPad *srcpad, *sinkpad, *ghostpad;
  GstElement *last;
  GstCaps *caps;

  bin = GST_BIN (gst_bin_new("video-encode-bin"));
  videorate = gst_element_factory_make ("videorate", NULL);
  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  videoscale = gst_element_factory_make ("videoscale", NULL);
  videobox = gst_element_factory_make ("videobox", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  encoder = NULL;

  if (!videorate || !colorspace || !videoscale || !videobox || !capsfilter)
  {
    // Failed to create an element that we expected to be present, means
    // the gstreamer installation is messed up.
    rv = NS_ERROR_FAILURE;
    goto failed;
  }

  if (encoderName.IsEmpty()) {
    LOG(("Video enabled but no video encoder specified"));
    rv = NS_ERROR_FAILURE;
    goto failed;
  }

  encoder = gst_element_factory_make (
          NS_ConvertUTF16toUTF8(encoderName).BeginReading(), NULL);

  if (!encoder) {
    LOG(("No encoder %s available",
         NS_ConvertUTF16toUTF8(encoderName).BeginReading()));
    TranscodingFatalError(
            "songbird.transcode.error.video_encoder_unavailable");
    rv = NS_ERROR_FAILURE;
    goto failed;
  }

  rv = mConfigurator->GetVideoEncoderProperties(
          getter_AddRefs(encoderProperties));
  if (NS_FAILED (rv)) {
    goto failed;
  }

  rv = ApplyPropertyBagToElement(encoder, encoderProperties);
  if (NS_FAILED (rv)) {
    goto failed;
  }

  /* Configure videoscale to use 4-tap scaling for higher quality */
  g_object_set (videoscale, "method", 2, NULL);

  /* Configure capsfilter for our output size, framerate, etc. */
  // TODO: Should we also permit video/x-raw-rgb? It doesn't matter for now,
  // as all the encoders we're using prefer YUV.
  caps = gst_caps_new_simple ("video/x-raw-yuv",
          "width", G_TYPE_INT, outputWidth,
          "height", G_TYPE_INT, outputHeight,
          "pixel-aspect-ratio", GST_TYPE_FRACTION, outputParN, outputParD,
          "framerate", GST_TYPE_FRACTION, outputFramerateN, outputFramerateD,
          NULL);
  g_object_set (capsfilter, "caps", caps, NULL);
  gst_caps_unref (caps);

  /* Configure videobox to add black bars around the image to preserve the
     actual image aspect ratio, if required. This is a little complex, so
     it's factored out into another function. */
  ConfigureVideoBox (videobox, aInputVideoCaps, outputWidth, outputHeight,
                     outputParN, outputParD);

  // Now, add to the bin, and then link everything up.
  // NOTE: after gst_bin_add_many() the bin owns the elements, so no path
  // below this point may jump to 'failed' (which unrefs them individually).
  gst_bin_add_many (bin, videorate, colorspace, videoscale, videobox,
          capsfilter, NULL);
  gst_element_link_many (videorate, colorspace, videoscale, videobox,
          capsfilter, NULL);

  last = capsfilter;

  gst_bin_add (bin, encoder);
  gst_element_link (capsfilter, encoder);
  last = encoder;

  // Finally, add ghost pads to our bin.
  sinkpad = gst_element_get_static_pad (videorate, "sink");
  ghostpad = gst_ghost_pad_new ("sink", sinkpad);
  g_object_unref (sinkpad);
  gst_element_add_pad (GST_ELEMENT (bin), ghostpad);

  srcpad = gst_element_get_static_pad (last, "src");
  ghostpad = gst_ghost_pad_new ("src", srcpad);
  g_object_unref (srcpad);
  gst_element_add_pad (GST_ELEMENT (bin), ghostpad);

  // Ok, done; set return value.
  *aVideoBin = GST_ELEMENT (bin);

  return NS_OK;

failed:
  // Elements have not yet been added to the bin here, so each one still
  // holds its own (floating) reference and must be dropped individually.
  if (videorate)
    g_object_unref (videorate);
  if (colorspace)
    g_object_unref (colorspace);
  if (videoscale)
    g_object_unref (videoscale);
  if (videobox)
    g_object_unref (videobox);
  if (capsfilter)
    g_object_unref (capsfilter);
  if (encoder)
    g_object_unref (encoder);
  if (bin)
    g_object_unref (bin);

  return rv;
}
1289 
1290 nsresult
1291 sbGStreamerVideoTranscoder::GetRawAudioCaps(GstCaps **aResultCaps)
1292 {
1293  nsresult rv;
1294 
1295  nsCOMPtr<nsIPropertyBag> encoderProperties;
1296  rv = mConfigurator->GetAudioEncoderProperties(
1297  getter_AddRefs(encoderProperties));
1298  NS_ENSURE_SUCCESS(rv, rv);
1299 
1300  nsCOMPtr<nsIVariant> isFloatVar;
1301  rv = encoderProperties->GetProperty(NS_LITERAL_STRING ("IsFloat"),
1302  getter_AddRefs(isFloatVar));
1303  NS_ENSURE_SUCCESS(rv, rv);
1304  PRBool isFloat;
1305  rv = isFloatVar->GetAsBool(&isFloat);
1306  NS_ENSURE_SUCCESS(rv, rv);
1307 
1308  nsCOMPtr<nsIVariant> isLittleEndianVar;
1309  rv = encoderProperties->GetProperty(NS_LITERAL_STRING ("LittleEndian"),
1310  getter_AddRefs(isLittleEndianVar));
1311  NS_ENSURE_SUCCESS(rv, rv);
1312  PRBool isLittleEndian;
1313  rv = isLittleEndianVar->GetAsBool(&isLittleEndian);
1314  NS_ENSURE_SUCCESS(rv, rv);
1315 
1316  nsCOMPtr<nsIVariant> sampleDepthVar;
1317  rv = encoderProperties->GetProperty(NS_LITERAL_STRING ("Depth"),
1318  getter_AddRefs(sampleDepthVar));
1319  NS_ENSURE_SUCCESS(rv, rv);
1320  PRInt32 sampleDepth;
1321  rv = sampleDepthVar->GetAsInt32(&sampleDepth);
1322  NS_ENSURE_SUCCESS(rv, rv);
1323 
1324  PRInt32 outputRate, outputChannels;
1325 
1326  // Ask the configurator for what format we should feed into the encoder
1327  nsCOMPtr<sbIMediaFormatAudio> audioFormat;
1328  rv = mConfigurator->GetAudioFormat (getter_AddRefs(audioFormat));
1329  NS_ENSURE_SUCCESS (rv, rv);
1330 
1331  rv = audioFormat->GetSampleRate (&outputRate);
1332  NS_ENSURE_SUCCESS (rv, rv);
1333  rv = audioFormat->GetChannels (&outputChannels);
1334  NS_ENSURE_SUCCESS (rv, rv);
1335 
1336  gint32 endianness = isLittleEndian ? G_LITTLE_ENDIAN : G_BIG_ENDIAN;
1337  GstCaps *caps;
1338  if (isFloat) {
1339  caps = gst_caps_new_simple ("audio/x-raw-float",
1340  "endianness", G_TYPE_INT, endianness,
1341  "width", G_TYPE_INT, sampleDepth,
1342  "rate", G_TYPE_INT, outputRate,
1343  "channels", G_TYPE_INT, outputChannels);
1344  }
1345  else {
1346  caps = gst_caps_new_simple ("audio/x-raw-int",
1347  "endianness", G_TYPE_INT, endianness,
1348  "width", G_TYPE_INT, sampleDepth,
1349  "depth", G_TYPE_INT, sampleDepth,
1350  "rate", G_TYPE_INT, outputRate,
1351  "channels", G_TYPE_INT, outputChannels,
1352  "signed", G_TYPE_BOOLEAN, sampleDepth != 8);
1353  }
1354 
1355  *aResultCaps = caps;
1356 
1357  return NS_OK;
1358 }
1359 
/**
 * Build the audio processing and (optional) encoding bin:
 *   audiorate ! audioconvert ! audioresample ! capsfilter ! audioresample
 *   [ ! <configured encoder> ]
 * wrapped in a GstBin with "sink" and "src" ghost pads.
 *
 * An empty encoder name from the configurator is legal here and means raw
 * PCM output; in that case the capsfilter pins the exact raw format
 * (via GetRawAudioCaps) instead of leaving int/float open.
 *
 * @param aInputAudioCaps Caps of the decoded input stream. NOTE: currently
 *                        unused by this function; kept for signature symmetry
 *                        with BuildVideoBin.
 * @param aAudioBin       Out: the newly-built bin. Caller assumes ownership.
 * @return NS_OK on success; NS_ERROR_FAILURE if a required element or the
 *         configured encoder cannot be created.
 */
nsresult
sbGStreamerVideoTranscoder::BuildAudioBin(GstCaps *aInputAudioCaps,
                                          GstElement **aAudioBin)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  // See the comment/ascii-art in BuildTranscodePipeline for details about what
  // this does
  nsresult rv;
  GstBin *bin = NULL;
  GstElement *audiorate = NULL;
  GstElement *audioconvert = NULL;
  GstElement *audioresample = NULL;
  GstElement *capsfilter = NULL;
  GstElement *audioresample2 = NULL;
  GstElement *encoder = NULL;

  PRInt32 outputRate, outputChannels;

  // Ask the configurator for what format we should feed into the encoder
  nsCOMPtr<sbIMediaFormatAudio> audioFormat;
  rv = mConfigurator->GetAudioFormat (getter_AddRefs(audioFormat));
  NS_ENSURE_SUCCESS (rv, rv);

  rv = audioFormat->GetSampleRate (&outputRate);
  NS_ENSURE_SUCCESS (rv, rv);
  rv = audioFormat->GetChannels (&outputChannels);
  NS_ENSURE_SUCCESS (rv, rv);

  /* Ensure the configurator didn't give us bogus data for any of these, and
     just fail if it did. */
  NS_ENSURE_TRUE (outputRate > 0, NS_ERROR_FAILURE);
  NS_ENSURE_TRUE (outputChannels > 0, NS_ERROR_FAILURE);

  // Ask the configurator what encoder (if any) we should use.
  nsString encoderName;
  rv = mConfigurator->GetAudioEncoder(encoderName);
  NS_ENSURE_SUCCESS (rv, rv);

  GstPad *srcpad, *sinkpad, *ghostpad;
  GstElement *last;
  GstCaps *caps;
  GstStructure *structure;

  bin = GST_BIN (gst_bin_new("audio-encode-bin"));
  audiorate = gst_element_factory_make ("audiorate", NULL);
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  audioresample = gst_element_factory_make ("audioresample", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  // Second resampler: gives the encoder a chance to get a different rate
  // than the capsfilter requested, if it needs one.
  audioresample2 = gst_element_factory_make ("audioresample", NULL);

  if (!audiorate || !audioconvert || !audioresample || !capsfilter ||
      !audioresample2)
  {
    // Failed to create an element that we expected to be present, means
    // the gstreamer installation is messed up.
    rv = NS_ERROR_FAILURE;
    goto failed;
  }

  // We may have no encoder - that's used if we're outputting raw audio. That's
  // perfectly ok!
  if (!encoderName.IsEmpty()) {
    encoder = gst_element_factory_make (
            NS_ConvertUTF16toUTF8 (encoderName).BeginReading(), NULL);
    if (!encoder) {
      LOG(("No encoder %s available",
           NS_ConvertUTF16toUTF8(encoderName).BeginReading()));
      TranscodingFatalError(
              "songbird.transcode.error.audio_encoder_unavailable");
      rv = NS_ERROR_FAILURE;
      goto failed;
    }

    nsCOMPtr<nsIPropertyBag> encoderProperties;
    rv = mConfigurator->GetAudioEncoderProperties(
            getter_AddRefs(encoderProperties));
    if (NS_FAILED (rv)) {
      goto failed;
    }

    rv = ApplyPropertyBagToElement(encoder, encoderProperties);
    if (NS_FAILED (rv)) {
      goto failed;
    }
  }

  /* Configure capsfilter for our output sample rate and channels. Allow
     either int or float audio; this avoids having to introspect the encoder
     to find out what it supports.

     As a special case, if there is no audio encoder, that means we want
     raw PCM output. In this case, we select the format here in the caps.
   */
  if (encoder) {
    caps = gst_caps_new_empty ();
    structure = gst_structure_new ("audio/x-raw-int",
            "rate", G_TYPE_INT, outputRate,
            "channels", G_TYPE_INT, outputChannels,
            NULL);
    gst_caps_append_structure (caps, structure);
    structure = gst_structure_new ("audio/x-raw-float",
            "rate", G_TYPE_INT, outputRate,
            "channels", G_TYPE_INT, outputChannels,
            NULL);
    gst_caps_append_structure (caps, structure);
  }
  else {
    rv = GetRawAudioCaps(&caps);
    if (NS_FAILED (rv))
      goto failed;
  }

  g_object_set (capsfilter, "caps", caps, NULL);
  gst_caps_unref (caps);

  // Now, add to the bin, and then link everything up.
  // NOTE: after gst_bin_add_many() the bin owns the elements, so no path
  // below this point may jump to 'failed' (which unrefs them individually).
  gst_bin_add_many (bin, audiorate, audioconvert, audioresample,
          capsfilter, audioresample2, NULL);
  gst_element_link_many (audiorate, audioconvert, audioresample,
          capsfilter, audioresample2, NULL);

  last = audioresample2;

  if (encoder) {
    gst_bin_add (bin, encoder);
    gst_element_link (last, encoder);
    last = encoder;
  }

  // Finally, add ghost pads to our bin.
  sinkpad = gst_element_get_static_pad (audiorate, "sink");
  ghostpad = gst_ghost_pad_new ("sink", sinkpad);
  g_object_unref (sinkpad);
  gst_element_add_pad (GST_ELEMENT (bin), ghostpad);

  srcpad = gst_element_get_static_pad (last, "src");
  ghostpad = gst_ghost_pad_new ("src", srcpad);
  g_object_unref (srcpad);
  gst_element_add_pad (GST_ELEMENT (bin), ghostpad);

  // All done!
  *aAudioBin = GST_ELEMENT (bin);

  return NS_OK;

failed:
  // Elements have not yet been added to the bin here, so each one still
  // holds its own (floating) reference and must be dropped individually.
  if (audiorate)
    g_object_unref (audiorate);
  if (audioconvert)
    g_object_unref (audioconvert);
  if (audioresample)
    g_object_unref (audioresample);
  if (capsfilter)
    g_object_unref (capsfilter);
  if (audioresample2)
    g_object_unref (audioresample2);
  if (encoder)
    g_object_unref (encoder);
  if (bin)
    g_object_unref (bin);

  return rv;
}
1524 
1525 nsresult
1526 sbGStreamerVideoTranscoder::AddAudioBin(GstPad *inputAudioSrcPad,
1527  GstPad **outputAudioSrcPad)
1528 {
1529  TRACE(("%s[%p]", __FUNCTION__, this));
1530  NS_ENSURE_ARG_POINTER (inputAudioSrcPad);
1531  NS_ENSURE_ARG_POINTER (outputAudioSrcPad);
1532 
1533  nsresult rv;
1534  // Ok, we have an audio source. Add the audio processing bin (including
1535  // encoder, if any).
1536  GstElement *audioBin = NULL;
1537  GstCaps *caps = GetCapsFromPad (mAudioSrc);
1538  rv = BuildAudioBin(caps, &audioBin);
1539  gst_caps_unref (caps);
1540  NS_ENSURE_SUCCESS (rv, rv);
1541 
1542  GstPad *audioBinSinkPad = gst_element_get_pad (audioBin, "sink");
1543  GstPad *audioBinSrcPad = gst_element_get_pad (audioBin, "src");
1544 
1545  gst_bin_add (GST_BIN (mPipeline), audioBin);
1546  gst_element_sync_state_with_parent (audioBin);
1547 
1548  GstPadLinkReturn linkret = gst_pad_link (inputAudioSrcPad, audioBinSinkPad);
1549  if (linkret != GST_PAD_LINK_OK) {
1550  TranscodingFatalError("songbird.transcode.error.audio_incompatible");
1551  rv = NS_ERROR_FAILURE;
1552  goto failed;
1553  }
1554 
1555  g_object_unref (audioBinSinkPad);
1556 
1557  // Provide the src pad of our bin to the caller.
1558  *outputAudioSrcPad = audioBinSrcPad;
1559 
1560  return NS_OK;
1561 
1562 failed:
1563  g_object_unref (audioBinSinkPad);
1564  g_object_unref (audioBinSrcPad);
1565 
1566  return rv;
1567 }
1568 
1569 /* Add a video bin.
1570  * Link the resulting bin to inputVideoSrcPad. Return the src pad of the
1571  * new bin in outputVideoSrcPad
1572  */
1573 nsresult
1574 sbGStreamerVideoTranscoder::AddVideoBin(GstPad *inputVideoSrcPad,
1575  GstPad **outputVideoSrcPad)
1576 {
1577  TRACE(("%s[%p]", __FUNCTION__, this));
1578  NS_ENSURE_ARG_POINTER (inputVideoSrcPad);
1579  NS_ENSURE_ARG_POINTER (outputVideoSrcPad);
1580 
1581  nsresult rv;
1582 
1583  // Ok, we have a video source, so add the video processing bin (usually
1584  // including an encoder).
1585  GstElement *videoBin = NULL;
1586  GstCaps *caps = GetCapsFromPad (mVideoSrc);
1587  rv = BuildVideoBin(caps, &videoBin);
1588  gst_caps_unref (caps);
1589  NS_ENSURE_SUCCESS (rv, rv);
1590 
1591  GstPad *videoBinSinkPad = gst_element_get_pad (videoBin, "sink");
1592  GstPad *videoBinSrcPad = gst_element_get_pad (videoBin, "src");
1593 
1594  gst_bin_add (GST_BIN (mPipeline), videoBin);
1595  gst_element_sync_state_with_parent (videoBin);
1596 
1597  GstPadLinkReturn linkret = gst_pad_link (inputVideoSrcPad, videoBinSinkPad);
1598  if (linkret != GST_PAD_LINK_OK) {
1599  TranscodingFatalError("songbird.transcode.error.video_incompatible");
1600  rv = NS_ERROR_FAILURE;
1601  goto failed;
1602  }
1603 
1604  g_object_unref (videoBinSinkPad);
1605 
1606  // Provide the src pad of our bin to the caller.
1607  *outputVideoSrcPad = videoBinSrcPad;
1608 
1609  return NS_OK;
1610 
1611 failed:
1612  g_object_unref (videoBinSinkPad);
1613  g_object_unref (videoBinSrcPad);
1614 
1615  return rv;
1616 }
1617 
1618 GstPad *
1619 sbGStreamerVideoTranscoder::GetPadFromTemplate (GstElement *element,
1620  GstPadTemplate *templ)
1621 {
1622  TRACE(("%s[%p]", __FUNCTION__, this));
1623 
1624  GstPadPresence presence = GST_PAD_TEMPLATE_PRESENCE (templ);
1625 
1626  if (presence == GST_PAD_ALWAYS || presence == GST_PAD_SOMETIMES)
1627  {
1628  return gst_element_get_static_pad (element, templ->name_template);
1629  }
1630  else {
1631  // Request pad
1632  return gst_element_get_request_pad (element, templ->name_template);
1633  }
1634 }
1635 
1636 /* Get a pad from 'element' that is compatible with (can potentially be linked
1637  * to) 'pad'.
1638  * Returned pad may be an existing static pad, or a new request pad
1639  */
1640 GstPad *
1641 sbGStreamerVideoTranscoder::GetCompatiblePad (GstElement *element,
1642  GstPad *pad)
1643 {
1644  TRACE(("%s[%p]", __FUNCTION__, this));
1645 
1646  GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
1647  GList *padlist = gst_element_class_get_pad_template_list (klass);
1648  GstPadTemplate *compatibleTemplate = NULL;
1649 
1650  while (padlist) {
1651  GstPadTemplate *padtempl = (GstPadTemplate *) padlist->data;
1652 
1653  // Check that pad's direction is opposite this template's direction.
1654  // Then check that they have potentially-compatible caps.
1655  if (GST_PAD_DIRECTION (pad) != padtempl->direction) {
1656  GstCaps *caps = gst_pad_get_caps (pad);
1657 
1658  gboolean compatible = gst_caps_can_intersect (
1659  caps, GST_PAD_TEMPLATE_CAPS (padtempl));
1660  gst_caps_unref (caps);
1661 
1662  if (compatible) {
1663  compatibleTemplate = padtempl;
1664  break;
1665  }
1666  }
1667 
1668  padlist = g_list_next (padlist);
1669  }
1670 
1671  if (compatibleTemplate) {
1672  // See if we can get an actual pad based on the compatible template
1673  return GetPadFromTemplate (element, compatibleTemplate);
1674  }
1675  else {
1676  // No compatible template means there couldn't possibly be a compatible
1677  // pad.
1678  return NULL;
1679  }
1680 }
1681 
1682 // Returns the muxer's source pad, or the audio or video encoder's source pad
1683 // if there is no muxer, in 'muxerSrcPad'. Link up either or both of audioPad,
1684 // videoPad.
/**
 * Create the muxer named by the configurator, add it to the pipeline, and
 * link whichever of audioPad/videoPad are non-NULL into it.
 *
 * @param muxerSrcPad Out: new reference to the muxer's "src" pad.
 * @param audioPad    Encoded audio src pad to mux, or NULL.
 * @param videoPad    Encoded video src pad to mux, or NULL.
 * @return NS_OK, or NS_ERROR_FAILURE (after raising a fatal transcode error)
 *         if the muxer is missing/unspecified or a stream cannot be linked.
 *
 * NOTE(review): on the failure paths below the muxer has already been added
 * to mPipeline and is left there; presumably the whole pipeline is torn
 * down on fatal error - confirm. Likewise, pads obtained via
 * GetCompatiblePad may be request pads, which are unreffed but never
 * released via gst_element_release_request_pad.
 */
nsresult
sbGStreamerVideoTranscoder::AddMuxer (GstPad **muxerSrcPad,
                                      GstPad *audioPad,
                                      GstPad *videoPad)
{
  TRACE(("%s[%p]", __FUNCTION__, this));
  NS_ENSURE_ARG_POINTER (muxerSrcPad);
  NS_ASSERTION (audioPad || videoPad, "Must have at least one pad");

  // Ask the configurator what muxer (if any) we should use.
  nsString muxerName;
  nsresult rv = mConfigurator->GetMuxer(muxerName);
  NS_ENSURE_SUCCESS (rv, rv);

  GstElement *muxer = NULL;

  if (muxerName.IsEmpty()) {
    LOG(("Muxer enabled but no muxer specified"));
    return NS_ERROR_FAILURE;
  }

  muxer = gst_element_factory_make (
          NS_ConvertUTF16toUTF8 (muxerName).BeginReading(), NULL);

  if (!muxer) {
    LOG(("No muxer %s available",
         NS_ConvertUTF16toUTF8 (muxerName).BeginReading()));
    TranscodingFatalError("songbird.transcode.error.muxer_unavailable");
    return NS_ERROR_FAILURE;
  }

  GstPad *sinkpad;

  // Muxer, hook it up!
  gst_bin_add (GST_BIN (mPipeline), muxer);

  if (audioPad) {
    // Find (or request) a muxer sink pad that can accept the audio stream.
    sinkpad = GetCompatiblePad (muxer, audioPad);
    if (!sinkpad) {
      TranscodingFatalError("songbird.transcode.error.audio_not_muxable");
      return NS_ERROR_FAILURE;
    }

    GstPadLinkReturn linkret = gst_pad_link (audioPad, sinkpad);
    if (linkret != GST_PAD_LINK_OK) {
      g_object_unref (sinkpad);
      TranscodingFatalError("songbird.transcode.error.audio_not_muxable");
      return NS_ERROR_FAILURE;
    }

    g_object_unref (sinkpad);
  }

  if (videoPad) {
    // Find (or request) a muxer sink pad that can accept the video stream.
    sinkpad = GetCompatiblePad (muxer, videoPad);
    if (!sinkpad) {
      TranscodingFatalError("songbird.transcode.error.video_not_muxable");
      return NS_ERROR_FAILURE;
    }

    GstPadLinkReturn linkret = gst_pad_link (videoPad, sinkpad);
    if (linkret != GST_PAD_LINK_OK) {
      g_object_unref (sinkpad);
      TranscodingFatalError("songbird.transcode.error.video_not_muxable");
      return NS_ERROR_FAILURE;
    }

    g_object_unref (sinkpad);
  }

  // Bring the muxer to the pipeline's state so data can flow.
  gst_element_sync_state_with_parent (muxer);

  // Get the output of the muxer as our source pad.
  *muxerSrcPad = gst_element_get_static_pad (muxer, "src");

  return NS_OK;
}
1762 
1763 nsresult
1764 sbGStreamerVideoTranscoder::CreateSink (GstElement **aSink)
1765 {
1766  TRACE(("%s[%p]", __FUNCTION__, this));
1767 
1768  GstElement *sink = NULL;
1769 
1770  if (mDestStream) {
1771  sink = gst_element_factory_make ("mozillasink", "sink");
1772  if (sink) {
1773  // Set the 'stream' property to the raw nsIOutputStream pointer.
1774  g_object_set (sink, "stream", mDestStream.get(), NULL);
1775  }
1776  }
1777  else if (!mDestURI.IsEmpty()) {
1778  nsCString uri = NS_ConvertUTF16toUTF8 (mDestURI);
1779  sink = gst_element_make_from_uri (GST_URI_SINK,
1780  uri.BeginReading(),
1781  "sink");
1782  }
1783 
1784  if (!sink) {
1785  TranscodingFatalError("songbird.transcode.error.no_sink");
1786  return NS_ERROR_FAILURE;
1787  }
1788 
1789  *aSink = sink;
1790  return NS_OK;
1791 }
1792 
1793 nsresult
1794 sbGStreamerVideoTranscoder::AddSink (GstPad *muxerSrcPad)
1795 {
1796  TRACE(("%s[%p]", __FUNCTION__, this));
1797 
1798  GstElement *sink = NULL;
1799  nsresult rv = CreateSink(&sink);
1800  NS_ENSURE_SUCCESS (rv, rv);
1801 
1802  gst_bin_add (GST_BIN (mPipeline), sink);
1803  gst_element_sync_state_with_parent (sink);
1804 
1805  GstPad *sinkpad = gst_element_get_static_pad (sink, "sink");
1806 
1807  GstPadLinkReturn linkret = gst_pad_link (muxerSrcPad, sinkpad);
1808  if (linkret != GST_PAD_LINK_OK) {
1809  TranscodingFatalError("songbird.transcode.error.sink_incompatible");
1810  return NS_ERROR_FAILURE;
1811  }
1812 
1813  g_object_unref (sinkpad);
1814 
1815  return NS_OK;
1816 }
1817 
1818 nsresult
1819 sbGStreamerVideoTranscoder::SetVideoFormatFromCaps (
1820  sbIMediaFormatVideoMutable *format, GstCaps *caps)
1821 {
1822  nsresult rv;
1823 
1824  GstStructure *structure = gst_caps_get_structure (caps, 0);
1825  gboolean success;
1826 
1827  gint width, height;
1828 
1829  success = gst_structure_get_int (structure, "width", &width);
1830  NS_ENSURE_TRUE (success, NS_ERROR_FAILURE);
1831  success = gst_structure_get_int (structure, "height", &height);
1832  NS_ENSURE_TRUE (success, NS_ERROR_FAILURE);
1833 
1834  const GValue* par = gst_structure_get_value(structure,
1835  "pixel-aspect-ratio");
1836  gint parN, parD;
1837  if (par) {
1838  parN = gst_value_get_fraction_numerator(par);
1839  parD = gst_value_get_fraction_denominator(par);
1840  }
1841  else {
1842  // Default to square pixels.
1843  parN = 1;
1844  parD = 1;
1845  }
1846 
1847  const GValue* fr = gst_structure_get_value(structure, "framerate");
1848  gint frN, frD;
1849  if (fr) {
1850  frN = gst_value_get_fraction_numerator(fr);
1851  frD = gst_value_get_fraction_denominator(fr);
1852  }
1853  else {
1854  // Default to 0/1 indicating unknown?
1855  frN = 0;
1856  frD = 1;
1857  }
1858 
1859  // TODO: should we describe the sub-type of raw video (i.e. YUV 4:2:0 or
1860  // even more specifically stuff like "I420")?
1861  rv = format->SetVideoType (NS_LITERAL_STRING ("video/x-raw"));
1862  NS_ENSURE_SUCCESS (rv, rv);
1863  rv = format->SetVideoWidth (width);
1864  NS_ENSURE_SUCCESS (rv, rv);
1865  rv = format->SetVideoHeight (height);
1866  NS_ENSURE_SUCCESS (rv, rv);
1867  rv = format->SetVideoPAR(parN, parD);
1868  NS_ENSURE_SUCCESS (rv, rv);
1869  rv = format->SetVideoFrameRate(frN, frD);
1870  NS_ENSURE_SUCCESS (rv, rv);
1871 
1872  return NS_OK;
1873 }
1874 
1875 nsresult
1876 sbGStreamerVideoTranscoder::SetAudioFormatFromCaps (
1877  sbIMediaFormatAudioMutable *format, GstCaps *caps)
1878 {
1879  nsresult rv;
1880  GstStructure *structure = gst_caps_get_structure (caps, 0);
1881  gboolean success;
1882 
1883  gint rate;
1884  gint channels;
1885 
1886  success = gst_structure_get_int (structure, "rate", &rate);
1887  NS_ENSURE_TRUE (success, NS_ERROR_FAILURE);
1888  success = gst_structure_get_int (structure, "channels", &channels);
1889  NS_ENSURE_TRUE (success, NS_ERROR_FAILURE);
1890 
1891  // TODO: should we describe the sub-type of raw audio (i.e. integer, float,
1892  // and details like depth, endianness)
1893  rv = format->SetAudioType(NS_LITERAL_STRING ("audio/x-raw"));
1894  NS_ENSURE_SUCCESS (rv, rv);
1895  rv = format->SetSampleRate (rate);
1896  NS_ENSURE_SUCCESS (rv, rv);
1897  rv = format->SetChannels (channels);
1898  NS_ENSURE_SUCCESS (rv, rv);
1899 
1900  return NS_OK;
1901 }
1902 
1903 nsresult
1904 sbGStreamerVideoTranscoder::InitializeConfigurator()
1905 {
1906  TRACE(("%s[%p]", __FUNCTION__, this));
1907 
1908  nsresult rv;
1909 
1910  // Build sbIMediaFormat describing the decoded data.
1911  nsCOMPtr<sbIMediaFormatMutable> mediaFormat =
1912  do_CreateInstance(SB_MEDIAFORMAT_CONTRACTID, &rv);
1913  NS_ENSURE_SUCCESS (rv, rv);
1914 
1915  if (mVideoSrc) {
1916  nsCOMPtr<sbIMediaFormatVideoMutable> videoFormat =
1917  do_CreateInstance(SB_MEDIAFORMATVIDEO_CONTRACTID, &rv);
1918  NS_ENSURE_SUCCESS (rv, rv);
1919 
1920  GstCaps *videoCaps = GetCapsFromPad (mVideoSrc);
1921  NS_ENSURE_TRUE (videoCaps, NS_ERROR_FAILURE);
1922 
1923  rv = SetVideoFormatFromCaps(videoFormat, videoCaps);
1924  gst_caps_unref (videoCaps);
1925  NS_ENSURE_SUCCESS (rv, rv);
1926 
1927  rv = mediaFormat->SetVideoStream(videoFormat);
1928  NS_ENSURE_SUCCESS (rv, rv);
1929  }
1930 
1931 
1932  if (mAudioSrc) {
1933  nsCOMPtr<sbIMediaFormatAudioMutable> audioFormat =
1934  do_CreateInstance(SB_MEDIAFORMATAUDIO_CONTRACTID, &rv);
1935  NS_ENSURE_SUCCESS (rv, rv);
1936 
1937  GstCaps *audioCaps = GetCapsFromPad (mAudioSrc);
1938  NS_ENSURE_TRUE (audioCaps, NS_ERROR_FAILURE);
1939 
1940  rv = SetAudioFormatFromCaps(audioFormat, audioCaps);
1941  gst_caps_unref (audioCaps);
1942  NS_ENSURE_SUCCESS (rv, rv);
1943 
1944  rv = mediaFormat->SetAudioStream(audioFormat);
1945  NS_ENSURE_SUCCESS (rv, rv);
1946  }
1947 
1948  rv = mConfigurator->SetInputFormat(mediaFormat);
1949  NS_ENSURE_SUCCESS (rv, rv);
1950 
1951  rv = mConfigurator->Configurate();
1952  if (NS_FAILED (rv)) {
1953  TranscodingFatalError("songbird.transcode.error.failed_configuration");
1954  NS_ENSURE_SUCCESS (rv, rv);
1955  }
1956 
1957  // Ensure that the proper file extension is being used for the destintation
1958  // output file (if it's a file)
1959  if (!mDestURI.IsEmpty()) {
1960  nsCOMPtr<nsIURI> fixedDestURI;
1961  rv = NS_NewURI(getter_AddRefs(fixedDestURI),
1962  NS_ConvertUTF16toUTF8(mDestURI));
1963  NS_ENSURE_SUCCESS(rv, rv);
1964 
1965  nsCOMPtr<nsIFileURL> fixedDestFileURI = do_QueryInterface(
1966  fixedDestURI, &rv);
1967  if (NS_SUCCEEDED(rv) && fixedDestFileURI) {
1968  nsCString curFileExt;
1969  rv = fixedDestFileURI->GetFileExtension(curFileExt);
1970  NS_ENSURE_SUCCESS(rv, rv);
1971 
1972  nsCString configFileExt;
1973  rv = mConfigurator->GetFileExtension(configFileExt);
1974  NS_ENSURE_SUCCESS(rv, rv);
1975 
1976  if (!curFileExt.Equals(configFileExt, CaseInsensitiveCompare)) {
1977  rv = fixedDestFileURI->SetFileExtension(configFileExt);
1978  NS_ENSURE_SUCCESS(rv, rv);
1979 
1980  // Check if the destination file already exists
1981  nsCOMPtr<nsIFile> destFile;
1982  rv = fixedDestFileURI->GetFile(getter_AddRefs(destFile));
1983 
1984  PRBool exists;
1985  rv = destFile->Exists(&exists);
1986  NS_ENSURE_SUCCESS(rv, rv);
1987 
1988  // Create a unique file if destination file already exists.
1989  if (exists) {
1990  rv = destFile->CreateUnique(nsIFile::NORMAL_FILE_TYPE,
1992  NS_ENSURE_SUCCESS(rv, rv);
1993  rv = NS_NewFileURI(getter_AddRefs(fixedDestURI), destFile);
1994  NS_ENSURE_SUCCESS(rv, rv);
1995  fixedDestFileURI = do_QueryInterface(fixedDestURI, &rv);
1996  NS_ENSURE_SUCCESS(rv, rv);
1997  }
1998 
1999  nsCString fixedSpec;
2000  rv = fixedDestFileURI->GetSpec(fixedSpec);
2001  NS_ENSURE_SUCCESS(rv, rv);
2002 
2003  CopyUTF8toUTF16(fixedSpec, mDestURI);
2004  }
2005  }
2006  }
2007 
2008  /* Determine whether we want audio/video/muxer. Whether we _actually_
2009  use these also depends on what the input file has - but we won't
2010  try to set up a video bin if there's no video encoder */
2011  nsString audioEncoder;
2012  rv = mConfigurator->GetAudioEncoder(audioEncoder);
2013  NS_ENSURE_SUCCESS(rv, rv);
2014  rv = mConfigurator->GetUseAudioEncoder(&mUseAudio);
2015  NS_ENSURE_SUCCESS(rv, rv);
2016 
2017  nsString videoEncoder;
2018  rv = mConfigurator->GetVideoEncoder(videoEncoder);
2019  NS_ENSURE_SUCCESS(rv, rv);
2020  rv = mConfigurator->GetUseVideoEncoder(&mUseVideo);
2021  NS_ENSURE_SUCCESS(rv, rv);
2022 
2023  nsString muxer;
2024  rv = mConfigurator->GetMuxer(muxer);
2025  NS_ENSURE_SUCCESS(rv, rv);
2026  rv = mConfigurator->GetUseMuxer(&mUseMuxer);
2027  NS_ENSURE_SUCCESS(rv, rv);
2028 
2029  /* If we're not using a muxer, we must have ONLY one codec */
2030  if (!mUseMuxer && (mUseAudio && mUseVideo))
2031  return NS_ERROR_UNEXPECTED;
2032 
2033  return NS_OK;
2034 }
2035 
/**
 * Called when uridecodebin has exposed all of its source pads.
 *
 * For each available stream (audio/video) this attaches a queue to the
 * decoder pad, blocks the queue's src pad, and subscribes to caps changes
 * on the original decoder pad. Once every stream has fixed caps
 * (CheckForAllCaps), the rest of the pipeline is built and the blocks are
 * released.
 *
 * @param uridecodebin The decodebin that finished pad creation (unused
 *        here; the pads were captured earlier in mAudioSrc/mVideoSrc).
 * @return NS_OK, or NS_ERROR_FAILURE if the input had no usable streams.
 */
nsresult
sbGStreamerVideoTranscoder::DecoderNoMorePads(GstElement *uridecodebin)
{
  TRACE(("%s[%p]", __FUNCTION__, this));

  if (!mAudioSrc && !mVideoSrc) {
    // We have neither audio nor video. That's not very good!
    TranscodingFatalError("songbird.transcode.error.no_streams");
    return NS_ERROR_FAILURE;
  }

  // Now, set up queues, and pad blocks on the queues. Get notification of
  // caps being set on each src pad (the original srcpad from decodebin, NOT
  // the queue's srcpad). When we have caps on all streams, configure the
  // rest of the pipeline and release the pad blocks.

  if (mAudioSrc) {
    g_signal_connect (mAudioSrc, "notify::caps",
            G_CALLBACK (pad_notify_caps_cb), this);

    GstElement *queue = gst_element_factory_make ("queue", "audio-queue");
    GstPad *queueSink = gst_element_get_pad (queue, "sink");

    gst_bin_add (GST_BIN (mPipeline), queue);
    gst_element_sync_state_with_parent (queue);

    gst_pad_link (mAudioSrc, queueSink);
    g_object_unref (queueSink);

    // Keep the queue's src pad (ref held in the member) so the block can be
    // released later in CheckForAllCaps.
    GstPad *queueSrc = gst_element_get_pad (queue, "src");
    mAudioQueueSrc = queueSrc;

    // Block the pad asynchronously; pad_blocked_cb fires when it takes
    // effect.
    gst_pad_set_blocked_async (queueSrc, TRUE,
            (GstPadBlockCallback)pad_blocked_cb, this);
  }

  if (mVideoSrc) {
    // Same setup as the audio branch, for the video stream.
    g_signal_connect (mVideoSrc, "notify::caps",
            G_CALLBACK (pad_notify_caps_cb), this);

    GstElement *queue = gst_element_factory_make ("queue", "video-queue");
    GstPad *queueSink = gst_element_get_pad (queue, "sink");

    gst_bin_add (GST_BIN (mPipeline), queue);
    gst_element_sync_state_with_parent (queue);

    gst_pad_link (mVideoSrc, queueSink);
    g_object_unref (queueSink);

    GstPad *queueSrc = gst_element_get_pad (queue, "src");
    mVideoQueueSrc = queueSrc;

    gst_pad_set_blocked_async (queueSrc, TRUE,
            (GstPadBlockCallback)pad_blocked_cb, this);
  }

  // From here on, caps notifications drive pipeline completion.
  mWaitingForCaps = PR_TRUE;

  return NS_OK;
}
2096 
/**
 * Caps-change notification for a decoder source pad.
 *
 * @param pad The pad whose caps changed (unused: CheckForAllCaps
 *            re-examines every pad regardless of which one fired).
 */
nsresult
sbGStreamerVideoTranscoder::PadNotifyCaps (GstPad *pad)
{
  return CheckForAllCaps();
}
2102 
2103 
2104 GstCaps *
2105 sbGStreamerVideoTranscoder::GetCapsFromPad (GstPad *pad)
2106 {
2107  // We want to get the caps from the decoder associated with this pad (but this
2108  // pad might be a ghost pad, or a queue pad linked to the ghost pad, etc)
2109  // gst_pad_get_negotiated_caps() is needed. gst_pad_get_caps would fail in
2110  // in some cases (wmv) even with the fallback to GST_PAD_CAPS.
2111  // We still fallback and try GST_PAD_CAPS, which works in some of these cases,
2112  // if the get_caps returned us some non-fixed caps (usually these will be
2113  // template caps). The use of gst_pad_get_negotiated_caps was thought of
2114  // mainly because the media inspector uses it and that particular logic
2115  // is doing the same thing as the transcoder, getting caps for the stream.
2116 
2117  GstPad * realPad = GetRealPad(pad);
2118  GstCaps *caps = gst_pad_get_negotiated_caps (realPad);
2119  if (caps) {
2120  if (gst_caps_is_fixed (caps)) {
2121  g_object_unref(realPad);
2122  return caps;
2123  }
2124  gst_caps_unref (caps);
2125  }
2126 
2127  caps = GST_PAD_CAPS (realPad);
2128  if (caps) {
2129  gst_caps_ref (caps);
2130  g_object_unref(realPad);
2131  return caps;
2132  }
2133  g_object_unref(realPad);
2134  return NULL;
2135 }
2136 
nsresult
sbGStreamerVideoTranscoder::CheckForAllCaps ()
{
  // Once every selected stream (audio and/or video) has fixed caps available,
  // build the remainder of the pipeline and unblock the queue source pads so
  // data starts flowing. Called from pad-block and notify::caps callbacks,
  // potentially on GStreamer streaming threads.

  // Ensure this isn't called from multiple threads concurrently.
  nsAutoLock lock(mBuildLock);
  nsresult rv = NS_OK;

  if (mWaitingForCaps) {
    TRACE(("CheckForAllCaps: checking if we have fixed caps on all pads"));
    if (mAudioSrc) {
      GstCaps *audioCaps = GetCapsFromPad (mAudioSrc);
      if (!audioCaps) {
        // Not done yet; a later caps notification will retry.
        return NS_OK;
      }
      gst_caps_unref (audioCaps);
    }

    if (mVideoSrc) {
      GstCaps *videoCaps = GetCapsFromPad (mVideoSrc);
      if (!videoCaps) {
        // Not done yet
        return NS_OK;
      }
      gst_caps_unref (videoCaps);
    }

    LOG(("Have fixed caps on all pads: proceeding to build pipeline"));

    // Ok, we have caps for everything.
    // Build the rest of the pipeline, and unblock the pads.
    rv = BuildRemainderOfPipeline();

    // success or failure, we're done waiting for caps
    mWaitingForCaps = PR_FALSE;

    if (NS_SUCCEEDED (rv)) {
      // Release the pad blocks installed when the queues were added; data can
      // now flow into the newly built encoder/muxer chain.
      if (mAudioQueueSrc) {
        gst_pad_set_blocked_async (mAudioQueueSrc, FALSE,
                                   (GstPadBlockCallback)pad_blocked_cb, this);
      }
      if (mVideoQueueSrc) {
        gst_pad_set_blocked_async (mVideoQueueSrc, FALSE,
                                   (GstPadBlockCallback)pad_blocked_cb, this);
      }
    } else {
      // handle NS_FAILED(rv)
      // Set status so progress reporting shows we failed
      // NOTE(review): the source this was extracted from appears to have
      // dropped one statement here (the original file's line numbering jumps
      // past a line) — presumably the status-setting call the comment above
      // refers to. Confirm against the original sbGStreamerVideoTranscode.cpp.

      // unlock before making the proxied error dispatch, since the proxied
      // call can re-enter code that takes mBuildLock.
      lock.unlock();
      TranscodingFatalError("songbird.transcode.error.failed_configuration");
      lock.lock();
    }

    // Done with all building, clean these up.
    CleanupPads();
  }

  return rv;
}
2199 
2200 nsresult
2201 sbGStreamerVideoTranscoder::PadBlocked (GstPad *pad, gboolean blocked)
2202 {
2203  if (blocked ) {
2204  LOG(("Pad blocked, checking if we have full caps yet"));
2205  return CheckForAllCaps();
2206  }
2207  else {
2208  LOG(("PadBlocked: returning NS_OK after unblocking."));
2209  return NS_OK;
2210  }
2211 }
2212 
2213 nsresult
2214 sbGStreamerVideoTranscoder::BuildRemainderOfPipeline ()
2215 {
2216  TRACE(("%s[%p]", __FUNCTION__, this));
2217 
2218  nsresult rv;
2219  // Set up the configurator - if this succeeds, then we have an output
2220  // format selected, so we can query that as needed to set up our encoders.
2221  rv = InitializeConfigurator();
2222  NS_ENSURE_SUCCESS (rv, rv);
2223 
2224  GstPad *newAudioSrc = NULL;
2225  GstPad *newVideoSrc = NULL;
2226 
2227  if (mAudioQueueSrc && mUseAudio) {
2228  rv = AddAudioBin (mAudioQueueSrc, &newAudioSrc);
2229  NS_ENSURE_SUCCESS (rv, rv);
2230  }
2231 
2232  if (mVideoQueueSrc && mUseVideo) {
2233  rv = AddVideoBin (mVideoQueueSrc, &newVideoSrc);
2234  NS_ENSURE_SUCCESS (rv, rv);
2235  }
2236 
2237  GstPad *srcpad = NULL;
2238 
2239  if (mUseMuxer) {
2240  rv = AddMuxer (&srcpad, newAudioSrc, newVideoSrc);
2241  NS_ENSURE_SUCCESS (rv, rv);
2242  }
2243  else {
2244  if (newAudioSrc)
2245  srcpad = (GstPad *)gst_object_ref (newAudioSrc);
2246  else if (newVideoSrc)
2247  srcpad = (GstPad *)gst_object_ref (newVideoSrc);
2248  else {
2249  NS_NOTREACHED ("No audio or video, not allowed");
2250  return NS_ERROR_FAILURE;
2251  }
2252  }
2253 
2254  rv = AddSink (srcpad);
2255  if (NS_FAILED (rv)) {
2256  NS_ENSURE_SUCCESS (rv, rv);
2257  }
2258 
2259  g_object_unref (srcpad);
2260  if (newVideoSrc)
2261  g_object_unref (newVideoSrc);
2262  if (newAudioSrc)
2263  g_object_unref (newAudioSrc);
2264 
2265  // We have the pipeline fully set up; now we can set the metadata.
2266  rv = SetMetadataOnTagSetters();
2267 
2268  LOG(("Finished building pipeline"));
2269  return NS_OK;
2270 }
2271 
2272 nsresult
2273 sbGStreamerVideoTranscoder::BuildTranscodePipeline(const gchar *aPipelineName)
2274 {
2275  TRACE(("%s[%p]", __FUNCTION__, this));
2276 
2277  // Our pipeline will look roughly like this. For details, see the functions
2278  // that build each of the bins.
2279  // The audio encoder and video encoder bins are optional - they will only
2280  // exist if the relevant media type is found.
2281  // The muxer is also optional if only one of audio or video is in use.
2282  //
2283  // transcode-decoder: an instance of uridecodebin.
2284 
2285  // [------------------------------------------------------------------------]
2286  // [transcode-pipeline [-----] [--------------] ]
2287  // [ [queue]--[audio-encoder ]\ [-----------------] ]
2288  // [ /[-----] [--------------] \ [output-bin ] ]
2289  // [ [------------------] / \[ [-----] [----] ] ]
2290  // [ [transcode-decoder ]/ [-[muxer]--[sink] ] ]
2291  // [ [ ]\ /[ [-----] [----] ] ]
2292  // [ [------------------] \ [-----] [--------------] / [ ] ]
2293  // [ \[queue]--[video-encoder ]/ [-----------------] ]
2294  // [ [-----] [--------------] ]
2295  // [ ]
2296  // [------------------------------------------------------------------------]
2297  //
2298  // audiorate: insert/drop samples as needed to make sure we have a
2299  // continuous audio stream with no gaps or overlaps.
2300  // audioconvert: convert format of the raw audio for further processing.
2301  // Includes down- or up-mixing.
2302  // ar1: audioresample used to resample audio to the desired sample
2303  // rate.
2304  // capsfilter: specify the format to convert to for the encoder to encode.
2305  // ar2: audioresample to re-resample to a rate supported by the
2306  // encoder. This is basically a hack to handle the fact that
2307  // the configurator doesn't know how to handle the constraints
2308  // of what the encoder is capable of. We don't similarly have
2309  // another audioconvert, or other converters on the video side,
2310  // because in practice we don't run into this problem there.
2311  //
2312  // [------------------------------------------------------------------------]
2313  // [audio-encoder ]
2314  // [ ]
2315  // [ [---------] [------------] [---] [----------] [---] [-------] ]
2316  // [-[audiorate]--[audioconvert]--[ar1]--[capsfilter]-[ar2]--[encoder]------]
2317  // [ [---------] [------------] [---] [----------] [---] [-------] ]
2318  // [ ]
2319  // [------------------------------------------------------------------------]
2320  //
2321  // videorate: adjust framerate of video to a constant rate, by dropping or
2322  // duplicating frames as needed.
2323  // colorspace: convert to a colour space and pixel format that we can process
2324  // further and encode.
2325  // videoscale: scale the video to the size we want in at least one dimension.
2326  // The other dimension MAY be smaller than the desired output.
2327  // videobox: add black bars at top/bottom or left/right of the actual video
2328  // so that the video has BOTH dimensions correct, and the actual
2329  // image material itself retains the correct aspect ratio.
2330  // capsfilter: specify the format to convert to for the encoder to encode.
2331 
2332  // [-------------------------------------------------------------------------]
2333  // [video-encoder ]
2334  // [ ]
2335  // [ [---------] [----------] [----------] [--------] [----------] [-------] ]
2336  // [-[videorate]-[colorspace]-[videoscale]-[videobox]-[capsfilter]-[encoder]-]
2337  // [ [---------] [----------] [----------] [--------] [----------] [-------] ]
2338  // [ ]
2339  // [-------------------------------------------------------------------------]
2340 
2341  mPipeline = gst_pipeline_new (aPipelineName);
2342  NS_ENSURE_TRUE (mPipeline, NS_ERROR_FAILURE);
2343 
2344  GstElement *uridecodebin = gst_element_factory_make("uridecodebin",
2345  "transcode-decoder");
2346  if (!uridecodebin) {
2347  g_object_unref (mPipeline);
2348  mPipeline = NULL;
2349 
2350  return NS_ERROR_FAILURE;
2351  }
2352 
2353  // Set the source URI
2354  nsCString uri = NS_ConvertUTF16toUTF8 (mSourceURI);
2355  g_object_set (uridecodebin, "uri", uri.BeginReading(), NULL);
2356 
2357  // Connect to callbacks for when uridecodebin adds a new pad that we (may)
2358  // want to connect up to the rest of the pipeline, and for when we're not
2359  // going to get any more pads (and so can actually continue on to transcoding)
2360  g_signal_connect (uridecodebin, "pad-added",
2361  G_CALLBACK (decodebin_pad_added_cb), this);
2362  g_signal_connect (uridecodebin, "no-more-pads",
2363  G_CALLBACK (decodebin_no_more_pads_cb), this);
2364 
2365  gst_bin_add (GST_BIN (mPipeline), uridecodebin);
2366 
2367  return NS_OK;
2368 }
2369 
return NS_OK
function succeeded(ch, cx, status, data)
nsresult SB_NewTranscodeError(const nsAString &aMessageWithItem, const nsAString &aMessageWithoutItem, const nsAString &aDetails, const nsAString &aUri, sbIMediaItem *aMediaItem, sbITranscodeError **_retval)
#define SB_DEFAULT_FILE_PERMISSIONS
Definition: sbFileUtils.h:123
GstPad * GetRealPad(GstPad *pad)
Generic interface for exposing long running jobs to the UI.
nsString Get(const nsAString &aKey, const nsAString &aDefault=SBVoidString())
An object capable of transcoding a source URI to a destination file.
var event
NS_INTERFACE_MAP_END NS_IMPL_CI_INTERFACE_GETTER6(sbDeviceLibrary, nsIClassInfo, sbIDeviceLibrary, sbILibrary, sbIMediaList, sbIMediaItem, sbILibraryResource) sbDeviceLibrary
const unsigned long FAILED
const unsigned long ERROR_EVENT
Indicates the event is an error and will have its error member set.
const unsigned short STATUS_SUCCEEDED
Constant indicating that the job has completed.
Generic interface extending sbIJobProgress that can track expected time, etc in addition to abstract ...
sbIJobCancelable NS_DECL_CLASSINFO(sbGStreamerVideoTranscoder)
virtual void HandleErrorMessage(GstMessage *message)
function width(ele) rect(ele).width
nsresult ApplyPropertyBagToElement(GstElement *element, nsIPropertyBag *props)
const unsigned short STATUS_RUNNING
Constant indicating that the job is active.
_hideDatepicker duration
var bundle
Base interface for all Transcoding Configurators. This interface should be implemented by any Configu...
void DispatchMediacoreEvent(unsigned long type, nsIVariant *aData=NULL, sbIMediacoreError *aError=NULL)
GstTagList * ConvertPropertyArrayToTagList(sbIPropertyArray *properties)
virtual void HandleEOSMessage(GstMessage *message)
GstMessage * message
#define PROGRESS_INTERVAL
_updateDatepicker height
return ret
Songbird String Bundle Definitions.
#define SB_MEDIAFORMATVIDEO_CONTRACTID
nsresult SBGetLocalizedString(nsAString &aString, const nsAString &aKey, const nsAString &aDefault, class nsIStringBundle *aStringBundle)
var uri
Definition: FeedWriter.js:1135
function debug(aMsg)
NS_IMPL_THREADSAFE_CI(sbGStreamerVideoTranscoder)
if(DEBUG_DATAREMOTES)
#define TRACE(args)
void SetPipelineOp(GStreamer::pipelineOp_t aPipelineOp)
Implemented to receive notifications from sbIJobProgress interfaces.
#define SB_MEDIAFORMAT_CONTRACTID
#define LOG(args)
Interface that defines a single item of media in the system.
const unsigned short STATUS_FAILED
Constant indicating that the job has completed with errors.
An interface to carry around arrays of nsIProperty instances. Users of this interface should only QI ...
var failed
#define SB_MEDIAFORMATAUDIO_CONTRACTID
_getSelectedPageStyle s i
nsITimerCallback
NS_IMPL_THREADSAFE_ISUPPORTS8(sbGStreamerVideoTranscoder, nsIClassInfo, sbIGStreamerPipeline, sbITranscodeVideoJob, sbIMediacoreEventTarget, sbIJobProgress, sbIJobProgressTime, sbIJobCancelable, nsITimerCallback) NS_IMPL_CI_INTERFACE_GETTER6(sbGStreamerVideoTranscoder