GstEngine: Move CreateElement() to GstEnginePipeline

Jonas Kvinge
2021-10-16 21:28:56 +02:00
parent 6d7a01fb4e
commit c3ce6cff72
4 changed files with 195 additions and 107 deletions

View File

@@ -91,7 +91,6 @@ GstEngine::GstEngine(TaskManager *task_manager, QObject *parent)
       waiting_to_seek_(false),
       seek_pos_(0),
       timer_id_(-1),
-      next_element_id_(0),
       is_fading_out_to_pause_(false),
       has_faded_out_(false),
       scope_chunk_(0),
@@ -448,25 +447,6 @@ void GstEngine::ReloadSettings() {
 }
-GstElement *GstEngine::CreateElement(const QString &factoryName, GstElement *bin, const bool showerror) {
-  // Make a unique name
-  QString name = factoryName + "-" + QString::number(next_element_id_++);
-  GstElement *element = gst_element_factory_make(factoryName.toUtf8().constData(), name.toUtf8().constData());
-  if (!element) {
-    if (showerror) emit Error(QString("GStreamer could not create the element: %1.").arg(factoryName));
-    else qLog(Error) << "GStreamer could not create the element:" << factoryName;
-    emit StateChanged(Engine::Error);
-    emit FatalError();
-    return nullptr;
-  }
-  if (bin) gst_bin_add(GST_BIN(bin), element);
-  return element;
-}
 void GstEngine::ConsumeBuffer(GstBuffer *buffer, const int pipeline_id, const QString &format) {
   // Schedule this to run in the GUI thread. The buffer gets added to the queue and unreffed by UpdateScope.
@@ -810,7 +790,7 @@ std::shared_ptr<GstEnginePipeline> GstEngine::CreatePipeline() {
   EnsureInitialized();
-  std::shared_ptr<GstEnginePipeline> ret = std::make_shared<GstEnginePipeline>(this);
+  std::shared_ptr<GstEnginePipeline> ret = std::make_shared<GstEnginePipeline>();
   ret->set_output_device(output_, device_);
   ret->set_volume_enabled(volume_control_);
   ret->set_stereo_balancer_enabled(stereo_balancer_enabled_);
@@ -841,7 +821,14 @@ std::shared_ptr<GstEnginePipeline> GstEngine::CreatePipeline() {
 std::shared_ptr<GstEnginePipeline> GstEngine::CreatePipeline(const QByteArray &gst_url, const QUrl &original_url, const qint64 end_nanosec) {
   std::shared_ptr<GstEnginePipeline> ret = CreatePipeline();
-  if (!ret->InitFromUrl(gst_url, original_url, end_nanosec)) ret.reset();
+  QString error;
+  if (!ret->InitFromUrl(gst_url, original_url, end_nanosec, error)) {
+    ret.reset();
+    emit Error(error);
+    emit StateChanged(Engine::Error);
+    emit FatalError();
+  }
   return ret;
 }

View File

@@ -88,7 +88,6 @@ class GstEngine : public Engine::Base, public GstBufferConsumer {
   void SetStartup(GstStartup *gst_startup) { gst_startup_ = gst_startup; }
   void EnsureInitialized() { gst_startup_->EnsureInitialized(); }
-  GstElement *CreateElement(const QString &factoryName, GstElement *bin = nullptr, const bool showerror = true);
   void ConsumeBuffer(GstBuffer *buffer, const int pipeline_id, const QString &format) override;
  public slots:
@@ -191,7 +190,6 @@ class GstEngine : public Engine::Base, public GstBufferConsumer {
   quint64 seek_pos_;
   int timer_id_;
-  int next_element_id_;
   bool is_fading_out_to_pause_;
   bool has_faded_out_;

View File

@@ -65,9 +65,8 @@ const int GstEnginePipeline::kEqBandFrequencies[] = { 60, 170, 310, 600, 1000, 3
 int GstEnginePipeline::sId = 1;
-GstEnginePipeline::GstEnginePipeline(GstEngine *engine, QObject *parent)
+GstEnginePipeline::GstEnginePipeline(QObject *parent)
     : QObject(parent),
-      engine_(engine),
       id_(sId++),
       valid_(false),
       volume_enabled_(true),
@@ -210,16 +209,39 @@ void GstEnginePipeline::set_channels(const bool enabled, const int channels) {
   channels_ = channels;
 }
-bool GstEnginePipeline::InitFromUrl(const QByteArray &stream_url, const QUrl &original_url, const qint64 end_nanosec) {
+GstElement *GstEnginePipeline::CreateElement(const QString &factory_name, const QString &name, GstElement *bin, QString &error) {
+  QString unique_name = QString("pipeline") + "-" + QString::number(id_) + "-" + (name.isEmpty() ? factory_name : name);
+  GstElement *element = gst_element_factory_make(factory_name.toUtf8().constData(), unique_name.toUtf8().constData());
+  if (!element) {
+    qLog(Error) << "GStreamer could not create the element" << factory_name << "with name" << unique_name;
+    error = QString("GStreamer could not create the element %1 with name %2.").arg(factory_name).arg(unique_name);
+  }
+  if (bin && element) gst_bin_add(GST_BIN(bin), element);
+  return element;
+}
+bool GstEnginePipeline::InitFromUrl(const QByteArray &stream_url, const QUrl &original_url, const qint64 end_nanosec, QString &error) {
   stream_url_ = stream_url;
   original_url_ = original_url;
   end_offset_nanosec_ = end_nanosec;
-  pipeline_ = engine_->CreateElement("playbin");
+  pipeline_ = CreateElement("playbin", "pipeline", nullptr, error);
   if (!pipeline_) return false;
-  g_object_set(G_OBJECT(pipeline_), "uri", stream_url.constData(), nullptr);
+  pad_added_cb_id_ = CHECKED_GCONNECT(G_OBJECT(pipeline_), "pad-added", &NewPadCallback, this);
+  notify_source_cb_id_ = CHECKED_GCONNECT(G_OBJECT(pipeline_), "notify::source", &SourceSetupCallback, this);
+  about_to_finish_cb_id_ = CHECKED_GCONNECT(G_OBJECT(pipeline_), "about-to-finish", &AboutToFinishCallback, this);
+  if (!InitAudioBin(error)) return false;
+  // Set playbin's sink to be our custom audio-sink.
+  g_object_set(GST_OBJECT(pipeline_), "audio-sink", audiobin_, nullptr);
   gint flags = 0;
   g_object_get(G_OBJECT(pipeline_), "flags", &flags, nullptr);
@@ -231,23 +253,17 @@ bool GstEnginePipeline::InitFromUrl(const QByteArray &stream_url, const QUrl &or
   else {
     flags &= ~0x00000010;
   }
   g_object_set(G_OBJECT(pipeline_), "flags", flags, nullptr);
-  pad_added_cb_id_ = CHECKED_GCONNECT(G_OBJECT(pipeline_), "pad-added", &NewPadCallback, this);
-  notify_source_cb_id_ = CHECKED_GCONNECT(G_OBJECT(pipeline_), "notify::source", &SourceSetupCallback, this);
-  about_to_finish_cb_id_ = CHECKED_GCONNECT(G_OBJECT(pipeline_), "about-to-finish", &AboutToFinishCallback, this);
-  if (!InitAudioBin()) return false;
-  // Set playbin's sink to be our custom audio-sink.
-  g_object_set(GST_OBJECT(pipeline_), "audio-sink", audiobin_, nullptr);
+  g_object_set(G_OBJECT(pipeline_), "uri", stream_url.constData(), nullptr);
   pipeline_is_connected_ = true;
   return true;
 }
-bool GstEnginePipeline::InitAudioBin() {
+bool GstEnginePipeline::InitAudioBin(QString &error) {
   gst_segment_init(&last_playbin_segment_, GST_FORMAT_TIME);
@@ -256,7 +272,7 @@ bool GstEnginePipeline::InitAudioBin() {
   if (!audiobin_) return false;
   // Create the sink
-  GstElement *audiosink = engine_->CreateElement(output_, audiobin_);
+  GstElement *audiosink = CreateElement(output_, output_, audiobin_, error);
   if (!audiosink) {
     gst_object_unref(GST_OBJECT(audiobin_));
     return false;
@@ -309,10 +325,15 @@ bool GstEnginePipeline::InitAudioBin() {
   // Create all the other elements
-  audioqueue_ = engine_->CreateElement("queue2", audiobin_);
-  GstElement *audioconverter = engine_->CreateElement("audioconvert", audiobin_);
-  if (!audioqueue_ || !audioconverter) {
+  audioqueue_ = CreateElement("queue2", "audioqueue", audiobin_, error);
+  if (!audioqueue_) {
+    gst_object_unref(GST_OBJECT(audiobin_));
+    audiobin_ = nullptr;
+    return false;
+  }
+  GstElement *audioconverter = CreateElement("audioconvert", "audioconverter", audiobin_, error);
+  if (!audioconverter) {
     gst_object_unref(GST_OBJECT(audiobin_));
     audiobin_ = nullptr;
     return false;
@@ -320,20 +341,40 @@ bool GstEnginePipeline::InitAudioBin() {
   // Create the volume elements if it's enabled.
   if (volume_enabled_) {
-    volume_ = engine_->CreateElement("volume", audiobin_);
+    volume_ = CreateElement("volume", "volume", audiobin_, error);
+    if (!volume_) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
   }
   // Create the stereo balancer elements if it's enabled.
   if (stereo_balancer_enabled_) {
-    audiopanorama_ = engine_->CreateElement("audiopanorama", audiobin_, false);
+    audiopanorama_ = CreateElement("audiopanorama", "audiopanorama", audiobin_, error);
+    if (!audiopanorama_) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
     // Set the stereo balance.
-    if (audiopanorama_) g_object_set(G_OBJECT(audiopanorama_), "panorama", stereo_balance_, nullptr);
+    g_object_set(G_OBJECT(audiopanorama_), "panorama", stereo_balance_, nullptr);
   }
   // Create the equalizer elements if it's enabled.
   if (eq_enabled_) {
-    equalizer_preamp_ = engine_->CreateElement("volume", audiobin_, false);
-    equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_, false);
+    equalizer_preamp_ = CreateElement("volume", "equalizer_preamp", audiobin_, error);
+    if (!equalizer_preamp_) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
+    equalizer_ = CreateElement("equalizer-nbands", "equalizer_nbands", audiobin_, error);
+    if (!equalizer_) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
     // Setting the equalizer bands:
     //
     // GStreamer's GstIirEqualizerNBands sets up shelve filters for the first and last bands as corner cases.
@@ -341,32 +382,36 @@ bool GstEnginePipeline::InitAudioBin() {
     // As a workaround, we create two dummy bands at both ends of the spectrum.
     // This causes the actual first and last adjustable bands to be implemented using band-pass filters.
-    if (equalizer_) {
-      g_object_set(G_OBJECT(equalizer_), "num-bands", 10 + 2, nullptr);
+    g_object_set(G_OBJECT(equalizer_), "num-bands", 10 + 2, nullptr);
     // Dummy first band (bandwidth 0, cutting below 20Hz):
     GstObject *first_band = GST_OBJECT(gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), 0));
-    if (first_band) {
     g_object_set(G_OBJECT(first_band), "freq", 20.0, "bandwidth", 0, "gain", 0.0F, nullptr);
     g_object_unref(G_OBJECT(first_band));
-    }
     // Dummy last band (bandwidth 0, cutting over 20KHz):
     GstObject *last_band = GST_OBJECT(gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), kEqBandCount + 1));
-    if (last_band) {
     g_object_set(G_OBJECT(last_band), "freq", 20000.0, "bandwidth", 0, "gain", 0.0F, nullptr);
     g_object_unref(G_OBJECT(last_band));
-    }
     int last_band_frequency = 0;
     for (int i = 0; i < kEqBandCount; ++i) {
       const int index_in_eq = i + 1;
       GstObject *band = GST_OBJECT(gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), index_in_eq));
-      if (band) {
       const float frequency = static_cast<float>(kEqBandFrequencies[i]);
       const float bandwidth = frequency - static_cast<float>(last_band_frequency);
       last_band_frequency = static_cast<int>(frequency);
       g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth, "gain", 0.0F, nullptr);
       g_object_unref(G_OBJECT(band));
-      }
     }
-    } // for
   }
   // Create the replaygain elements if it's enabled.
@@ -375,29 +420,49 @@ bool GstEnginePipeline::InitAudioBin() {
   GstElement *rglimiter = nullptr;
   GstElement *rgconverter = nullptr;
   if (rg_enabled_) {
-    rgvolume = engine_->CreateElement("rgvolume", audiobin_, false);
-    rglimiter = engine_->CreateElement("rglimiter", audiobin_, false);
-    rgconverter = engine_->CreateElement("audioconvert", audiobin_, false);
-    if (rgvolume && rglimiter && rgconverter) {
-      eventprobe = rgconverter;
-      // Set replaygain settings
-      g_object_set(G_OBJECT(rgvolume), "album-mode", rg_mode_, nullptr);
-      g_object_set(G_OBJECT(rgvolume), "pre-amp", rg_preamp_, nullptr);
-      g_object_set(G_OBJECT(rgvolume), "fallback-gain", rg_fallbackgain_, nullptr);
-      g_object_set(G_OBJECT(rglimiter), "enabled", static_cast<int>(rg_compression_), nullptr);
-    }
-  }
-  // Create a pad on the outside of the audiobin and connect it to the pad of the first element.
-  GstPad *pad = gst_element_get_static_pad(audioqueue_, "sink");
-  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
-  gst_object_unref(pad);
+    rgvolume = CreateElement("rgvolume", "rgvolume", audiobin_, error);
+    if (!rgvolume) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
+    rglimiter = CreateElement("rglimiter", "rglimiter", audiobin_, error);
+    if (!rglimiter) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
+    rgconverter = CreateElement("audioconvert", "rgconverter", audiobin_, error);
+    if (!rgconverter) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      return false;
+    }
+    eventprobe = rgconverter;
+    // Set replaygain settings
+    g_object_set(G_OBJECT(rgvolume), "album-mode", rg_mode_, nullptr);
+    g_object_set(G_OBJECT(rgvolume), "pre-amp", rg_preamp_, nullptr);
+    g_object_set(G_OBJECT(rgvolume), "fallback-gain", rg_fallbackgain_, nullptr);
+    g_object_set(G_OBJECT(rglimiter), "enabled", static_cast<int>(rg_compression_), nullptr);
+  }
+  { // Create a pad on the outside of the audiobin and connect it to the pad of the first element.
+    GstPad *pad = gst_element_get_static_pad(audioqueue_, "sink");
+    if (pad) {
+      gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
+      gst_object_unref(pad);
+    }
+  }
   // Add a data probe on the src pad of the audioconvert element for our scope.
   // We do it here because we want pre-equalized and pre-volume samples so that our visualization are not be affected by them.
-  pad = gst_element_get_static_pad(eventprobe, "src");
-  gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, &EventHandoffCallback, this, nullptr);
-  gst_object_unref(pad);
+  {
+    GstPad *pad = gst_element_get_static_pad(eventprobe, "src");
+    if (pad) {
+      gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, &EventHandoffCallback, this, nullptr);
+      gst_object_unref(pad);
+    }
+  }
   // Set the buffer duration.
   // We set this on this queue instead of the playbin because setting it on the playbin only affects network sources.
@@ -419,47 +484,87 @@ bool GstEnginePipeline::InitAudioBin() {
   // Link replaygain elements if enabled.
   if (rg_enabled_ && rgvolume && rglimiter && rgconverter) {
-    gst_element_link_many(next, rgvolume, rglimiter, rgconverter, nullptr);
+    if (!gst_element_link_many(next, rgvolume, rglimiter, rgconverter, nullptr)) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      error = "gst_element_link_many() failed.";
+      return false;
+    }
     next = rgconverter;
   }
   // Link equalizer elements if enabled.
   if (eq_enabled_ && equalizer_ && equalizer_preamp_) {
-    gst_element_link_many(next, equalizer_preamp_, equalizer_, nullptr);
+    if (!gst_element_link_many(next, equalizer_preamp_, equalizer_, nullptr)) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      error = "gst_element_link_many() failed.";
+      return false;
+    }
     next = equalizer_;
   }
   // Link stereo balancer elements if enabled.
   if (stereo_balancer_enabled_ && audiopanorama_) {
-    gst_element_link(next, audiopanorama_);
+    if (!gst_element_link(next, audiopanorama_)) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      error = "gst_element_link() failed.";
+      return false;
+    }
     next = audiopanorama_;
   }
   // Link volume elements if enabled.
   if (volume_enabled_ && volume_) {
-    gst_element_link(next, volume_);
+    if (!gst_element_link(next, volume_)) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      error = "gst_element_link() failed.";
+      return false;
+    }
     next = volume_;
   }
-  gst_element_link(next, audioconverter);
-  GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
-  if (channels_enabled_ && channels_ > 0) {
-    qLog(Debug) << "Setting channels to" << channels_;
-    gst_caps_set_simple(caps, "channels", G_TYPE_INT, channels_, nullptr);
-  }
-  gst_element_link_filtered(audioconverter, audiosink, caps);
-  gst_caps_unref(caps);
-  // Add probes and handlers.
-  pad = gst_element_get_static_pad(audioqueue_, "src");
-  gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, HandoffCallback, this, nullptr);
-  gst_object_unref(pad);
-  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
-  gst_bus_set_sync_handler(bus, BusCallbackSync, this, nullptr);
-  gst_bus_add_watch(bus, BusCallback, this);
-  gst_object_unref(bus);
+  if (!gst_element_link(next, audioconverter)) {
+    gst_object_unref(GST_OBJECT(audiobin_));
+    audiobin_ = nullptr;
+    error = "gst_element_link() failed.";
+    return false;
+  }
+  {
+    GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
+    if (!caps) {
+      gst_object_unref(GST_OBJECT(audiobin_));
+      audiobin_ = nullptr;
+      error = "gst_caps_new_empty_simple() failed.";
+      return false;
+    }
+    if (channels_enabled_ && channels_ > 0) {
+      qLog(Debug) << "Setting channels to" << channels_;
+      gst_caps_set_simple(caps, "channels", G_TYPE_INT, channels_, nullptr);
+    }
+    gst_element_link_filtered(audioconverter, audiosink, caps);
+    gst_caps_unref(caps);
+  }
+  { // Add probes and handlers.
+    GstPad *pad = gst_element_get_static_pad(audioqueue_, "src");
+    if (pad) {
+      gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, HandoffCallback, this, nullptr);
+      gst_object_unref(pad);
+    }
+  }
+  {
+    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
+    if (bus) {
+      gst_bus_set_sync_handler(bus, BusCallbackSync, this, nullptr);
+      gst_bus_add_watch(bus, BusCallback, this);
+      gst_object_unref(bus);
+    }
+  }
   logged_unsupported_analyzer_format_ = false;

View File

@@ -45,7 +45,6 @@
 #include <QUrl>
 class QTimerEvent;
-class GstEngine;
 class GstBufferConsumer;
 namespace Engine {
@@ -57,7 +56,7 @@ class GstEnginePipeline : public QObject {
   Q_OBJECT
  public:
-  explicit GstEnginePipeline(GstEngine *engine, QObject *parent = nullptr);
+  explicit GstEnginePipeline(QObject *parent = nullptr);
   ~GstEnginePipeline() override;
   // Globally unique across all pipelines.
@@ -76,7 +75,7 @@ class GstEnginePipeline : public QObject {
   void set_channels(const bool enabled, const int channels);
   // Creates the pipeline, returns false on error
-  bool InitFromUrl(const QByteArray &stream_url, const QUrl &original_url, const qint64 end_nanosec);
+  bool InitFromUrl(const QByteArray &stream_url, const QUrl &original_url, const qint64 end_nanosec, QString &error);
   // GstBufferConsumers get fed audio data. Thread-safe.
   void AddBufferConsumer(GstBufferConsumer *consumer);
@@ -124,11 +123,11 @@ class GstEnginePipeline : public QObject {
   void SetVolumeModifier(qreal mod);
  signals:
-  void Error(int pipeline_id, QString message, const int domain, const int error_code);
   void EndOfStreamReached(int pipeline_id, bool has_next_track);
   void MetadataFound(int pipeline_id, const Engine::SimpleMetaBundle &bundle);
+  // This indicates an error, delegated from GStreamer, in the pipeline.
+  // The message, domain and error_code are related to GStreamer's GError.
+  void Error(int pipeline_id, QString message, int domain, int error_code);
   void FaderFinished();
   void BufferingStarted();
@@ -139,7 +138,8 @@ class GstEnginePipeline : public QObject {
   void timerEvent(QTimerEvent*) override;
  private:
-  bool InitAudioBin();
+  GstElement *CreateElement(const QString &factory_name, const QString &name, GstElement *bin, QString &error);
+  bool InitAudioBin(QString &error);
   // Static callbacks. The GstEnginePipeline instance is passed in the last argument.
   static GstPadProbeReturn EventHandoffCallback(GstPad*, GstPadProbeInfo*, gpointer);
@@ -176,8 +176,6 @@ class GstEnginePipeline : public QObject {
   static const int kEqBandCount;
   static const int kEqBandFrequencies[];
-  GstEngine *engine_;
   // Using == to compare two pipelines is a bad idea, because new ones often get created in the same address as old ones. This ID will be unique for each pipeline.
   // Threading warning: access to the static ID field isn't protected by a mutex because all pipeline creation is currently done in the main thread.
   static int sId;