This file is part of PulseAudio.

Copyright 2004-2006 Lennart Poettering
Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB

PulseAudio is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation; either version 2.1 of the License,
or (at your option) any later version.
PulseAudio is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with PulseAudio; if not, see <http://www.gnu.org/licenses/>.
#include <pulse/format.h>
#include <pulse/utf8.h>
#include <pulse/xmalloc.h>
#include <pulse/timeval.h>
#include <pulse/util.h>
#include <pulse/rtclock.h>
#include <pulse/internal.h>

#include <pulsecore/core-util.h>
#include <pulsecore/source-output.h>
#include <pulsecore/namereg.h>
#include <pulsecore/core-subscribe.h>
#include <pulsecore/log.h>
#include <pulsecore/mix.h>
#include <pulsecore/flist.h>
#define ABSOLUTE_MIN_LATENCY (500)
#define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
#define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
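
/* Illustrative sketch, not part of the original file: the latency bounds above
 * are in microseconds (0.5 ms minimum, 10 s maximum, 250 ms default fixed
 * latency). A requested latency would typically be clamped to them roughly
 * like this; PA_CLAMP is the helper from pulsecore/macro.h. */
#if 0
static pa_usec_t clamp_requested_latency(pa_usec_t requested) {
    /* Keep the request within the absolute bounds the core is willing to accept. */
    return PA_CLAMP(requested, (pa_usec_t) ABSOLUTE_MIN_LATENCY, (pa_usec_t) ABSOLUTE_MAX_LATENCY);
}
#endif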
PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);

struct pa_source_volume_change {
    PA_LLIST_FIELDS(pa_source_volume_change);

struct source_message_set_port {

static void source_free(pa_object *o);

static void pa_source_volume_change_push(pa_source *s);
static void pa_source_volume_change_flush(pa_source *s);

pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
    data->proplist = pa_proplist_new();
    data->ports = pa_hashmap_new_full(pa_idxset_string_hash_func, pa_idxset_string_compare_func, NULL, (pa_free_cb_t) pa_device_port_unref);

void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
    data->name = pa_xstrdup(name);

void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
    if ((data->sample_spec_is_set = !!spec))
        data->sample_spec = *spec;

void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
    if ((data->channel_map_is_set = !!map))
        data->channel_map = *map;

void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
    data->alternate_sample_rate_is_set = true;
    data->alternate_sample_rate = alternate_sample_rate;

void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
    if ((data->volume_is_set = !!volume))
        data->volume = *volume;

void pa_source_new_data_set_muted(pa_source_new_data *data, bool mute) {
    data->muted_is_set = true;

void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
    pa_xfree(data->active_port);
    data->active_port = pa_xstrdup(port);

void pa_source_new_data_done(pa_source_new_data *data) {
    pa_proplist_free(data->proplist);

        pa_hashmap_free(data->ports);

    pa_xfree(data->name);
    pa_xfree(data->active_port);
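
/* Illustrative sketch, not part of the original file: how a source
 * implementation (e.g. a driver module) typically uses the pa_source_new_data
 * helpers above. The module pointer "m" and the sample spec/channel map are
 * assumed to come from the implementor's own context. */
#if 0
static pa_source *create_example_source(pa_module *m, const pa_sample_spec *ss, const pa_channel_map *map) {
    pa_source_new_data data;
    pa_source *s;

    pa_source_new_data_init(&data);
    data.driver = __FILE__;
    data.module = m;
    pa_source_new_data_set_name(&data, "example_source");
    pa_source_new_data_set_sample_spec(&data, ss);
    pa_source_new_data_set_channel_map(&data, map);
    pa_proplist_sets(data.proplist, PA_PROP_DEVICE_DESCRIPTION, "Example source");

    /* pa_source_new() copies what it needs, so the new_data can be released right away. */
    s = pa_source_new(m->core, &data, PA_SOURCE_LATENCY);
    pa_source_new_data_done(&data);

    return s;
}
#endif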
/* Called from main context */
static void reset_callbacks(pa_source *s) {
    s->get_volume = NULL;
    s->set_volume = NULL;
    s->write_volume = NULL;
    s->update_requested_latency = NULL;
    s->get_formats = NULL;
    s->reconfigure = NULL;

/* Called from main context */
pa_source* pa_source_new(
        pa_source_new_data *data,
        pa_source_flags_t flags) {

    char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];

    pa_assert(data->name);
    pa_assert_ctl_context();

    s = pa_msgobject_new(pa_source);

    if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
        pa_log_debug("Failed to register name %s.", data->name);

    pa_source_new_data_set_name(data, name);

    if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
        pa_namereg_unregister(core, name);

    /* FIXME, need to free s here on failure */

    pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
    pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);

    pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));

    if (!data->channel_map_is_set)
        pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));

    pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
    pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);

    /* FIXME: There should probably be a general function for checking whether
     * the source volume is allowed to be set, like there is for source outputs. */
    pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));

    if (!data->volume_is_set) {
        pa_cvolume_reset(&data->volume, data->sample_spec.channels);
        data->save_volume = false;

    pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
    pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));

    if (!data->muted_is_set)

        pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);

    pa_device_init_description(data->proplist, data->card);
    pa_device_init_icon(data->proplist, false);
    pa_device_init_intended_roles(data->proplist);

    if (!data->active_port) {
        pa_device_port *p = pa_device_port_find_best(data->ports);
            pa_source_new_data_set_port(data, p->name);

    if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
        pa_namereg_unregister(core, name);

    s->parent.parent.free = source_free;
    s->parent.process_msg = pa_source_process_msg;

    s->state = PA_SOURCE_INIT;
    s->suspend_cause = data->suspend_cause;
    pa_source_set_mixer_dirty(s, false);
    s->name = pa_xstrdup(name);
    s->proplist = pa_proplist_copy(data->proplist);
    s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
    s->module = data->module;
    s->card = data->card;

    s->priority = pa_device_init_priority(s->proplist);

    s->sample_spec = data->sample_spec;
    s->channel_map = data->channel_map;
    s->default_sample_rate = s->sample_spec.rate;

    if (data->alternate_sample_rate_is_set)
        s->alternate_sample_rate = data->alternate_sample_rate;
        s->alternate_sample_rate = s->core->alternate_sample_rate;

    if (s->sample_spec.rate == s->alternate_sample_rate) {
        pa_log_warn("Default and alternate sample rates are the same.");
        s->alternate_sample_rate = 0;

    s->outputs = pa_idxset_new(NULL, NULL);

    s->monitor_of = NULL;
    s->output_from_master = NULL;

    s->reference_volume = s->real_volume = data->volume;
    pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
    s->base_volume = PA_VOLUME_NORM;
    s->n_volume_steps = PA_VOLUME_NORM+1;
    s->muted = data->muted;
    s->refresh_volume = s->refresh_muted = false;
    /* As a minor optimization we just steal the list instead of copying it */
    s->ports = data->ports;

    s->active_port = NULL;
    s->save_port = false;

    if (data->active_port)
        if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
            s->save_port = data->save_port;

    /* Hopefully the active port has already been assigned in the previous call
       to pa_device_port_find_best, but better safe than sorry */
        s->active_port = pa_device_port_find_best(s->ports);

        s->port_latency_offset = s->active_port->latency_offset;
        s->port_latency_offset = 0;

    s->save_volume = data->save_volume;
    s->save_muted = data->save_muted;

    pa_silence_memchunk_get(
            &core->silence_cache,

    s->thread_info.rtpoll = NULL;
    s->thread_info.outputs = pa_hashmap_new_full(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func, NULL,
                                                 (pa_free_cb_t) pa_source_output_unref);
    s->thread_info.soft_volume = s->soft_volume;
    s->thread_info.soft_muted = s->muted;
    s->thread_info.state = s->state;
    s->thread_info.max_rewind = 0;
    s->thread_info.requested_latency_valid = false;
    s->thread_info.requested_latency = 0;
    s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
    s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
    s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;

    PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
    s->thread_info.volume_changes_tail = NULL;
    pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
    s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
    s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
    s->thread_info.port_latency_offset = s->port_latency_offset;

    /* FIXME: This should probably be moved to pa_source_put() */
    pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);

        pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);

    pt = pa_proplist_to_string_sep(s->proplist, "\n ");
    pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
                pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
                pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
/* Called from main context */
static int source_set_state(pa_source *s, pa_source_state_t state) {

    pa_source_state_t original_state;

    pa_assert_ctl_context();

    if (s->state == state)

    original_state = s->state;

        (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
        (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);

        if ((ret = s->set_state(s, state)) < 0)

        if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
                s->set_state(s, original_state);

    pa_log_debug("%s: state: %s -> %s", s->name, pa_source_state_to_string(s->state), pa_source_state_to_string(state));

    if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
        pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);

    if (suspend_change) {

        /* We're suspending or resuming, tell everyone about it */

        PA_IDXSET_FOREACH(o, s->outputs, idx)
            if (s->state == PA_SOURCE_SUSPENDED &&
                (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
                pa_source_output_kill(o);
                o->suspend(o, state == PA_SOURCE_SUSPENDED);

void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {

void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
    pa_source_flags_t flags;

    pa_assert(!s->write_volume || cb);

    /* Save the current flags so we can tell if they've changed */

        /* The source implementor is responsible for setting decibel volume support */
        s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
        s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
        /* See note below in pa_source_put() about volume sharing and decibel volumes */
        pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));

    /* If the flags have changed after init, let any clients know via a change event */
    if (s->state != PA_SOURCE_INIT && flags != s->flags)
        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);

void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
    pa_source_flags_t flags;

    pa_assert(!cb || s->set_volume);

    s->write_volume = cb;

    /* Save the current flags so we can tell if they've changed */

        s->flags |= PA_SOURCE_DEFERRED_VOLUME;
        s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;

    /* If the flags have changed after init, let any clients know via a change event */
    if (s->state != PA_SOURCE_INIT && flags != s->flags)
        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);

void pa_source_set_get_mute_callback(pa_source *s, pa_source_get_mute_cb_t cb) {

void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
    pa_source_flags_t flags;

    /* Save the current flags so we can tell if they've changed */

        s->flags |= PA_SOURCE_HW_MUTE_CTRL;
        s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;

    /* If the flags have changed after init, let any clients know via a change event */
    if (s->state != PA_SOURCE_INIT && flags != s->flags)
        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
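
/* Illustrative sketch, not part of the original file: the setters above are
 * meant to be called by the source implementor between pa_source_new() and
 * pa_source_put(), so that PA_SOURCE_HW_VOLUME_CTRL, PA_SOURCE_HW_MUTE_CTRL
 * and PA_SOURCE_DEFERRED_VOLUME are consistent before clients see the source.
 * The callback names are hypothetical. */
#if 0
pa_source_set_get_volume_callback(s, my_source_get_volume);
pa_source_set_set_volume_callback(s, my_source_set_volume);
pa_source_set_get_mute_callback(s, my_source_get_mute);
pa_source_set_set_mute_callback(s, my_source_set_mute);
pa_source_put(s);
#endif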
static void enable_flat_volume(pa_source *s, bool enable) {
    pa_source_flags_t flags;

    /* Always follow the overall user preference here */
    enable = enable && s->core->flat_volumes;

    /* Save the current flags so we can tell if they've changed */

        s->flags |= PA_SOURCE_FLAT_VOLUME;
        s->flags &= ~PA_SOURCE_FLAT_VOLUME;

    /* If the flags have changed after init, let any clients know via a change event */
    if (s->state != PA_SOURCE_INIT && flags != s->flags)
        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);

void pa_source_enable_decibel_volume(pa_source *s, bool enable) {
    pa_source_flags_t flags;

    /* Save the current flags so we can tell if they've changed */

        s->flags |= PA_SOURCE_DECIBEL_VOLUME;
        enable_flat_volume(s, true);
        s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
        enable_flat_volume(s, false);

    /* If the flags have changed after init, let any clients know via a change event */
    if (s->state != PA_SOURCE_INIT && flags != s->flags)
        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);

/* Called from main context */
void pa_source_put(pa_source *s) {
    pa_source_assert_ref(s);
    pa_assert_ctl_context();

    pa_assert(s->state == PA_SOURCE_INIT);
    pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || pa_source_is_filter(s));

    /* The following fields must be initialized properly when calling _put() */
    pa_assert(s->asyncmsgq);
    pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);

    /* Generally, flags should be initialized via pa_source_new(). As a
     * special exception we allow some volume related flags to be set
     * between _new() and _put() by the callback setter functions above.
     * Thus we implement a couple safeguards here which ensure the above
     * setters were used (or at least the implementor made manual changes
     * in a compatible way).
     * Note: All of these flags set here can change over the life time
    pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
    pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
    pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);

    /* XXX: Currently decibel volume is disabled for all sources that use volume
     * sharing. When the master source supports decibel volume, it would be good
     * to have the flag also in the filter source, but currently we don't do that
     * so that the flags of the filter source never change when it's moved from
     * a master source to another. One solution for this problem would be to
     * remove user-visible volume altogether from filter sources when volume
     * sharing is used, but the current approach was easier to implement... */
    /* We always support decibel volumes in software, otherwise we leave it to
     * the source implementor to set this flag as needed.
     *
     * Note: This flag can also change over the life time of the source. */
    if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
        pa_source_enable_decibel_volume(s, true);
        s->soft_volume = s->reference_volume;
    /* If the source implementor supports dB volumes by itself, we should always
     * try to enable flat volumes too */
    if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
        enable_flat_volume(s, true);

    if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
        pa_source *root_source = pa_source_get_master(s);

        pa_assert(PA_LIKELY(root_source));

        s->reference_volume = root_source->reference_volume;
        pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);

        s->real_volume = root_source->real_volume;
        pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
        /* We assume that if the source implementor changed the default
         * volume he did so in real_volume, because that is the usual
         * place where he is supposed to place his changes. */
        s->reference_volume = s->real_volume;

    s->thread_info.soft_volume = s->soft_volume;
    s->thread_info.soft_muted = s->muted;
    pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);

    pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
              || (s->base_volume == PA_VOLUME_NORM
                  && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
    pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
    pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == !(s->thread_info.fixed_latency == 0));

    if (s->suspend_cause)
        pa_assert_se(source_set_state(s, PA_SOURCE_SUSPENDED) == 0);
        pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);

    pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
    pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);

    /* This function must be called after the PA_CORE_HOOK_SOURCE_PUT hook,
     * because module-switch-on-connect needs to know the old default source */
    pa_core_update_default_source(s->core);
/* Called from main context */
void pa_source_unlink(pa_source *s) {
    pa_source_output *o, PA_UNUSED *j = NULL;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    /* See pa_sink_unlink() for a couple of comments how this function works */
    if (s->unlink_requested)

    s->unlink_requested = true;

    linked = PA_SOURCE_IS_LINKED(s->state);

        pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);

    if (s->state != PA_SOURCE_UNLINKED)
        pa_namereg_unregister(s->core, s->name);
    pa_idxset_remove_by_data(s->core->sources, s, NULL);

    pa_core_update_default_source(s->core);

        pa_idxset_remove_by_data(s->card->sources, s, NULL);

    while ((o = pa_idxset_first(s->outputs, NULL))) {
        pa_source_output_kill(o);

        source_set_state(s, PA_SOURCE_UNLINKED);
        s->state = PA_SOURCE_UNLINKED;

        pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
        pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
/* Called from main context */
static void source_free(pa_object *o) {
    pa_source *s = PA_SOURCE(o);

    pa_assert_ctl_context();
    pa_assert(pa_source_refcnt(s) == 0);
    pa_assert(!PA_SOURCE_IS_LINKED(s->state));

    pa_log_info("Freeing source %u \"%s\"", s->index, s->name);

    pa_source_volume_change_flush(s);

    pa_idxset_free(s->outputs, NULL);
    pa_hashmap_free(s->thread_info.outputs);

    if (s->silence.memblock)
        pa_memblock_unref(s->silence.memblock);

    pa_proplist_free(s->proplist);

        pa_hashmap_free(s->ports);

/* Called from main context, and not while the IO thread is active, please */
void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
    pa_source_assert_ref(s);
    pa_assert_ctl_context();

/* Called from main context, and not while the IO thread is active, please */
void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
    pa_source_flags_t old_flags;
    pa_source_output *output;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();

    /* For now, allow only a minimal set of flags to be changed. */
    pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);

    old_flags = s->flags;
    s->flags = (s->flags & ~mask) | (value & mask);

    if (s->flags == old_flags)

    if ((s->flags & PA_SOURCE_LATENCY) != (old_flags & PA_SOURCE_LATENCY))
        pa_log_debug("Source %s: LATENCY flag %s.", s->name, (s->flags & PA_SOURCE_LATENCY) ? "enabled" : "disabled");

    if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY) != (old_flags & PA_SOURCE_DYNAMIC_LATENCY))
        pa_log_debug("Source %s: DYNAMIC_LATENCY flag %s.",
                     s->name, (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ? "enabled" : "disabled");

    pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
    pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_FLAGS_CHANGED], s);

    PA_IDXSET_FOREACH(output, s->outputs, idx) {
        if (output->destination_source)
            pa_source_update_flags(output->destination_source, mask, value);

/* Called from IO context, or before _put() from main context */
void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
    pa_source_assert_ref(s);
    pa_source_assert_io_context(s);

    s->thread_info.rtpoll = p;

/* Called from main context */
int pa_source_update_status(pa_source*s) {
    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    if (s->state == PA_SOURCE_SUSPENDED)

    return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);

/* Called from any context - must be threadsafe */
void pa_source_set_mixer_dirty(pa_source *s, bool is_dirty) {
    pa_atomic_store(&s->mixer_dirty, is_dirty ? 1 : 0);

/* Called from main context */
int pa_source_suspend(pa_source *s, bool suspend, pa_suspend_cause_t cause) {
    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(cause != 0);

    if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
        return -PA_ERR_NOTSUPPORTED;

        s->suspend_cause |= cause;
        s->suspend_cause &= ~cause;

    if (!(s->suspend_cause & PA_SUSPEND_SESSION) && (pa_atomic_load(&s->mixer_dirty) != 0)) {
        /* This might look racy but isn't: If somebody sets mixer_dirty exactly here,
           it'll be handled just fine. */
        pa_source_set_mixer_dirty(s, false);
        pa_log_debug("Mixer is now accessible. Updating alsa mixer settings.");
        if (s->active_port && s->set_port) {
            if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
                struct source_message_set_port msg = { .port = s->active_port, .ret = 0 };
                pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
                s->set_port(s, s->active_port);

    if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)

    pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");

    if (s->suspend_cause)
        return source_set_state(s, PA_SOURCE_SUSPENDED);

        return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
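
/* Illustrative sketch, not part of the original file: callers drive suspend
 * and resume with a cause bit, e.g. module-suspend-on-idle uses
 * PA_SUSPEND_IDLE. The source only resumes once all cause bits are cleared. */
#if 0
pa_source_suspend(s, true, PA_SUSPEND_IDLE);   /* sets the IDLE cause bit */
/* ... later, when the source is needed again ... */
pa_source_suspend(s, false, PA_SUSPEND_IDLE);  /* clears it again */
#endif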
/* Called from main context */
int pa_source_sync_suspend(pa_source *s) {
    pa_sink_state_t state;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(s->monitor_of);

    state = pa_sink_get_state(s->monitor_of);

    if (state == PA_SINK_SUSPENDED)
        return source_set_state(s, PA_SOURCE_SUSPENDED);

    pa_assert(PA_SINK_IS_OPENED(state));

    return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);

/* Called from main context */
pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
    pa_source_output *o, *n;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
        n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));

        pa_source_output_ref(o);

        if (pa_source_output_start_move(o) >= 0)

            pa_source_output_unref(o);

/* Called from main context */
void pa_source_move_all_finish(pa_source *s, pa_queue *q, bool save) {

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
        if (PA_SOURCE_OUTPUT_IS_LINKED(o->state)) {
            if (pa_source_output_finish_move(o, s, save) < 0)
                pa_source_output_fail_move(o);

        pa_source_output_unref(o);

    pa_queue_free(q, NULL);

/* Called from main context */
void pa_source_move_all_fail(pa_queue *q) {

    pa_assert_ctl_context();

    while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
        pa_source_output_fail_move(o);
        pa_source_output_unref(o);

    pa_queue_free(q, NULL);
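
/* Illustrative sketch, not part of the original file: the move_all helpers
 * above are used together when a source goes away or is reconfigured and its
 * outputs should survive on another source; "new_source" is hypothetical. */
#if 0
pa_queue *q = pa_source_move_all_start(s, NULL);

/* ... unlink or reconfigure s ... */

if (new_source)
    pa_source_move_all_finish(new_source, q, false);
else
    pa_source_move_all_fail(q);
#endif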
/* Called from IO thread context */
void pa_source_process_rewind(pa_source *s, size_t nbytes) {

    pa_source_assert_ref(s);
    pa_source_assert_io_context(s);
    pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));

    if (s->thread_info.state == PA_SOURCE_SUSPENDED)

    pa_log_debug("Processing rewind...");

    PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
        pa_source_output_assert_ref(o);
        pa_source_output_process_rewind(o, nbytes);

/* Called from IO thread context */
void pa_source_post(pa_source*s, const pa_memchunk *chunk) {

    pa_source_assert_ref(s);
    pa_source_assert_io_context(s);
    pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));

    if (s->thread_info.state == PA_SOURCE_SUSPENDED)

    if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
        pa_memchunk vchunk = *chunk;

        pa_memblock_ref(vchunk.memblock);
        pa_memchunk_make_writable(&vchunk, 0);

        if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
            pa_silence_memchunk(&vchunk, &s->sample_spec);
            pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);

        while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
            pa_source_output_assert_ref(o);

            if (!o->thread_info.direct_on_input)
                pa_source_output_push(o, &vchunk);

        pa_memblock_unref(vchunk.memblock);

        while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
            pa_source_output_assert_ref(o);

            if (!o->thread_info.direct_on_input)
                pa_source_output_push(o, chunk);

/* Called from IO thread context */
void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
    pa_source_assert_ref(s);
    pa_source_assert_io_context(s);
    pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
    pa_source_output_assert_ref(o);
    pa_assert(o->thread_info.direct_on_input);

    if (s->thread_info.state == PA_SOURCE_SUSPENDED)

    if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
        pa_memchunk vchunk = *chunk;

        pa_memblock_ref(vchunk.memblock);
        pa_memchunk_make_writable(&vchunk, 0);

        if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
            pa_silence_memchunk(&vchunk, &s->sample_spec);
            pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);

        pa_source_output_push(o, &vchunk);

        pa_memblock_unref(vchunk.memblock);

        pa_source_output_push(o, chunk);
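
/* Illustrative sketch, not part of the original file: how a source
 * implementation's IO thread typically hands captured audio to the core via
 * pa_source_post(). The helper name is hypothetical; memcpy() requires
 * <string.h>. */
#if 0
static void post_captured_frames(pa_source *s, const void *samples, size_t length) {
    pa_memchunk chunk;

    /* Wrap the captured bytes in a memblock-backed chunk. */
    chunk.memblock = pa_memblock_new(s->core->mempool, length);
    chunk.index = 0;
    chunk.length = length;

    memcpy(pa_memblock_acquire(chunk.memblock), samples, length);
    pa_memblock_release(chunk.memblock);

    /* pa_source_post() distributes the chunk to all connected source outputs. */
    pa_source_post(s, &chunk);
    pa_memblock_unref(chunk.memblock);
}
#endif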
/* Called from main thread */
int pa_source_reconfigure(pa_source *s, pa_sample_spec *spec, bool passthrough) {

    pa_sample_spec desired_spec;
    uint32_t default_rate = s->default_sample_rate;
    uint32_t alternate_rate = s->alternate_sample_rate;
    bool default_rate_is_usable = false;
    bool alternate_rate_is_usable = false;
    bool avoid_resampling = s->core->avoid_resampling;

    /* We currently only try to reconfigure the sample rate */

    if (pa_sample_spec_equal(spec, &s->sample_spec))

    if (!s->reconfigure && !s->monitor_of)

    if (PA_UNLIKELY(default_rate == alternate_rate && !passthrough && !avoid_resampling)) {
        pa_log_debug("Default and alternate sample rates are the same, so there is no point in switching.");

    if (PA_SOURCE_IS_RUNNING(s->state)) {
        pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
                    s->sample_spec.rate);

    if (s->monitor_of) {
        if (PA_SINK_IS_RUNNING(s->monitor_of->state)) {
            pa_log_info("Cannot update rate, this is a monitor source and the sink is running.");

    if (PA_UNLIKELY(!pa_sample_spec_valid(spec)))

    desired_spec = s->sample_spec;

        /* We have to try to use the source output rate */
        desired_spec.rate = spec->rate;

    } else if (avoid_resampling && (spec->rate >= default_rate || spec->rate >= alternate_rate)) {
        /* We just try to set the source output's sample rate if it's not too low */
        desired_spec.rate = spec->rate;

    } else if (default_rate == spec->rate || alternate_rate == spec->rate) {
        /* We can directly try to use this rate */
        desired_spec.rate = spec->rate;

        /* See if we can pick a rate that results in less resampling effort */
        if (default_rate % 11025 == 0 && spec->rate % 11025 == 0)
            default_rate_is_usable = true;
        if (default_rate % 4000 == 0 && spec->rate % 4000 == 0)
            default_rate_is_usable = true;
        if (alternate_rate && alternate_rate % 11025 == 0 && spec->rate % 11025 == 0)
            alternate_rate_is_usable = true;
        if (alternate_rate && alternate_rate % 4000 == 0 && spec->rate % 4000 == 0)
            alternate_rate_is_usable = true;

        if (alternate_rate_is_usable && !default_rate_is_usable)
            desired_spec.rate = alternate_rate;
            desired_spec.rate = default_rate;

    if (pa_sample_spec_equal(&desired_spec, &s->sample_spec) && passthrough == pa_source_is_passthrough(s))

    if (!passthrough && pa_source_used_by(s) > 0)

    pa_log_debug("Suspending source %s due to changing the sample rate.", s->name);
    pa_source_suspend(s, true, PA_SUSPEND_INTERNAL);

        ret = s->reconfigure(s, &desired_spec, passthrough);
        /* This is a monitor source. */

        /* XXX: This code is written with non-passthrough streams in mind. I
         * have no idea whether the behaviour with passthrough streams is
        pa_sample_spec old_spec = s->sample_spec;

        s->sample_spec = desired_spec;
        ret = pa_sink_reconfigure(s->monitor_of, &desired_spec, false);

            /* Changing the sink rate failed, roll back the old rate for
             * the monitor source. Why did we set the source rate before
             * calling pa_sink_reconfigure(), you may ask. The reason is
             * that pa_sink_reconfigure() tries to update the monitor
             * source rate, but we are already in the process of updating
             * the monitor source rate, so there's a risk of entering an
             * infinite loop. Setting the source rate before calling
             * pa_sink_reconfigure() makes the rate == s->sample_spec.rate
             * check in the beginning of this function return early, so we
            s->sample_spec = old_spec;

        pa_source_output *o;

        PA_IDXSET_FOREACH(o, s->outputs, idx) {
            if (o->state == PA_SOURCE_OUTPUT_CORKED)
                pa_source_output_update_rate(o);

        pa_log_info("Changed sampling rate successfully");

    pa_source_suspend(s, false, PA_SUSPEND_INTERNAL);
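
/* Illustrative sketch, not part of the original file: the "less resampling
 * effort" heuristic above boils down to preferring a device rate from the
 * same rate family (multiples of 11025 Hz or of 4000 Hz) as the stream rate.
 * The helper name is hypothetical. */
#if 0
static bool rate_families_match(uint32_t device_rate, uint32_t stream_rate) {
    if (device_rate % 11025 == 0 && stream_rate % 11025 == 0)
        return true;
    if (device_rate % 4000 == 0 && stream_rate % 4000 == 0)
        return true;
    return false;
}
#endif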
/* Called from main thread */
pa_usec_t pa_source_get_latency(pa_source *s) {

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    if (s->state == PA_SOURCE_SUSPENDED)

    if (!(s->flags & PA_SOURCE_LATENCY))

    pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
    /* The return value is unsigned, so check that the offset can be added to usec without
     * underflowing. */
    if (-s->port_latency_offset <= usec)
        usec += s->port_latency_offset;

    return (pa_usec_t)usec;

/* Called from IO thread */
int64_t pa_source_get_latency_within_thread(pa_source *s, bool allow_negative) {

    pa_source_assert_ref(s);
    pa_source_assert_io_context(s);
    pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));

    /* The returned value is supposed to be in the time domain of the sound card! */

    if (s->thread_info.state == PA_SOURCE_SUSPENDED)

    if (!(s->flags & PA_SOURCE_LATENCY))

    o = PA_MSGOBJECT(s);

    /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */

    o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL);

    /* If allow_negative is false, the call should only return positive values, */
    usec += s->thread_info.port_latency_offset;
    if (!allow_negative && usec < 0)
/* Called from the main thread (and also from the IO thread while the main
 * thread is waiting).
 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
 * set. Instead, flat volume mode is detected by checking whether the root source
 * has the flag set. */
bool pa_source_flat_volume_enabled(pa_source *s) {
    pa_source_assert_ref(s);

    s = pa_source_get_master(s);

        return (s->flags & PA_SOURCE_FLAT_VOLUME);

/* Called from the main thread (and also from the IO thread while the main
 * thread is waiting). */
pa_source *pa_source_get_master(pa_source *s) {
    pa_source_assert_ref(s);

    while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
        if (PA_UNLIKELY(!s->output_from_master))

        s = s->output_from_master->source;

/* Called from main context */
bool pa_source_is_filter(pa_source *s) {
    pa_source_assert_ref(s);

    return (s->output_from_master != NULL);

/* Called from main context */
bool pa_source_is_passthrough(pa_source *s) {

    pa_source_assert_ref(s);

    /* NB Currently only monitor sources support passthrough mode */
    return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));

/* Called from main context */
void pa_source_enter_passthrough(pa_source *s) {

    /* set the volume to NORM */
    s->saved_volume = *pa_source_get_volume(s, true);
    s->saved_save_volume = s->save_volume;

    pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
    pa_source_set_volume(s, &volume, true, false);

/* Called from main context */
void pa_source_leave_passthrough(pa_source *s) {
    /* Restore source volume to what it was before we entered passthrough mode */
    pa_source_set_volume(s, &s->saved_volume, true, s->saved_save_volume);

    pa_cvolume_init(&s->saved_volume);
    s->saved_save_volume = false;

/* Called from main context. */
static void compute_reference_ratio(pa_source_output *o) {
    pa_cvolume remapped;

    pa_assert(pa_source_flat_volume_enabled(o->source));

     * Calculates the reference ratio from the source's reference
     * volume. This basically calculates:
     *     o->reference_ratio = o->volume / o->source->reference_volume

    remapped = o->source->reference_volume;
    pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);

    ratio = o->reference_ratio;

    for (c = 0; c < o->sample_spec.channels; c++) {

        /* We don't update when the source volume is 0 anyway */
        if (remapped.values[c] <= PA_VOLUME_MUTED)

        /* Don't update the reference ratio unless necessary */
        if (pa_sw_volume_multiply(
                    remapped.values[c]) == o->volume.values[c])

        ratio.values[c] = pa_sw_volume_divide(
                o->volume.values[c],
                remapped.values[c]);

    pa_source_output_set_reference_ratio(o, &ratio);
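
/* Illustrative sketch, not part of the original file: pa_sw_volume_divide()
 * operates on the linear scale, so the reference ratio computed above is, up
 * to rounding and the PA_VOLUME_MUTED special case, simply the difference
 * between the output's and the source's volume expressed in dB. */
#if 0
static pa_volume_t approx_reference_ratio(pa_volume_t output_volume, pa_volume_t source_volume) {
    return pa_sw_volume_from_dB(pa_sw_volume_to_dB(output_volume) - pa_sw_volume_to_dB(source_volume));
}
#endif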
/* Called from main context. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. */
static void compute_reference_ratios(pa_source *s) {
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(pa_source_flat_volume_enabled(s));

    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        compute_reference_ratio(o);

        if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
                && PA_SOURCE_IS_LINKED(o->destination_source->state))
            compute_reference_ratios(o->destination_source);

/* Called from main context. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. */
static void compute_real_ratios(pa_source *s) {
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(pa_source_flat_volume_enabled(s));

    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        pa_cvolume remapped;

        if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
            /* The origin source uses volume sharing, so this input's real ratio
             * is handled as a special case - the real ratio must be 0 dB, and
             * as a result i->soft_volume must equal i->volume_factor. */
            pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
            o->soft_volume = o->volume_factor;

            if (PA_SOURCE_IS_LINKED(o->destination_source->state))
                compute_real_ratios(o->destination_source);

         * This basically calculates:
         *     i->real_ratio := i->volume / s->real_volume
         *     i->soft_volume := i->real_ratio * i->volume_factor

        remapped = s->real_volume;
        pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);

        o->real_ratio.channels = o->sample_spec.channels;
        o->soft_volume.channels = o->sample_spec.channels;

        for (c = 0; c < o->sample_spec.channels; c++) {

            if (remapped.values[c] <= PA_VOLUME_MUTED) {
                /* We leave o->real_ratio untouched */
                o->soft_volume.values[c] = PA_VOLUME_MUTED;

            /* Don't lose accuracy unless necessary */
            if (pa_sw_volume_multiply(
                        o->real_ratio.values[c],
                        remapped.values[c]) != o->volume.values[c])

                o->real_ratio.values[c] = pa_sw_volume_divide(
                        o->volume.values[c],
                        remapped.values[c]);

            o->soft_volume.values[c] = pa_sw_volume_multiply(
                    o->real_ratio.values[c],
                    o->volume_factor.values[c]);

        /* We don't copy the soft_volume to the thread_info data
         * here. That must be done by the caller */

static pa_cvolume *cvolume_remap_minimal_impact(
        const pa_cvolume *template,
        const pa_channel_map *from,
        const pa_channel_map *to) {

    pa_assert(template);

    pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
    pa_assert(pa_cvolume_compatible_with_channel_map(template, to));

    /* Much like pa_cvolume_remap(), but tries to minimize impact when
     * mapping from source output to source volumes:
     * If template is a possible remapping from v it is used instead
     * of remapping anew.
     * If the channel maps don't match we set an all-channel volume on
     * the source to ensure that changing a volume on one stream has no
     * effect that cannot be compensated for in another stream that
     * does not have the same channel map as the source. */

    if (pa_channel_map_equal(from, to))

    if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {

    pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
/* Called from main thread. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. */
static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert(max_volume);
    pa_assert(channel_map);
    pa_assert(pa_source_flat_volume_enabled(s));

    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        pa_cvolume remapped;

        if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
            if (PA_SOURCE_IS_LINKED(o->destination_source->state))
                get_maximum_output_volume(o->destination_source, max_volume, channel_map);

            /* Ignore this output. The origin source uses volume sharing, so this
             * output's volume will be set to be equal to the root source's real
             * volume. Obviously this output's current volume must not then
             * affect what the root source's real volume will be. */

        remapped = o->volume;
        cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
        pa_cvolume_merge(max_volume, max_volume, &remapped);

/* Called from main thread. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. */
static bool has_outputs(pa_source *s) {
    pa_source_output *o;

    pa_source_assert_ref(s);

    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))

/* Called from main thread. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. */
static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert(new_volume);
    pa_assert(channel_map);

    s->real_volume = *new_volume;
    pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);

    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
            if (pa_source_flat_volume_enabled(s)) {
                pa_cvolume new_output_volume;

                /* Follow the root source's real volume. */
                new_output_volume = *new_volume;
                pa_cvolume_remap(&new_output_volume, channel_map, &o->channel_map);
                pa_source_output_set_volume_direct(o, &new_output_volume);

                compute_reference_ratio(o);

            if (PA_SOURCE_IS_LINKED(o->destination_source->state))
                update_real_volume(o->destination_source, new_volume, channel_map);

/* Called from main thread. Only called for the root source in shared volume
static void compute_real_volume(pa_source *s) {
    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(pa_source_flat_volume_enabled(s));
    pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));

    /* This determines the maximum volume of all streams and sets
     * s->real_volume accordingly. */

    if (!has_outputs(s)) {
        /* In the special case that we have no source outputs we leave the
         * volume unmodified. */
        update_real_volume(s, &s->reference_volume, &s->channel_map);

    pa_cvolume_mute(&s->real_volume, s->channel_map.channels);

    /* First let's determine the new maximum volume of all outputs
     * connected to this source */
    get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
    update_real_volume(s, &s->real_volume, &s->channel_map);

    /* Then, let's update the real ratios/soft volumes of all outputs
     * connected to this source */
    compute_real_ratios(s);
/* Called from main thread. Only called for the root source in shared volume
 * cases, except for internal recursive calls. */
static void propagate_reference_volume(pa_source *s) {
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(pa_source_flat_volume_enabled(s));

    /* This is called whenever the source volume changes that is not
     * caused by a source output volume change. We need to fix up the
     * source output volumes accordingly */

    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        pa_cvolume new_volume;

        if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
            if (PA_SOURCE_IS_LINKED(o->destination_source->state))
                propagate_reference_volume(o->destination_source);

            /* Since the origin source uses volume sharing, this output's volume
             * needs to be updated to match the root source's real volume, but
             * that will be done later in update_real_volume(). */

        /* This basically calculates:
         *    o->volume := o->reference_volume * o->reference_ratio */

        new_volume = s->reference_volume;
        pa_cvolume_remap(&new_volume, &s->channel_map, &o->channel_map);
        pa_sw_cvolume_multiply(&new_volume, &new_volume, &o->reference_ratio);
        pa_source_output_set_volume_direct(o, &new_volume);

/* Called from main thread. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. The return value indicates
 * whether any reference volume actually changed. */
static bool update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, bool save) {

    bool reference_volume_changed;
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    pa_assert(channel_map);
    pa_assert(pa_cvolume_valid(v));

    pa_cvolume_remap(&volume, channel_map, &s->channel_map);

    reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
    pa_source_set_reference_volume_direct(s, &volume);

    s->save_volume = (!reference_volume_changed && s->save_volume) || save;

    if (!reference_volume_changed && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
        /* If the root source's volume doesn't change, then there can't be any
         * changes in the other sources in the source tree either.
         * It's probably theoretically possible that even if the root source's
         * volume changes slightly, some filter source doesn't change its volume
         * due to rounding errors. If that happens, we still want to propagate
         * the changed root source volume to the sources connected to the
         * intermediate source that didn't change its volume. This theoretical
         * possibility is the reason why we have that !(s->flags &
         * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
         * notice even if we always returned false here when
         * reference_volume_changed is false. */
    PA_IDXSET_FOREACH(o, s->outputs, idx) {
        if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
                && PA_SOURCE_IS_LINKED(o->destination_source->state))
            update_reference_volume(o->destination_source, v, channel_map, false);

/* Called from main thread */
void pa_source_set_volume(
        const pa_cvolume *volume,

    pa_cvolume new_reference_volume, root_real_volume;
    pa_source *root_source;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(!volume || pa_cvolume_valid(volume));
    pa_assert(volume || pa_source_flat_volume_enabled(s));
    pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));

    /* make sure we don't change the volume in PASSTHROUGH mode ...
     * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
    if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
        pa_log_warn("Cannot change volume, source is monitor of a PASSTHROUGH sink");

    /* In case of volume sharing, the volume is set for the root source first,
     * from which it's then propagated to the sharing sources. */
    root_source = pa_source_get_master(s);

    if (PA_UNLIKELY(!root_source))

    /* As a special exception we accept mono volumes on all sources --
     * even on those with more complex channel maps */

        if (pa_cvolume_compatible(volume, &s->sample_spec))
            new_reference_volume = *volume;
            new_reference_volume = s->reference_volume;
            pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));

        pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);

        if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
            if (pa_source_flat_volume_enabled(root_source)) {
                /* OK, propagate this volume change back to the outputs */
                propagate_reference_volume(root_source);

                /* And now recalculate the real volume */
                compute_real_volume(root_source);
                update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);

        /* If volume is NULL we synchronize the source's real and
         * reference volumes with the stream volumes. */

        pa_assert(pa_source_flat_volume_enabled(root_source));

        /* Ok, let's determine the new real volume */
        compute_real_volume(root_source);
        /* To propagate the reference volume from the filter to the root source,
         * we first take the real volume from the root source and remap it to
         * match the filter. Then, we merge in the reference volume from the
         * filter on top of this, and remap it back to the root source channel
         * count and map. */
        root_real_volume = root_source->real_volume;
        /* First we remap root's real volume to filter channel count and map if needed */
        if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
            pa_cvolume_remap(&root_real_volume, &root_source->channel_map, &s->channel_map);
        /* Then let's 'push' the reference volume if necessary */
        pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_real_volume);
        /* If the source and its root don't have the same number of channels, we need to remap back */
        if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
            pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);

        update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);

        /* Now that the reference volume is updated, we can update the streams'
         * reference ratios. */
        compute_reference_ratios(root_source);

    if (root_source->set_volume) {
        /* If we have a function set_volume(), then we do not apply a
         * soft volume by default. However, set_volume() is free to
         * apply one to root_source->soft_volume */

        pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
        if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
            root_source->set_volume(root_source);

        /* If we have no function set_volume(), then the soft volume
         * becomes the real volume */
        root_source->soft_volume = root_source->real_volume;

    /* This tells the source that soft volume and/or real volume changed */

        pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);

/* Called from the io thread if sync volume is used, otherwise from the main thread.
 * Only to be called by source implementor */
void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {

    pa_source_assert_ref(s);
    pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));

    if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
        pa_source_assert_io_context(s);

        pa_assert_ctl_context();

        pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);

        s->soft_volume = *volume;

    if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
        pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);

        s->thread_info.soft_volume = s->soft_volume;
/* Called from the main thread. Only called for the root source in volume sharing
 * cases, except for internal recursive calls. */
static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
    pa_source_output *o;

    pa_source_assert_ref(s);
    pa_assert(old_real_volume);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    /* This is called when the hardware's real volume changes due to
     * some external event. We copy the real volume into our
     * reference volume and then rebuild the stream volumes based on
     * i->real_ratio which should stay fixed. */

    if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
        if (pa_cvolume_equal(old_real_volume, &s->real_volume))

        /* 1. Make the real volume the reference volume */
        update_reference_volume(s, &s->real_volume, &s->channel_map, true);

    if (pa_source_flat_volume_enabled(s)) {
        PA_IDXSET_FOREACH(o, s->outputs, idx) {
            pa_cvolume new_volume;

            /* 2. Since the source's reference and real volumes are equal
             * now our ratios should be too. */
            pa_source_output_set_reference_ratio(o, &o->real_ratio);

            /* 3. Recalculate the new stream reference volume based on the
             * reference ratio and the source's reference volume.
             * This basically calculates:
             *     o->volume = s->reference_volume * o->reference_ratio
             * This is identical to propagate_reference_volume() */
            new_volume = s->reference_volume;
            pa_cvolume_remap(&new_volume, &s->channel_map, &o->channel_map);
            pa_sw_cvolume_multiply(&new_volume, &new_volume, &o->reference_ratio);
            pa_source_output_set_volume_direct(o, &new_volume);

            if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
                    && PA_SOURCE_IS_LINKED(o->destination_source->state))
                propagate_real_volume(o->destination_source, old_real_volume);

    /* Something got changed in the hardware. It probably makes sense
     * to save changed hw settings given that hw volume changes not
     * triggered by PA are almost certainly done by the user. */
    if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
        s->save_volume = true;

/* Called from io thread */
void pa_source_update_volume_and_mute(pa_source *s) {

    pa_source_assert_io_context(s);

    pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);

/* Called from main thread */
const pa_cvolume *pa_source_get_volume(pa_source *s, bool force_refresh) {
    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));

    if (s->refresh_volume || force_refresh) {
        struct pa_cvolume old_real_volume;

        pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));

        old_real_volume = s->real_volume;

        if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)

            pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);

        update_real_volume(s, &s->real_volume, &s->channel_map);
        propagate_real_volume(s, &old_real_volume);

    return &s->reference_volume;

/* Called from main thread. In volume sharing cases, only the root source may
void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
    pa_cvolume old_real_volume;

    pa_source_assert_ref(s);
    pa_assert_ctl_context();
    pa_assert(PA_SOURCE_IS_LINKED(s->state));
    pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));

    /* The source implementor may call this if the volume changed to make sure everyone is notified */

    old_real_volume = s->real_volume;
    update_real_volume(s, new_real_volume, &s->channel_map);
    propagate_real_volume(s, &old_real_volume);
1838 /* Called from main thread */
1839 void pa_source_set_mute(pa_source *s, bool mute, bool save) {
1842 pa_source_assert_ref(s);
1843 pa_assert_ctl_context();
1845 old_muted = s->muted;
1847 if (mute == old_muted) {
1848 s->save_muted |= save;
1853 s->save_muted = save;
1855 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute) {
1856 s->set_mute_in_progress = true;
1858 s->set_mute_in_progress = false;
1861 if (!PA_SOURCE_IS_LINKED(s->state))
1864 pa_log_debug("The mute of source %s changed from %s to %s.", s->name, pa_yes_no(old_muted), pa_yes_no(mute));
1865 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1866 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1867 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_MUTE_CHANGED], s);
1870 /* Called from main thread */
1871 bool pa_source_get_mute(pa_source *s, bool force_refresh) {
1873 pa_source_assert_ref(s);
1874 pa_assert_ctl_context();
1875 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1877 if ((s->refresh_muted || force_refresh) && s->get_mute) {
1880 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
1881 if (pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, &mute, 0, NULL) >= 0)
1882 pa_source_mute_changed(s, mute);
1884 if (s->get_mute(s, &mute) >= 0)
1885 pa_source_mute_changed(s, mute);
1892 /* Called from main thread */
1893 void pa_source_mute_changed(pa_source *s, bool new_muted) {
1894 pa_source_assert_ref(s);
1895 pa_assert_ctl_context();
1896 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1898 if (s->set_mute_in_progress)
1901 /* pa_source_set_mute() does this same check, so this may appear redundant,
1902 * but we must also have the check here, because the save parameter of
1903 * pa_source_set_mute() would otherwise have unintended side effects
1904 * (saving the mute state when it shouldn't be saved). */
1905 if (new_muted == s->muted)
1908 pa_source_set_mute(s, new_muted, true);
1911 /* Called from main thread */
1912 bool pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1913 pa_source_assert_ref(s);
1914 pa_assert_ctl_context();
1917 pa_proplist_update(s->proplist, mode, p);
1919 if (PA_SOURCE_IS_LINKED(s->state)) {
1920 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1921 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
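/*
 * Example (illustrative, main-thread code): merging a few properties into the
 * source's proplist. The property value is made up for the example.
 *
 *     pa_proplist *p = pa_proplist_new();
 *     pa_proplist_sets(p, PA_PROP_DEVICE_DESCRIPTION, "USB Microphone");
 *     pa_source_update_proplist(s, PA_UPDATE_REPLACE, p);
 *     pa_proplist_free(p);
 */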
1927 /* Called from main thread */
1928 /* FIXME -- this should be dropped and merged into pa_source_update_proplist() */
1929 void pa_source_set_description(pa_source *s, const char *description) {
1931 pa_source_assert_ref(s);
1932 pa_assert_ctl_context();
1934 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1937 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1939 if (old && description && pa_streq(old, description))
1943 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1945 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1947 if (PA_SOURCE_IS_LINKED(s->state)) {
1948 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1949 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1953 /* Called from main thread */
1954 unsigned pa_source_linked_by(pa_source *s) {
1955 pa_source_assert_ref(s);
1956 pa_assert_ctl_context();
1957 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1959 return pa_idxset_size(s->outputs);
1962 /* Called from main thread */
1963 unsigned pa_source_used_by(pa_source *s) {
1966 pa_source_assert_ref(s);
1967 pa_assert_ctl_context();
1968 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1970 ret = pa_idxset_size(s->outputs);
1971 pa_assert(ret >= s->n_corked);
1973 return ret - s->n_corked;
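/*
 * Example (illustrative): the difference between the two counters above.
 *
 *     unsigned linked = pa_source_linked_by(s);  // all linked outputs, corked or not
 *     unsigned busy = pa_source_used_by(s);      // linked outputs minus the corked ones
 *     pa_assert(busy <= linked);
 */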
1976 /* Called from main thread */
1977 unsigned pa_source_check_suspend(pa_source *s, pa_source_output *ignore) {
1979 pa_source_output *o;
1982 pa_source_assert_ref(s);
1983 pa_assert_ctl_context();
1985 if (!PA_SOURCE_IS_LINKED(s->state))
1990 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1991 pa_source_output_state_t st;
1996 st = pa_source_output_get_state(o);
1998 /* We do not assert here. It is perfectly valid for a source output to
1999 * be in the INIT state (i.e. created, marked done but not yet put)
2000 * and we should not care if it's unlinked as it won't contribute
2001 * towards our busy status.
2003 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
2006 if (st == PA_SOURCE_OUTPUT_CORKED)
2009 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
2018 const char *pa_source_state_to_string(pa_source_state_t state) {
2020 case PA_SOURCE_INIT: return "INIT";
2021 case PA_SOURCE_IDLE: return "IDLE";
2022 case PA_SOURCE_RUNNING: return "RUNNING";
2023 case PA_SOURCE_SUSPENDED: return "SUSPENDED";
2024 case PA_SOURCE_UNLINKED: return "UNLINKED";
2025 case PA_SOURCE_INVALID_STATE: return "INVALID_STATE";
2028 pa_assert_not_reached();
2031 /* Called from the IO thread */
2032 static void sync_output_volumes_within_thread(pa_source *s) {
2033 pa_source_output *o;
2036 pa_source_assert_ref(s);
2037 pa_source_assert_io_context(s);
2039 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
2040 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
2043 o->thread_info.soft_volume = o->soft_volume;
2044 //pa_source_output_request_rewind(o, 0, true, false, false);
2048 /* Called from the IO thread. Only called for the root source in volume sharing
2049 * cases, except for internal recursive calls. */
2050 static void set_shared_volume_within_thread(pa_source *s) {
2051 pa_source_output *o;
2054 pa_source_assert_ref(s);
2056 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
2058 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
2059 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
2060 set_shared_volume_within_thread(o->destination_source);
2064 /* Called from the IO thread, except for a few messages that are posted to and handled in the main thread (e.g. PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE) */
2065 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
2066 pa_source *s = PA_SOURCE(object);
2067 pa_source_assert_ref(s);
2069 switch ((pa_source_message_t) code) {
2071 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
2072 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
2074 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
2076 if (o->direct_on_input) {
2077 o->thread_info.direct_on_input = o->direct_on_input;
2078 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
2081 pa_source_output_attach(o);
2083 pa_source_output_set_state_within_thread(o, o->state);
2085 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
2086 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
2088 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2090 /* We don't just invalidate the requested latency here,
2091 * because if we are in a move we might need to fix up the
2092 * requested latency. */
2093 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
2095 /* In flat volume mode we need to update the volume as well. */
2097 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2100 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
2101 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
2103 pa_source_output_set_state_within_thread(o, o->state);
2105 pa_source_output_detach(o);
2107 if (o->thread_info.direct_on_input) {
2108 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
2109 o->thread_info.direct_on_input = NULL;
2112 pa_hashmap_remove_and_free(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index));
2113 pa_source_invalidate_requested_latency(s, true);
2115 /* In flat volume mode we need to update the volume as well. */
2117 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2120 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
2121 pa_source *root_source = pa_source_get_master(s);
2123 if (PA_LIKELY(root_source))
2124 set_shared_volume_within_thread(root_source);
2129 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
2131 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2133 pa_source_volume_change_push(s);
2135 /* Fall through ... */
2137 case PA_SOURCE_MESSAGE_SET_VOLUME:
2139 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2140 s->thread_info.soft_volume = s->soft_volume;
2143 /* Fall through ... */
2145 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
2146 sync_output_volumes_within_thread(s);
2149 case PA_SOURCE_MESSAGE_GET_VOLUME:
2151 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2153 pa_source_volume_change_flush(s);
2154 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2157 /* In case the source implementor reset the SW volume. */
2158 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2159 s->thread_info.soft_volume = s->soft_volume;
2164 case PA_SOURCE_MESSAGE_SET_MUTE:
2166 if (s->thread_info.soft_muted != s->muted) {
2167 s->thread_info.soft_muted = s->muted;
2170 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->set_mute)
2175 case PA_SOURCE_MESSAGE_GET_MUTE:
2177 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->get_mute)
2178 return s->get_mute(s, userdata);
2182 case PA_SOURCE_MESSAGE_SET_STATE: {
2184 bool suspend_change =
2185 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2186 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2188 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2190 if (suspend_change) {
2191 pa_source_output *o;
2194 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2195 if (o->suspend_within_thread)
2196 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2202 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2204 pa_usec_t *usec = userdata;
2205 *usec = pa_source_get_requested_latency_within_thread(s);
2207 /* Yes, that's right, the IO thread will see -1 when no
2208 * explicit requested latency is configured; the main
2209 * thread will see max_latency instead. */
2210 if (*usec == (pa_usec_t) -1)
2211 *usec = s->thread_info.max_latency;
2216 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2217 pa_usec_t *r = userdata;
2219 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2224 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2225 pa_usec_t *r = userdata;
2227 r[0] = s->thread_info.min_latency;
2228 r[1] = s->thread_info.max_latency;
2233 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2235 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2238 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2240 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2243 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2245 *((size_t*) userdata) = s->thread_info.max_rewind;
2248 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2250 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2253 case PA_SOURCE_MESSAGE_GET_LATENCY:
2255 if (s->monitor_of) {
2256 *((int64_t*) userdata) = -pa_sink_get_latency_within_thread(s->monitor_of, true);
2260 /* Implementors need to override this implementation! */
2263 case PA_SOURCE_MESSAGE_SET_PORT:
2265 pa_assert(userdata);
2267 struct source_message_set_port *msg_data = userdata;
2268 msg_data->ret = s->set_port(s, msg_data->port);
2272 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2273 /* This message is sent from the IO thread and handled in the main thread. */
2274 pa_assert_ctl_context();
2276 /* Make sure we're not messing with the main thread when we are no longer linked */
2277 if (!PA_SOURCE_IS_LINKED(s->state))
2280 pa_source_get_volume(s, true);
2281 pa_source_get_mute(s, true);
2284 case PA_SOURCE_MESSAGE_SET_PORT_LATENCY_OFFSET:
2285 s->thread_info.port_latency_offset = offset;
2288 case PA_SOURCE_MESSAGE_MAX:
2295 /* Called from main thread */
2296 int pa_source_suspend_all(pa_core *c, bool suspend, pa_suspend_cause_t cause) {
2301 pa_core_assert_ref(c);
2302 pa_assert_ctl_context();
2303 pa_assert(cause != 0);
2305 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2308 if (source->monitor_of)
2311 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2318 /* Called from IO thread */
2319 void pa_source_detach_within_thread(pa_source *s) {
2320 pa_source_output *o;
2323 pa_source_assert_ref(s);
2324 pa_source_assert_io_context(s);
2325 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2327 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2328 pa_source_output_detach(o);
2331 /* Called from IO thread */
2332 void pa_source_attach_within_thread(pa_source *s) {
2333 pa_source_output *o;
2336 pa_source_assert_ref(s);
2337 pa_source_assert_io_context(s);
2338 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2340 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2341 pa_source_output_attach(o);
2344 /* Called from IO thread */
2345 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2346 pa_usec_t result = (pa_usec_t) -1;
2347 pa_source_output *o;
2350 pa_source_assert_ref(s);
2351 pa_source_assert_io_context(s);
2353 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2354 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2356 if (s->thread_info.requested_latency_valid)
2357 return s->thread_info.requested_latency;
2359 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2360 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2361 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2362 result = o->thread_info.requested_source_latency;
2364 if (result != (pa_usec_t) -1)
2365 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2367 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2368 /* Only cache this if we are fully set up */
2369 s->thread_info.requested_latency = result;
2370 s->thread_info.requested_latency_valid = true;
2376 /* Called from main thread */
2377 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2380 pa_source_assert_ref(s);
2381 pa_assert_ctl_context();
2382 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2384 if (s->state == PA_SOURCE_SUSPENDED)
2387 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2392 /* Called from IO thread */
2393 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2394 pa_source_output *o;
2397 pa_source_assert_ref(s);
2398 pa_source_assert_io_context(s);
2400 if (max_rewind == s->thread_info.max_rewind)
2403 s->thread_info.max_rewind = max_rewind;
2405 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2406 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2407 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2410 /* Called from main thread */
2411 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2412 pa_source_assert_ref(s);
2413 pa_assert_ctl_context();
2415 if (PA_SOURCE_IS_LINKED(s->state))
2416 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2418 pa_source_set_max_rewind_within_thread(s, max_rewind);
2421 /* Called from IO thread */
2422 void pa_source_invalidate_requested_latency(pa_source *s, bool dynamic) {
2423 pa_source_output *o;
2426 pa_source_assert_ref(s);
2427 pa_source_assert_io_context(s);
2429 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2430 s->thread_info.requested_latency_valid = false;
2434 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2436 if (s->update_requested_latency)
2437 s->update_requested_latency(s);
2439 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2440 if (o->update_source_requested_latency)
2441 o->update_source_requested_latency(o);
2445 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2448 /* Called from main thread */
2449 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2450 pa_source_assert_ref(s);
2451 pa_assert_ctl_context();
2453 /* min_latency == 0: no limit
2454 * min_latency anything else: specified limit
2456 * Similar for max_latency */
2458 if (min_latency < ABSOLUTE_MIN_LATENCY)
2459 min_latency = ABSOLUTE_MIN_LATENCY;
2461 if (max_latency <= 0 ||
2462 max_latency > ABSOLUTE_MAX_LATENCY)
2463 max_latency = ABSOLUTE_MAX_LATENCY;
2465 pa_assert(min_latency <= max_latency);
2467 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2468 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2469 max_latency == ABSOLUTE_MAX_LATENCY) ||
2470 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2472 if (PA_SOURCE_IS_LINKED(s->state)) {
2478 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2480 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
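/*
 * Example (hypothetical driver init code): a source that supports dynamic
 * latency advertises its range, typically during initialisation before
 * pa_source_put(). The values below are arbitrary; PA_SOURCE_DYNAMIC_LATENCY
 * must be set in s->flags for anything other than the absolute limits (see the
 * assertion above).
 *
 *     pa_source_set_latency_range(s, 1 * PA_USEC_PER_MSEC, 200 * PA_USEC_PER_MSEC);
 */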
2483 /* Called from main thread */
2484 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2485 pa_source_assert_ref(s);
2486 pa_assert_ctl_context();
2487 pa_assert(min_latency);
2488 pa_assert(max_latency);
2490 if (PA_SOURCE_IS_LINKED(s->state)) {
2491 pa_usec_t r[2] = { 0, 0 };
2493 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2495 *min_latency = r[0];
2496 *max_latency = r[1];
2498 *min_latency = s->thread_info.min_latency;
2499 *max_latency = s->thread_info.max_latency;
2503 /* Called from IO thread, and from main thread before pa_source_put() is called */
2504 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2505 pa_source_assert_ref(s);
2506 pa_source_assert_io_context(s);
2508 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2509 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2510 pa_assert(min_latency <= max_latency);
2512 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2513 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2514 max_latency == ABSOLUTE_MAX_LATENCY) ||
2515 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2518 if (s->thread_info.min_latency == min_latency &&
2519 s->thread_info.max_latency == max_latency)
2522 s->thread_info.min_latency = min_latency;
2523 s->thread_info.max_latency = max_latency;
2525 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2526 pa_source_output *o;
2529 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2530 if (o->update_source_latency_range)
2531 o->update_source_latency_range(o);
2534 pa_source_invalidate_requested_latency(s, false);
2537 /* Called from main thread, before the source is put */
2538 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2539 pa_source_assert_ref(s);
2540 pa_assert_ctl_context();
2542 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2543 pa_assert(latency == 0);
2547 if (latency < ABSOLUTE_MIN_LATENCY)
2548 latency = ABSOLUTE_MIN_LATENCY;
2550 if (latency > ABSOLUTE_MAX_LATENCY)
2551 latency = ABSOLUTE_MAX_LATENCY;
2553 if (PA_SOURCE_IS_LINKED(s->state))
2554 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2556 s->thread_info.fixed_latency = latency;
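/*
 * Example (hypothetical driver init code): a source without dynamic latency
 * support reports a single fixed value instead; the number is arbitrary and is
 * clamped to [ABSOLUTE_MIN_LATENCY, ABSOLUTE_MAX_LATENCY] above.
 *
 *     pa_source_set_fixed_latency(s, 25 * PA_USEC_PER_MSEC);
 */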
2559 /* Called from main thread */
2560 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2563 pa_source_assert_ref(s);
2564 pa_assert_ctl_context();
2566 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2569 if (PA_SOURCE_IS_LINKED(s->state))
2570 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2572 latency = s->thread_info.fixed_latency;
2577 /* Called from IO thread */
2578 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2579 pa_source_assert_ref(s);
2580 pa_source_assert_io_context(s);
2582 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2583 pa_assert(latency == 0);
2584 s->thread_info.fixed_latency = 0;
2589 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2590 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2592 if (s->thread_info.fixed_latency == latency)
2595 s->thread_info.fixed_latency = latency;
2597 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2598 pa_source_output *o;
2601 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2602 if (o->update_source_fixed_latency)
2603 o->update_source_fixed_latency(o);
2606 pa_source_invalidate_requested_latency(s, false);
2609 /* Called from main thread */
2610 void pa_source_set_port_latency_offset(pa_source *s, int64_t offset) {
2611 pa_source_assert_ref(s);
2613 s->port_latency_offset = offset;
2615 if (PA_SOURCE_IS_LINKED(s->state))
2616 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT_LATENCY_OFFSET, NULL, offset, NULL) == 0);
2618 s->thread_info.port_latency_offset = offset;
2620 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_LATENCY_OFFSET_CHANGED], s);
2623 /* Called from main thread */
2624 size_t pa_source_get_max_rewind(pa_source *s) {
2626 pa_assert_ctl_context();
2627 pa_source_assert_ref(s);
2629 if (!PA_SOURCE_IS_LINKED(s->state))
2630 return s->thread_info.max_rewind;
2632 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2637 /* Called from main context */
2638 int pa_source_set_port(pa_source *s, const char *name, bool save) {
2639 pa_device_port *port;
2642 pa_source_assert_ref(s);
2643 pa_assert_ctl_context();
2646 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2647 return -PA_ERR_NOTIMPLEMENTED;
2651 return -PA_ERR_NOENTITY;
2653 if (!(port = pa_hashmap_get(s->ports, name)))
2654 return -PA_ERR_NOENTITY;
2656 if (s->active_port == port) {
2657 s->save_port = s->save_port || save;
2661 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2662 struct source_message_set_port msg = { .port = port, .ret = 0 };
2663 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2667 ret = s->set_port(s, port);
2670 return -PA_ERR_NOENTITY;
2672 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2674 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2676 s->active_port = port;
2677 s->save_port = save;
2679 /* The active port affects the default source selection. */
2680 pa_core_update_default_source(s->core);
2682 pa_source_set_port_latency_offset(s, s->active_port->latency_offset);
2684 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
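/*
 * Example (illustrative, main-thread code): switching the active port by name
 * and persisting the choice. The port name below is made up; real names are
 * the keys of s->ports.
 *
 *     if (pa_source_set_port(s, "analog-input-headset-mic", true) < 0)
 *         pa_log_warn("Failed to switch port");
 */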
2689 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2691 /* Called from the IO thread. */
2692 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2693 pa_source_volume_change *c;
2694 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2695 c = pa_xnew(pa_source_volume_change, 1);
2697 PA_LLIST_INIT(pa_source_volume_change, c);
2699 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2703 /* Called from the IO thread. */
2704 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2706 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2710 /* Called from the IO thread. */
2711 void pa_source_volume_change_push(pa_source *s) {
2712 pa_source_volume_change *c = NULL;
2713 pa_source_volume_change *nc = NULL;
2714 pa_source_volume_change *pc = NULL;
2715 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2717 const char *direction = NULL;
2720 nc = pa_source_volume_change_new(s);
2722 /* NOTE: There are already more different volumes in pa_source than I can remember.
2723 * Adding one more volume for HW would rid us of this, but I am trying
2724 * to survive with the ones we already have. */
2725 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2727 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2728 pa_log_debug("Volume not changing");
2729 pa_source_volume_change_free(nc);
2733 nc->at = pa_source_get_latency_within_thread(s, false);
2734 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2736 if (s->thread_info.volume_changes_tail) {
2737 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2738 /* If volume is going up let's do it a bit late. If it is going
2739 * down let's do it a bit early. */
2740 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2741 if (nc->at + safety_margin > c->at) {
2742 nc->at += safety_margin;
2747 else if (nc->at - safety_margin > c->at) {
2748 nc->at -= safety_margin;
2756 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2757 nc->at += safety_margin;
2760 nc->at -= safety_margin;
2763 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2766 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2769 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2771 /* We can ignore volume events that came earlier but should happen later than this. */
2772 PA_LLIST_FOREACH_SAFE(c, pc, nc->next) {
2773 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2774 pa_source_volume_change_free(c);
2777 s->thread_info.volume_changes_tail = nc;
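/*
 * Numeric illustration (assumed values, not taken from real hardware): with a
 * source latency of 30 ms, volume_change_extra_delay of 2 ms and a safety
 * margin of 8 ms, a change pushed at time T is initially scheduled for
 * T + 32 ms. In the simple case with no other pending changes, an increase is
 * then postponed by the safety margin (applied at T + 40 ms) while a decrease
 * is pulled forward by it (applied at T + 24 ms), matching the "up a bit late,
 * down a bit early" rule in the comment above.
 */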
2780 /* Called from the IO thread. */
2781 static void pa_source_volume_change_flush(pa_source *s) {
2782 pa_source_volume_change *c = s->thread_info.volume_changes;
2784 s->thread_info.volume_changes = NULL;
2785 s->thread_info.volume_changes_tail = NULL;
2787 pa_source_volume_change *next = c->next;
2788 pa_source_volume_change_free(c);
2793 /* Called from the IO thread. */
2794 bool pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2800 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2806 pa_assert(s->write_volume);
2808 now = pa_rtclock_now();
2810 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2811 pa_source_volume_change *c = s->thread_info.volume_changes;
2812 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2813 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2814 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2816 s->thread_info.current_hw_volume = c->hw_volume;
2817 pa_source_volume_change_free(c);
2823 if (s->thread_info.volume_changes) {
2825 *usec_to_next = s->thread_info.volume_changes->at - now;
2826 if (pa_log_ratelimit(PA_LOG_DEBUG))
2827 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2832 s->thread_info.volume_changes_tail = NULL;
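/*
 * Example (hypothetical IO-thread loop of a deferred-volume driver): after each
 * iteration the driver applies any due changes and, if more are still pending,
 * uses the returned delay to wake up in time for the next one. "u" is a made-up
 * userdata struct with the usual source and rtpoll members.
 *
 *     pa_usec_t delay;
 *     if (pa_source_volume_change_apply(u->source, &delay))
 *         pa_rtpoll_set_timer_relative(u->rtpoll, delay);
 */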
2837 /* Called from the main thread */
2838 /* Gets the list of formats supported by the source. Both the idxset and its
2839 * members must be freed by the caller. */
2840 pa_idxset* pa_source_get_formats(pa_source *s) {
2845 if (s->get_formats) {
2846 /* Source supports format query, all is good */
2847 ret = s->get_formats(s);
2849 /* Source doesn't support format query, so assume it does PCM */
2850 pa_format_info *f = pa_format_info_new();
2851 f->encoding = PA_ENCODING_PCM;
2853 ret = pa_idxset_new(NULL, NULL);
2854 pa_idxset_put(ret, f, NULL);
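/*
 * Example (illustrative, main-thread code): enumerating the supported formats
 * and releasing them as the comment at the top of this function requires.
 *
 *     uint32_t idx;
 *     pa_format_info *f;
 *     pa_idxset *formats = pa_source_get_formats(s);
 *
 *     PA_IDXSET_FOREACH(f, formats, idx) {
 *         char buf[PA_FORMAT_INFO_SNPRINT_MAX];
 *         pa_log_debug("Supported format: %s", pa_format_info_snprint(buf, sizeof(buf), f));
 *     }
 *
 *     pa_idxset_free(formats, (pa_free_cb_t) pa_format_info_free);
 */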
2860 /* Called from the main thread */
2861 /* Checks if the source can accept this format */
2862 bool pa_source_check_format(pa_source *s, pa_format_info *f) {
2863 pa_idxset *formats = NULL;
2869 formats = pa_source_get_formats(s);
2872 pa_format_info *finfo_device;
2875 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2876 if (pa_format_info_is_compatible(finfo_device, f)) {
2882 pa_idxset_free(formats, (pa_free_cb_t) pa_format_info_free);
2888 /* Called from the main thread */
2889 /* Calculates the intersection between formats supported by the source and
2890 * in_formats, and returns these, in the order of the source's formats. */
2891 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2892 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2893 pa_format_info *f_source, *f_in;
2898 if (!in_formats || pa_idxset_isempty(in_formats))
2901 source_formats = pa_source_get_formats(s);
2903 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2904 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2905 if (pa_format_info_is_compatible(f_source, f_in))
2906 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2912 pa_idxset_free(source_formats, (pa_free_cb_t) pa_format_info_free);
2917 /* Called from the main thread. */
2918 void pa_source_set_reference_volume_direct(pa_source *s, const pa_cvolume *volume) {
2919 pa_cvolume old_volume;
2920 char old_volume_str[PA_CVOLUME_SNPRINT_VERBOSE_MAX];
2921 char new_volume_str[PA_CVOLUME_SNPRINT_VERBOSE_MAX];
2926 old_volume = s->reference_volume;
2928 if (pa_cvolume_equal(volume, &old_volume))
2931 s->reference_volume = *volume;
2932 pa_log_debug("The reference volume of source %s changed from %s to %s.", s->name,
2933 pa_cvolume_snprint_verbose(old_volume_str, sizeof(old_volume_str), &old_volume, &s->channel_map,
2934 s->flags & PA_SOURCE_DECIBEL_VOLUME),
2935 pa_cvolume_snprint_verbose(new_volume_str, sizeof(new_volume_str), volume, &s->channel_map,
2936 s->flags & PA_SOURCE_DECIBEL_VOLUME));
2938 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2939 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_VOLUME_CHANGED], s);