2 This file is part of PulseAudio.
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, see <http://www.gnu.org/licenses/>.
28 #include <pulse/format.h>
29 #include <pulse/utf8.h>
30 #include <pulse/xmalloc.h>
31 #include <pulse/timeval.h>
32 #include <pulse/util.h>
33 #include <pulse/rtclock.h>
34 #include <pulse/internal.h>
36 #include <pulsecore/core-util.h>
37 #include <pulsecore/source-output.h>
38 #include <pulsecore/namereg.h>
39 #include <pulsecore/core-subscribe.h>
40 #include <pulsecore/log.h>
41 #include <pulsecore/mix.h>
42 #include <pulsecore/flist.h>
46 #define ABSOLUTE_MIN_LATENCY (500)                 /* 500 usec */
47 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
48 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
50 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
52 struct pa_source_volume_change {
56 PA_LLIST_FIELDS(pa_source_volume_change);
59 struct source_message_set_port {
64 static void source_free(pa_object *o);
66 static void pa_source_volume_change_push(pa_source *s);
67 static void pa_source_volume_change_flush(pa_source *s);
69 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
73 data->proplist = pa_proplist_new();
74 data->ports = pa_hashmap_new_full(pa_idxset_string_hash_func, pa_idxset_string_compare_func, NULL, (pa_free_cb_t) pa_device_port_unref);
79 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
83 data->name = pa_xstrdup(name);
86 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
89 if ((data->sample_spec_is_set = !!spec))
90 data->sample_spec = *spec;
93 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
96 if ((data->channel_map_is_set = !!map))
97 data->channel_map = *map;
100 void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
103 data->alternate_sample_rate_is_set = true;
104 data->alternate_sample_rate = alternate_sample_rate;
107 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
110 if ((data->volume_is_set = !!volume))
111 data->volume = *volume;
114 void pa_source_new_data_set_muted(pa_source_new_data *data, bool mute) {
117 data->muted_is_set = true;
121 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
124 pa_xfree(data->active_port);
125 data->active_port = pa_xstrdup(port);
128 void pa_source_new_data_done(pa_source_new_data *data) {
131 pa_proplist_free(data->proplist);
134 pa_hashmap_free(data->ports);
136 pa_xfree(data->name);
137 pa_xfree(data->active_port);
140 /* Called from main context */
141 static void reset_callbacks(pa_source *s) {
145 s->get_volume = NULL;
146 s->set_volume = NULL;
147 s->write_volume = NULL;
150 s->update_requested_latency = NULL;
152 s->get_formats = NULL;
153 s->update_rate = NULL;
156 /* Called from main context */
157 pa_source* pa_source_new(
159 pa_source_new_data *data,
160 pa_source_flags_t flags) {
164 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
169 pa_assert(data->name);
170 pa_assert_ctl_context();
172 s = pa_msgobject_new(pa_source);
174 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
175 pa_log_debug("Failed to register name %s.", data->name);
180 pa_source_new_data_set_name(data, name);
182 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
184 pa_namereg_unregister(core, name);
188 /* FIXME, need to free s here on failure */
190 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
191 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
193 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
195 if (!data->channel_map_is_set)
196 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
198 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
199 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
201 /* FIXME: There should probably be a general function for checking whether
202 * the source volume is allowed to be set, like there is for source outputs. */
203 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
205 if (!data->volume_is_set) {
206 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
207 data->save_volume = false;
210 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
211 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
213 if (!data->muted_is_set)
217 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
219 pa_device_init_description(data->proplist, data->card);
220 pa_device_init_icon(data->proplist, false);
221 pa_device_init_intended_roles(data->proplist);
223 if (!data->active_port) {
224 pa_device_port *p = pa_device_port_find_best(data->ports);
226 pa_source_new_data_set_port(data, p->name);
229 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
231 pa_namereg_unregister(core, name);
235 s->parent.parent.free = source_free;
236 s->parent.process_msg = pa_source_process_msg;
239 s->state = PA_SOURCE_INIT;
242 s->suspend_cause = data->suspend_cause;
243 pa_source_set_mixer_dirty(s, false);
244 s->name = pa_xstrdup(name);
245 s->proplist = pa_proplist_copy(data->proplist);
246 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
247 s->module = data->module;
248 s->card = data->card;
250 s->priority = pa_device_init_priority(s->proplist);
252 s->sample_spec = data->sample_spec;
253 s->channel_map = data->channel_map;
254 s->default_sample_rate = s->sample_spec.rate;
256 if (data->alternate_sample_rate_is_set)
257 s->alternate_sample_rate = data->alternate_sample_rate;
259 s->alternate_sample_rate = s->core->alternate_sample_rate;
261 if (s->sample_spec.rate == s->alternate_sample_rate) {
262 pa_log_warn("Default and alternate sample rates are the same.");
263 s->alternate_sample_rate = 0;
266 s->outputs = pa_idxset_new(NULL, NULL);
268 s->monitor_of = NULL;
269 s->output_from_master = NULL;
271 s->reference_volume = s->real_volume = data->volume;
272 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
273 s->base_volume = PA_VOLUME_NORM;
274 s->n_volume_steps = PA_VOLUME_NORM+1;
275 s->muted = data->muted;
276 s->refresh_volume = s->refresh_muted = false;
283 /* As a minor optimization we just steal the list instead of copying it. */
285 s->ports = data->ports;
288 s->active_port = NULL;
289 s->save_port = false;
291 if (data->active_port)
292 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
293 s->save_port = data->save_port;
295 /* Hopefully the active port has already been assigned in the previous call
296 to pa_device_port_find_best, but better safe than sorry */
298 s->active_port = pa_device_port_find_best(s->ports);
301 s->port_latency_offset = s->active_port->latency_offset;
303 s->port_latency_offset = 0;
305 s->save_volume = data->save_volume;
306 s->save_muted = data->save_muted;
308 pa_silence_memchunk_get(
309 &core->silence_cache,
315 s->thread_info.rtpoll = NULL;
316 s->thread_info.outputs = pa_hashmap_new_full(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func, NULL,
317 (pa_free_cb_t) pa_source_output_unref);
318 s->thread_info.soft_volume = s->soft_volume;
319 s->thread_info.soft_muted = s->muted;
320 s->thread_info.state = s->state;
321 s->thread_info.max_rewind = 0;
322 s->thread_info.requested_latency_valid = false;
323 s->thread_info.requested_latency = 0;
324 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
325 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
326 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
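/* Sources that advertise PA_SOURCE_DYNAMIC_LATENCY use the min/max latency
 * range above instead of a fixed value, hence fixed_latency starts at zero;
 * everything else gets the 250 ms DEFAULT_FIXED_LATENCY placeholder, which
 * implementors normally override via pa_source_set_fixed_latency() before
 * _put(). */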
328 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
329 s->thread_info.volume_changes_tail = NULL;
330 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
331 s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
332 s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
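/* These two tunables mirror the deferred-volume-safety-margin-usec and
 * deferred-volume-extra-delay-usec settings from daemon.conf; they only
 * matter when PA_SOURCE_DEFERRED_VOLUME is set and hardware volume writes
 * have to be timed against the stream. */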
333 s->thread_info.port_latency_offset = s->port_latency_offset;
335 /* FIXME: This should probably be moved to pa_source_put() */
336 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
339 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
341 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
342 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
345 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
346 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
353 /* Called from main context */
354 static int source_set_state(pa_source *s, pa_source_state_t state) {
357 pa_source_state_t original_state;
360 pa_assert_ctl_context();
362 if (s->state == state)
365 original_state = s->state;
368 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
369 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
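/* suspend_change is true only when the transition crosses the SUSPENDED
 * boundary in either direction; IDLE <-> RUNNING changes do not trigger the
 * per-output suspend() notifications further down. */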
372 if ((ret = s->set_state(s, state)) < 0)
376 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
379 s->set_state(s, original_state);
386 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
387 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
388 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
391 if (suspend_change) {
395 /* We're suspending or resuming, tell everyone about it */
397 PA_IDXSET_FOREACH(o, s->outputs, idx)
398 if (s->state == PA_SOURCE_SUSPENDED &&
399 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
400 pa_source_output_kill(o);
402 o->suspend(o, state == PA_SOURCE_SUSPENDED);
408 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
414 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
415 pa_source_flags_t flags;
418 pa_assert(!s->write_volume || cb);
422 /* Save the current flags so we can tell if they've changed */
426 /* The source implementor is responsible for setting decibel volume support */
427 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
429 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
430 /* See note below in pa_source_put() about volume sharing and decibel volumes */
431 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
434 /* If the flags have changed after init, let any clients know via a change event */
435 if (s->state != PA_SOURCE_INIT && flags != s->flags)
436 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
439 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
440 pa_source_flags_t flags;
443 pa_assert(!cb || s->set_volume);
445 s->write_volume = cb;
447 /* Save the current flags so we can tell if they've changed */
451 s->flags |= PA_SOURCE_DEFERRED_VOLUME;
453 s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;
455 /* If the flags have changed after init, let any clients know via a change event */
456 if (s->state != PA_SOURCE_INIT && flags != s->flags)
457 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
460 void pa_source_set_get_mute_callback(pa_source *s, pa_source_get_mute_cb_t cb) {
466 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
467 pa_source_flags_t flags;
473 /* Save the current flags so we can tell if they've changed */
477 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
479 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
481 /* If the flags have changed after init, let any clients know via a change event */
482 if (s->state != PA_SOURCE_INIT && flags != s->flags)
483 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
486 static void enable_flat_volume(pa_source *s, bool enable) {
487 pa_source_flags_t flags;
491 /* Always follow the overall user preference here */
492 enable = enable && s->core->flat_volumes;
494 /* Save the current flags so we can tell if they've changed */
498 s->flags |= PA_SOURCE_FLAT_VOLUME;
500 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
502 /* If the flags have changed after init, let any clients know via a change event */
503 if (s->state != PA_SOURCE_INIT && flags != s->flags)
504 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
507 void pa_source_enable_decibel_volume(pa_source *s, bool enable) {
508 pa_source_flags_t flags;
512 /* Save the current flags so we can tell if they've changed */
516 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
517 enable_flat_volume(s, true);
519 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
520 enable_flat_volume(s, false);
523 /* If the flags have changed after init, let any clients know via a change event */
524 if (s->state != PA_SOURCE_INIT && flags != s->flags)
525 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
528 /* Called from main context */
529 void pa_source_put(pa_source *s) {
530 pa_source_assert_ref(s);
531 pa_assert_ctl_context();
533 pa_assert(s->state == PA_SOURCE_INIT);
534 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || pa_source_is_filter(s));
536 /* The following fields must be initialized properly when calling _put() */
537 pa_assert(s->asyncmsgq);
538 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
540 /* Generally, flags should be initialized via pa_source_new(). As a
541 * special exception we allow some volume related flags to be set
542 * between _new() and _put() by the callback setter functions above.
544 * Thus we implement a couple of safeguards here which ensure the above
545 * setters were used (or at least the implementor made manual changes
546 * in a compatible way).
548 * Note: All of these flags set here can change over the life time of the source. */
550 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
551 pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
552 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
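/* Illustrative sketch of the expected call pattern (hypothetical driver code;
 * names such as my_get_volume/my_set_volume/my_set_mute and the module
 * pointer m are placeholders): the callbacks are installed between _new()
 * and _put(), which is what makes the asserts above hold:
 *
 *     s = pa_source_new(m->core, &data, PA_SOURCE_HARDWARE | PA_SOURCE_LATENCY);
 *     pa_source_set_get_volume_callback(s, my_get_volume);
 *     pa_source_set_set_volume_callback(s, my_set_volume);
 *     pa_source_set_set_mute_callback(s, my_set_mute);
 *     pa_source_put(s);
 */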
554 /* XXX: Currently decibel volume is disabled for all sources that use volume
555 * sharing. When the master source supports decibel volume, it would be good
556 * to have the flag also in the filter source, but currently we don't do that
557 * so that the flags of the filter source never change when it's moved from
558 * a master source to another. One solution for this problem would be to
559 * remove user-visible volume altogether from filter sources when volume
560 * sharing is used, but the current approach was easier to implement... */
561 /* We always support decibel volumes in software, otherwise we leave it to
562 * the source implementor to set this flag as needed.
564 * Note: This flag can also change over the life time of the source. */
565 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
566 pa_source_enable_decibel_volume(s, true);
567 s->soft_volume = s->reference_volume;
570 /* If the source implementor supports dB volumes by itself, we should always
571 * try to enable flat volumes too */
572 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
573 enable_flat_volume(s, true);
575 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
576 pa_source *root_source = pa_source_get_master(s);
578 pa_assert(PA_LIKELY(root_source));
580 s->reference_volume = root_source->reference_volume;
581 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
583 s->real_volume = root_source->real_volume;
584 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
586 /* We assume that if the source implementor changed the default
587 * volume they did so in real_volume, because that is the usual
588 * place where such changes are supposed to go. */
589 s->reference_volume = s->real_volume;
591 s->thread_info.soft_volume = s->soft_volume;
592 s->thread_info.soft_muted = s->muted;
593 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
595 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
596 || (s->base_volume == PA_VOLUME_NORM
597 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
598 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
599 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == !(s->thread_info.fixed_latency == 0));
601 if (s->suspend_cause)
602 pa_assert_se(source_set_state(s, PA_SOURCE_SUSPENDED) == 0);
604 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
606 pa_core_update_default_source(s->core);
608 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
609 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
612 /* Called from main context */
613 void pa_source_unlink(pa_source *s) {
615 pa_source_output *o, PA_UNUSED *j = NULL;
617 pa_source_assert_ref(s);
618 pa_assert_ctl_context();
620 /* See pa_sink_unlink() for a couple of comments on how this function works. */
623 if (s->unlink_requested)
626 s->unlink_requested = true;
628 linked = PA_SOURCE_IS_LINKED(s->state);
631 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
633 if (s->state != PA_SOURCE_UNLINKED)
634 pa_namereg_unregister(s->core, s->name);
635 pa_idxset_remove_by_data(s->core->sources, s, NULL);
637 if (s == s->core->configured_default_source)
638 pa_core_set_configured_default_source(s->core, NULL);
640 pa_core_update_default_source(s->core);
643 pa_idxset_remove_by_data(s->card->sources, s, NULL);
645 while ((o = pa_idxset_first(s->outputs, NULL))) {
647 pa_source_output_kill(o);
652 source_set_state(s, PA_SOURCE_UNLINKED);
654 s->state = PA_SOURCE_UNLINKED;
659 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
660 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
664 /* Called from main context */
665 static void source_free(pa_object *o) {
666 pa_source *s = PA_SOURCE(o);
669 pa_assert_ctl_context();
670 pa_assert(pa_source_refcnt(s) == 0);
671 pa_assert(!PA_SOURCE_IS_LINKED(s->state));
673 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
675 pa_source_volume_change_flush(s);
677 pa_idxset_free(s->outputs, NULL);
678 pa_hashmap_free(s->thread_info.outputs);
680 if (s->silence.memblock)
681 pa_memblock_unref(s->silence.memblock);
687 pa_proplist_free(s->proplist);
690 pa_hashmap_free(s->ports);
695 /* Called from main context, and not while the IO thread is active, please */
696 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
697 pa_source_assert_ref(s);
698 pa_assert_ctl_context();
703 /* Called from main context, and not while the IO thread is active, please */
704 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
705 pa_source_flags_t old_flags;
706 pa_source_output *output;
709 pa_source_assert_ref(s);
710 pa_assert_ctl_context();
712 /* For now, allow only a minimal set of flags to be changed. */
713 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
715 old_flags = s->flags;
716 s->flags = (s->flags & ~mask) | (value & mask);
718 if (s->flags == old_flags)
721 if ((s->flags & PA_SOURCE_LATENCY) != (old_flags & PA_SOURCE_LATENCY))
722 pa_log_debug("Source %s: LATENCY flag %s.", s->name, (s->flags & PA_SOURCE_LATENCY) ? "enabled" : "disabled");
724 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY) != (old_flags & PA_SOURCE_DYNAMIC_LATENCY))
725 pa_log_debug("Source %s: DYNAMIC_LATENCY flag %s.",
726 s->name, (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ? "enabled" : "disabled");
728 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
729 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_FLAGS_CHANGED], s);
731 PA_IDXSET_FOREACH(output, s->outputs, idx) {
732 if (output->destination_source)
733 pa_source_update_flags(output->destination_source, mask, value);
737 /* Called from IO context, or before _put() from main context */
738 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
739 pa_source_assert_ref(s);
740 pa_source_assert_io_context(s);
742 s->thread_info.rtpoll = p;
745 /* Called from main context */
746 int pa_source_update_status(pa_source*s) {
747 pa_source_assert_ref(s);
748 pa_assert_ctl_context();
749 pa_assert(PA_SOURCE_IS_LINKED(s->state));
751 if (s->state == PA_SOURCE_SUSPENDED)
754 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
757 /* Called from any context - must be threadsafe */
758 void pa_source_set_mixer_dirty(pa_source *s, bool is_dirty) {
759 pa_atomic_store(&s->mixer_dirty, is_dirty ? 1 : 0);
762 /* Called from main context */
763 int pa_source_suspend(pa_source *s, bool suspend, pa_suspend_cause_t cause) {
764 pa_source_assert_ref(s);
765 pa_assert_ctl_context();
766 pa_assert(PA_SOURCE_IS_LINKED(s->state));
767 pa_assert(cause != 0);
769 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
770 return -PA_ERR_NOTSUPPORTED;
773 s->suspend_cause |= cause;
775 s->suspend_cause &= ~cause;
777 if (!(s->suspend_cause & PA_SUSPEND_SESSION) && (pa_atomic_load(&s->mixer_dirty) != 0)) {
778 /* This might look racy but isn't: If somebody sets mixer_dirty exactly here,
779 it'll be handled just fine. */
780 pa_source_set_mixer_dirty(s, false);
781 pa_log_debug("Mixer is now accessible. Updating ALSA mixer settings.");
782 if (s->active_port && s->set_port) {
783 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
784 struct source_message_set_port msg = { .port = s->active_port, .ret = 0 };
785 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
788 s->set_port(s, s->active_port);
798 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
801 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
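/* Typical usage (illustrative): callers pass a single cause bit, e.g.
 * pa_source_suspend(s, true, PA_SUSPEND_IDLE), and later clear it with
 * pa_source_suspend(s, false, PA_SUSPEND_IDLE); since the bits accumulate in
 * s->suspend_cause, the source only resumes once every cause has been
 * cleared. */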
803 if (s->suspend_cause)
804 return source_set_state(s, PA_SOURCE_SUSPENDED);
806 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
809 /* Called from main context */
810 int pa_source_sync_suspend(pa_source *s) {
811 pa_sink_state_t state;
813 pa_source_assert_ref(s);
814 pa_assert_ctl_context();
815 pa_assert(PA_SOURCE_IS_LINKED(s->state));
816 pa_assert(s->monitor_of);
818 state = pa_sink_get_state(s->monitor_of);
820 if (state == PA_SINK_SUSPENDED)
821 return source_set_state(s, PA_SOURCE_SUSPENDED);
823 pa_assert(PA_SINK_IS_OPENED(state));
825 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
828 /* Called from main context */
829 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
830 pa_source_output *o, *n;
833 pa_source_assert_ref(s);
834 pa_assert_ctl_context();
835 pa_assert(PA_SOURCE_IS_LINKED(s->state));
840 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
841 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
843 pa_source_output_ref(o);
845 if (pa_source_output_start_move(o) >= 0)
848 pa_source_output_unref(o);
854 /* Called from main context */
855 void pa_source_move_all_finish(pa_source *s, pa_queue *q, bool save) {
858 pa_source_assert_ref(s);
859 pa_assert_ctl_context();
860 pa_assert(PA_SOURCE_IS_LINKED(s->state));
863 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
864 if (pa_source_output_finish_move(o, s, save) < 0)
865 pa_source_output_fail_move(o);
867 pa_source_output_unref(o);
870 pa_queue_free(q, NULL);
873 /* Called from main context */
874 void pa_source_move_all_fail(pa_queue *q) {
877 pa_assert_ctl_context();
880 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
881 pa_source_output_fail_move(o);
882 pa_source_output_unref(o);
885 pa_queue_free(q, NULL);
888 /* Called from IO thread context */
889 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
893 pa_source_assert_ref(s);
894 pa_source_assert_io_context(s);
895 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
900 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
903 pa_log_debug("Processing rewind...");
905 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
906 pa_source_output_assert_ref(o);
907 pa_source_output_process_rewind(o, nbytes);
911 /* Called from IO thread context */
912 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
916 pa_source_assert_ref(s);
917 pa_source_assert_io_context(s);
918 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
921 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
924 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
925 pa_memchunk vchunk = *chunk;
927 pa_memblock_ref(vchunk.memblock);
928 pa_memchunk_make_writable(&vchunk, 0);
930 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
931 pa_silence_memchunk(&vchunk, &s->sample_spec);
933 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
935 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
936 pa_source_output_assert_ref(o);
938 if (!o->thread_info.direct_on_input)
939 pa_source_output_push(o, &vchunk);
942 pa_memblock_unref(vchunk.memblock);
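/* Note that the soft volume/mute is applied to a private copy: the memblock
 * is ref'd and made writable above, so the chunk handed in by the caller is
 * never modified in place. */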
945 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
946 pa_source_output_assert_ref(o);
948 if (!o->thread_info.direct_on_input)
949 pa_source_output_push(o, chunk);
954 /* Called from IO thread context */
955 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
956 pa_source_assert_ref(s);
957 pa_source_assert_io_context(s);
958 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
959 pa_source_output_assert_ref(o);
960 pa_assert(o->thread_info.direct_on_input);
963 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
966 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
967 pa_memchunk vchunk = *chunk;
969 pa_memblock_ref(vchunk.memblock);
970 pa_memchunk_make_writable(&vchunk, 0);
972 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
973 pa_silence_memchunk(&vchunk, &s->sample_spec);
975 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
977 pa_source_output_push(o, &vchunk);
979 pa_memblock_unref(vchunk.memblock);
981 pa_source_output_push(o, chunk);
984 /* Called from main thread */
985 int pa_source_update_rate(pa_source *s, uint32_t rate, bool passthrough) {
987 uint32_t desired_rate;
988 uint32_t default_rate = s->default_sample_rate;
989 uint32_t alternate_rate = s->alternate_sample_rate;
990 bool default_rate_is_usable = false;
991 bool alternate_rate_is_usable = false;
992 bool avoid_resampling = s->core->avoid_resampling;
994 if (rate == s->sample_spec.rate)
997 if (!s->update_rate && !s->monitor_of)
1000 if (PA_UNLIKELY(default_rate == alternate_rate && !passthrough && !avoid_resampling)) {
1001 pa_log_debug("Default and alternate sample rates are the same, so there is no point in switching.");
1005 if (PA_SOURCE_IS_RUNNING(s->state)) {
1006 pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
1007 s->sample_spec.rate);
1011 if (s->monitor_of) {
1012 if (PA_SINK_IS_RUNNING(s->monitor_of->state)) {
1013 pa_log_info("Cannot update rate, this is a monitor source and the sink is running.");
1018 if (PA_UNLIKELY(!pa_sample_rate_valid(rate)))
1022 /* We have to try to use the source output rate */
1023 desired_rate = rate;
1025 } else if (avoid_resampling && (rate >= default_rate || rate >= alternate_rate)) {
1026 /* We just try to set the source output's sample rate if it's not too low */
1027 desired_rate = rate;
1029 } else if (default_rate == rate || alternate_rate == rate) {
1030 /* We can directly try to use this rate */
1031 desired_rate = rate;
1034 /* See if we can pick a rate that results in less resampling effort */
1035 if (default_rate % 11025 == 0 && rate % 11025 == 0)
1036 default_rate_is_usable = true;
1037 if (default_rate % 4000 == 0 && rate % 4000 == 0)
1038 default_rate_is_usable = true;
1039 if (alternate_rate && alternate_rate % 11025 == 0 && rate % 11025 == 0)
1040 alternate_rate_is_usable = true;
1041 if (alternate_rate && alternate_rate % 4000 == 0 && rate % 4000 == 0)
1042 alternate_rate_is_usable = true;
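/* Worked example (illustrative): with default_rate 48000, alternate_rate
 * 44100 and a stream asking for 22050 Hz, the default rate fails both tests
 * (48000 % 11025 != 0 and 22050 % 4000 != 0) while the alternate rate passes
 * the 11025 test on both sides, so desired_rate becomes 44100 and the
 * remaining resampling is a clean 1:2 ratio. */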
1044 if (alternate_rate_is_usable && !default_rate_is_usable)
1045 desired_rate = alternate_rate;
1047 desired_rate = default_rate;
1050 if (desired_rate == s->sample_spec.rate)
1053 if (!passthrough && pa_source_used_by(s) > 0)
1056 pa_log_debug("Suspending source %s due to changing the sample rate.", s->name);
1057 pa_source_suspend(s, true, PA_SUSPEND_INTERNAL);
1060 ret = s->update_rate(s, desired_rate);
1062 /* This is a monitor source. */
1064 /* XXX: This code is written with non-passthrough streams in mind. I
1065 * have no idea whether the behaviour with passthrough streams is sensible. */
1068 uint32_t old_rate = s->sample_spec.rate;
1070 s->sample_spec.rate = desired_rate;
1071 ret = pa_sink_update_rate(s->monitor_of, desired_rate, false);
1074 /* Changing the sink rate failed, roll back the old rate for
1075 * the monitor source. Why did we set the source rate before
1076 * calling pa_sink_update_rate(), you may ask. The reason is
1077 * that pa_sink_update_rate() tries to update the monitor
1078 * source rate, but we are already in the process of updating
1079 * the monitor source rate, so there's a risk of entering an
1080 * infinite loop. Setting the source rate before calling
1081 * pa_sink_update_rate() makes the rate == s->sample_spec.rate
1082 * check at the beginning of this function return early, so we avoid the infinite loop. */
1084 s->sample_spec.rate = old_rate;
1092 pa_source_output *o;
1094 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1095 if (o->state == PA_SOURCE_OUTPUT_CORKED)
1096 pa_source_output_update_rate(o);
1099 pa_log_info("Changed sampling rate successfully");
1102 pa_source_suspend(s, false, PA_SUSPEND_INTERNAL);
1107 /* Called from main thread */
1108 pa_usec_t pa_source_get_latency(pa_source *s) {
1111 pa_source_assert_ref(s);
1112 pa_assert_ctl_context();
1113 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1115 if (s->state == PA_SOURCE_SUSPENDED)
1118 if (!(s->flags & PA_SOURCE_LATENCY))
1121 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
1123 /* The return value is unsigned, so check that the offset can be added to usec without the sum going negative. */
1125 if (-s->port_latency_offset <= usec)
1126 usec += s->port_latency_offset;
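/* For example, a raw latency of 5000 usec with a port offset of -2000 usec
 * yields 3000 usec, whereas an offset of -7000 usec would push the signed
 * value below zero, so the guard above skips the addition instead of letting
 * the unsigned return value wrap around. */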
1130 return (pa_usec_t)usec;
1133 /* Called from IO thread */
1134 int64_t pa_source_get_latency_within_thread(pa_source *s, bool allow_negative) {
1138 pa_source_assert_ref(s);
1139 pa_source_assert_io_context(s);
1140 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1142 /* The returned value is supposed to be in the time domain of the sound card! */
1144 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
1147 if (!(s->flags & PA_SOURCE_LATENCY))
1150 o = PA_MSGOBJECT(s);
1152 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
1154 o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL);
1156 /* If allow_negative is false, the call should only return non-negative values. */
1157 usec += s->thread_info.port_latency_offset;
1158 if (!allow_negative && usec < 0)
1164 /* Called from the main thread (and also from the IO thread while the main
1165 * thread is waiting).
1167 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
1168 * set. Instead, flat volume mode is detected by checking whether the root source
1169 * has the flag set. */
1170 bool pa_source_flat_volume_enabled(pa_source *s) {
1171 pa_source_assert_ref(s);
1173 s = pa_source_get_master(s);
1176 return (s->flags & PA_SOURCE_FLAT_VOLUME);
1181 /* Called from the main thread (and also from the IO thread while the main
1182 * thread is waiting). */
1183 pa_source *pa_source_get_master(pa_source *s) {
1184 pa_source_assert_ref(s);
1186 while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1187 if (PA_UNLIKELY(!s->output_from_master))
1190 s = s->output_from_master->source;
1196 /* Called from main context */
1197 bool pa_source_is_filter(pa_source *s) {
1198 pa_source_assert_ref(s);
1200 return (s->output_from_master != NULL);
1203 /* Called from main context */
1204 bool pa_source_is_passthrough(pa_source *s) {
1206 pa_source_assert_ref(s);
1208 /* NB Currently only monitor sources support passthrough mode */
1209 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
1212 /* Called from main context */
1213 void pa_source_enter_passthrough(pa_source *s) {
1216 /* Set the volume to NORM, or to the base volume if that is lower */
1217 s->saved_volume = *pa_source_get_volume(s, true);
1218 s->saved_save_volume = s->save_volume;
1220 pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
1221 pa_source_set_volume(s, &volume, true, false);
1224 /* Called from main context */
1225 void pa_source_leave_passthrough(pa_source *s) {
1226 /* Restore source volume to what it was before we entered passthrough mode */
1227 pa_source_set_volume(s, &s->saved_volume, true, s->saved_save_volume);
1229 pa_cvolume_init(&s->saved_volume);
1230 s->saved_save_volume = false;
1233 /* Called from main context. */
1234 static void compute_reference_ratio(pa_source_output *o) {
1236 pa_cvolume remapped;
1240 pa_assert(pa_source_flat_volume_enabled(o->source));
1243 * Calculates the reference ratio from the source's reference
1244 * volume. This basically calculates:
1246 * o->reference_ratio = o->volume / o->source->reference_volume
1249 remapped = o->source->reference_volume;
1250 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
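/* Worked example (illustrative, using the fixed-point pa_volume_t scale where
 * PA_VOLUME_NORM == 0x10000): if o->volume.values[c] is PA_VOLUME_NORM/2 and
 * the remapped reference volume is PA_VOLUME_NORM, pa_sw_volume_divide()
 * below stores a ratio of PA_VOLUME_NORM/2; multiplying that ratio back onto
 * the reference volume reproduces the output volume, which is exactly the
 * invariant the "don't update unless necessary" check relies on. */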
1252 ratio = o->reference_ratio;
1254 for (c = 0; c < o->sample_spec.channels; c++) {
1256 /* We don't update when the source volume is 0 anyway */
1257 if (remapped.values[c] <= PA_VOLUME_MUTED)
1260 /* Don't update the reference ratio unless necessary */
1261 if (pa_sw_volume_multiply(
1263 remapped.values[c]) == o->volume.values[c])
1266 ratio.values[c] = pa_sw_volume_divide(
1267 o->volume.values[c],
1268 remapped.values[c]);
1271 pa_source_output_set_reference_ratio(o, &ratio);
1274 /* Called from main context. Only called for the root source in volume sharing
1275 * cases, except for internal recursive calls. */
1276 static void compute_reference_ratios(pa_source *s) {
1278 pa_source_output *o;
1280 pa_source_assert_ref(s);
1281 pa_assert_ctl_context();
1282 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1283 pa_assert(pa_source_flat_volume_enabled(s));
1285 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1286 compute_reference_ratio(o);
1288 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1289 compute_reference_ratios(o->destination_source);
1293 /* Called from main context. Only called for the root source in volume sharing
1294 * cases, except for internal recursive calls. */
1295 static void compute_real_ratios(pa_source *s) {
1296 pa_source_output *o;
1299 pa_source_assert_ref(s);
1300 pa_assert_ctl_context();
1301 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1302 pa_assert(pa_source_flat_volume_enabled(s));
1304 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1306 pa_cvolume remapped;
1308 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1309 /* The origin source uses volume sharing, so this output's real ratio
1310 * is handled as a special case - the real ratio must be 0 dB, and
1311 * as a result o->soft_volume must equal o->volume_factor. */
1312 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1313 o->soft_volume = o->volume_factor;
1315 compute_real_ratios(o->destination_source);
1321 * This basically calculates:
1323 * o->real_ratio := o->volume / s->real_volume
1324 * o->soft_volume := o->real_ratio * o->volume_factor
1327 remapped = s->real_volume;
1328 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
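/* Illustrative numbers (same fixed-point scale): if o->volume.values[c] is
 * PA_VOLUME_NORM while the remapped s->real_volume is only two thirds of
 * PA_VOLUME_NORM, the real ratio below comes out at roughly
 * 1.5 * PA_VOLUME_NORM, i.e. the gain missing from the real (hardware) volume
 * is made up in software; soft_volume is then that ratio scaled by
 * o->volume_factor. */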
1330 o->real_ratio.channels = o->sample_spec.channels;
1331 o->soft_volume.channels = o->sample_spec.channels;
1333 for (c = 0; c < o->sample_spec.channels; c++) {
1335 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1336 /* We leave o->real_ratio untouched */
1337 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1341 /* Don't lose accuracy unless necessary */
1342 if (pa_sw_volume_multiply(
1343 o->real_ratio.values[c],
1344 remapped.values[c]) != o->volume.values[c])
1346 o->real_ratio.values[c] = pa_sw_volume_divide(
1347 o->volume.values[c],
1348 remapped.values[c]);
1350 o->soft_volume.values[c] = pa_sw_volume_multiply(
1351 o->real_ratio.values[c],
1352 o->volume_factor.values[c]);
1355 /* We don't copy the soft_volume to the thread_info data
1356 * here. That must be done by the caller */
1360 static pa_cvolume *cvolume_remap_minimal_impact(
1362 const pa_cvolume *template,
1363 const pa_channel_map *from,
1364 const pa_channel_map *to) {
1369 pa_assert(template);
1372 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1373 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1375 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1376 * mapping from source output to source volumes:
1378 * If template is a possible remapping from v it is used instead
1379 * of remapping anew.
1381 * If the channel maps don't match we set an all-channel volume on
1382 * the source to ensure that changing a volume on one stream has no
1383 * effect that cannot be compensated for in another stream that
1384 * does not have the same channel map as the source. */
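/* Example of the fallback case (illustrative): remapping a stereo output
 * volume of { 100%, 50% } onto a mono source can neither be done losslessly
 * nor matched by the template, so the source ends up with a single-channel
 * volume equal to the channel maximum (100%); the per-channel difference is
 * then expressed in the output's own ratios instead of in the source
 * volume. */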
1386 if (pa_channel_map_equal(from, to))
1390 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1395 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1399 /* Called from main thread. Only called for the root source in volume sharing
1400 * cases, except for internal recursive calls. */
1401 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1402 pa_source_output *o;
1405 pa_source_assert_ref(s);
1406 pa_assert(max_volume);
1407 pa_assert(channel_map);
1408 pa_assert(pa_source_flat_volume_enabled(s));
1410 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1411 pa_cvolume remapped;
1413 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1414 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1416 /* Ignore this output. The origin source uses volume sharing, so this
1417 * output's volume will be set to be equal to the root source's real
1418 * volume. Obviously this output's current volume must not then
1419 * affect what the root source's real volume will be. */
1423 remapped = o->volume;
1424 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1425 pa_cvolume_merge(max_volume, max_volume, &remapped);
1429 /* Called from main thread. Only called for the root source in volume sharing
1430 * cases, except for internal recursive calls. */
1431 static bool has_outputs(pa_source *s) {
1432 pa_source_output *o;
1435 pa_source_assert_ref(s);
1437 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1438 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1445 /* Called from main thread. Only called for the root source in volume sharing
1446 * cases, except for internal recursive calls. */
1447 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1448 pa_source_output *o;
1451 pa_source_assert_ref(s);
1452 pa_assert(new_volume);
1453 pa_assert(channel_map);
1455 s->real_volume = *new_volume;
1456 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1458 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1459 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1460 if (pa_source_flat_volume_enabled(s)) {
1461 pa_cvolume new_output_volume;
1463 /* Follow the root source's real volume. */
1464 new_output_volume = *new_volume;
1465 pa_cvolume_remap(&new_output_volume, channel_map, &o->channel_map);
1466 pa_source_output_set_volume_direct(o, &new_output_volume);
1467 compute_reference_ratio(o);
1470 update_real_volume(o->destination_source, new_volume, channel_map);
1475 /* Called from main thread. Only called for the root source in shared volume
1477 static void compute_real_volume(pa_source *s) {
1478 pa_source_assert_ref(s);
1479 pa_assert_ctl_context();
1480 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1481 pa_assert(pa_source_flat_volume_enabled(s));
1482 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1484 /* This determines the maximum volume of all streams and sets
1485 * s->real_volume accordingly. */
1487 if (!has_outputs(s)) {
1488 /* In the special case that we have no source outputs we leave the
1489 * volume unmodified. */
1490 update_real_volume(s, &s->reference_volume, &s->channel_map);
1494 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1496 /* First let's determine the new maximum volume of all outputs
1497 * connected to this source */
1498 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1499 update_real_volume(s, &s->real_volume, &s->channel_map);
1501 /* Then, let's update the real ratios/soft volumes of all outputs
1502 * connected to this source */
1503 compute_real_ratios(s);
1506 /* Called from main thread. Only called for the root source in shared volume
1507 * cases, except for internal recursive calls. */
1508 static void propagate_reference_volume(pa_source *s) {
1509 pa_source_output *o;
1512 pa_source_assert_ref(s);
1513 pa_assert_ctl_context();
1514 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1515 pa_assert(pa_source_flat_volume_enabled(s));
1517 /* This is called whenever the source volume changes for a reason other
1518 * than a source output volume change. We need to fix up the
1519 * source output volumes accordingly. */
1521 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1522 pa_cvolume new_volume;
1524 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1525 propagate_reference_volume(o->destination_source);
1527 /* Since the origin source uses volume sharing, this output's volume
1528 * needs to be updated to match the root source's real volume, but
1529 * that will be done later in update_shared_real_volume(). */
1533 /* This basically calculates:
1535 * o->volume := s->reference_volume * o->reference_ratio */
1537 new_volume = s->reference_volume;
1538 pa_cvolume_remap(&new_volume, &s->channel_map, &o->channel_map);
1539 pa_sw_cvolume_multiply(&new_volume, &new_volume, &o->reference_ratio);
1540 pa_source_output_set_volume_direct(o, &new_volume);
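/* Illustrative: with s->reference_volume at 80% of PA_VOLUME_NORM and a
 * stored reference ratio of 50%, the multiplication above yields an output
 * volume of roughly 40% of PA_VOLUME_NORM - the stream keeps its position
 * relative to the source volume it is following. */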
1544 /* Called from main thread. Only called for the root source in volume sharing
1545 * cases, except for internal recursive calls. The return value indicates
1546 * whether any reference volume actually changed. */
1547 static bool update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, bool save) {
1549 bool reference_volume_changed;
1550 pa_source_output *o;
1553 pa_source_assert_ref(s);
1554 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1556 pa_assert(channel_map);
1557 pa_assert(pa_cvolume_valid(v));
1560 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1562 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1563 pa_source_set_reference_volume_direct(s, &volume);
1565 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1567 if (!reference_volume_changed && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1568 /* If the root source's volume doesn't change, then there can't be any
1569 * changes in the other sources in the source tree either.
1571 * It's probably theoretically possible that even if the root source's
1572 * volume changes slightly, some filter source doesn't change its volume
1573 * due to rounding errors. If that happens, we still want to propagate
1574 * the changed root source volume to the sources connected to the
1575 * intermediate source that didn't change its volume. This theoretical
1576 * possibility is the reason why we have that !(s->flags &
1577 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1578 * notice even if we always returned false here whenever
1579 * reference_volume_changed is false. */
1582 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1583 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1584 update_reference_volume(o->destination_source, v, channel_map, false);
1590 /* Called from main thread */
1591 void pa_source_set_volume(
1593 const pa_cvolume *volume,
1597 pa_cvolume new_reference_volume, root_real_volume;
1598 pa_source *root_source;
1600 pa_source_assert_ref(s);
1601 pa_assert_ctl_context();
1602 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1603 pa_assert(!volume || pa_cvolume_valid(volume));
1604 pa_assert(volume || pa_source_flat_volume_enabled(s));
1605 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1607 /* make sure we don't change the volume in PASSTHROUGH mode ...
1608 * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
1609 if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
1610 pa_log_warn("Cannot change volume, source is monitor of a PASSTHROUGH sink");
1614 /* In case of volume sharing, the volume is set for the root source first,
1615 * from which it's then propagated to the sharing sources. */
1616 root_source = pa_source_get_master(s);
1618 if (PA_UNLIKELY(!root_source))
1621 /* As a special exception we accept mono volumes on all sources --
1622 * even on those with more complex channel maps */
1625 if (pa_cvolume_compatible(volume, &s->sample_spec))
1626 new_reference_volume = *volume;
1628 new_reference_volume = s->reference_volume;
1629 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1632 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1634 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1635 if (pa_source_flat_volume_enabled(root_source)) {
1636 /* OK, propagate this volume change back to the outputs */
1637 propagate_reference_volume(root_source);
1639 /* And now recalculate the real volume */
1640 compute_real_volume(root_source);
1642 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1646 /* If volume is NULL we synchronize the source's real and
1647 * reference volumes with the stream volumes. */
1649 pa_assert(pa_source_flat_volume_enabled(root_source));
1651 /* Ok, let's determine the new real volume */
1652 compute_real_volume(root_source);
1654 /* To propagate the reference volume from the filter to the root source,
1655 * we first take the real volume from the root source and remap it to
1656 * match the filter. Then, we merge in the reference volume from the
1657 * filter on top of this, and remap it back to the root source channel map. */
1659 root_real_volume = root_source->real_volume;
1660 /* First we remap root's real volume to filter channel count and map if needed */
1661 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1662 pa_cvolume_remap(&root_real_volume, &root_source->channel_map, &s->channel_map);
1663 /* Then let's 'push' the reference volume if necessary */
1664 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_real_volume);
1665 /* If the source and its root don't have the same number of channels, we need to remap back */
1666 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1667 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1669 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1671 /* Now that the reference volume is updated, we can update the streams'
1672 * reference ratios. */
1673 compute_reference_ratios(root_source);
1676 if (root_source->set_volume) {
1677 /* If we have a function set_volume(), then we do not apply a
1678 * soft volume by default. However, set_volume() is free to
1679 * apply one to root_source->soft_volume */
1681 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1682 if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
1683 root_source->set_volume(root_source);
1686 /* If we have no function set_volume(), then the soft volume
1687 * becomes the real volume */
1688 root_source->soft_volume = root_source->real_volume;
1690 /* This tells the source that soft volume and/or real volume changed */
1692 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1695 /* Called from the io thread if sync volume is used, otherwise from the main thread.
1696 * Only to be called by source implementor */
1697 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1699 pa_source_assert_ref(s);
1700 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1702 if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
1703 pa_source_assert_io_context(s);
1705 pa_assert_ctl_context();
1708 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1710 s->soft_volume = *volume;
1712 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
1713 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1715 s->thread_info.soft_volume = s->soft_volume;
1718 /* Called from the main thread. Only called for the root source in volume sharing
1719 * cases, except for internal recursive calls. */
1720 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1721 pa_source_output *o;
1724 pa_source_assert_ref(s);
1725 pa_assert(old_real_volume);
1726 pa_assert_ctl_context();
1727 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1729 /* This is called when the hardware's real volume changes due to
1730 * some external event. We copy the real volume into our
1731 * reference volume and then rebuild the stream volumes based on
1732 * o->real_ratio which should stay fixed. */
1734 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1735 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1738 /* 1. Make the real volume the reference volume */
1739 update_reference_volume(s, &s->real_volume, &s->channel_map, true);
1742 if (pa_source_flat_volume_enabled(s)) {
1743 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1744 pa_cvolume new_volume;
1746 /* 2. Since the source's reference and real volumes are equal
1747 * now our ratios should be too. */
1748 pa_source_output_set_reference_ratio(o, &o->real_ratio);
1750 /* 3. Recalculate the new stream reference volume based on the
1751 * reference ratio and the source's reference volume.
1753 * This basically calculates:
1755 * o->volume = s->reference_volume * o->reference_ratio
1757 * This is identical to propagate_reference_volume() */
1758 new_volume = s->reference_volume;
1759 pa_cvolume_remap(&new_volume, &s->channel_map, &o->channel_map);
1760 pa_sw_cvolume_multiply(&new_volume, &new_volume, &o->reference_ratio);
1761 pa_source_output_set_volume_direct(o, &new_volume);
1763 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1764 propagate_real_volume(o->destination_source, old_real_volume);
1768 /* Something changed in the hardware. It probably makes sense
1769 * to save changed hw settings given that hw volume changes not
1770 * triggered by PA are almost certainly done by the user. */
1771 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1772 s->save_volume = true;
1775 /* Called from io thread */
1776 void pa_source_update_volume_and_mute(pa_source *s) {
1778 pa_source_assert_io_context(s);
1780 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1783 /* Called from main thread */
1784 const pa_cvolume *pa_source_get_volume(pa_source *s, bool force_refresh) {
1785 pa_source_assert_ref(s);
1786 pa_assert_ctl_context();
1787 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1789 if (s->refresh_volume || force_refresh) {
1790 struct pa_cvolume old_real_volume;
1792 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1794 old_real_volume = s->real_volume;
1796 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)
1799 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1801 update_real_volume(s, &s->real_volume, &s->channel_map);
1802 propagate_real_volume(s, &old_real_volume);
1805 return &s->reference_volume;
1808 /* Called from main thread. In volume sharing cases, only the root source may
1810 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1811 pa_cvolume old_real_volume;
1813 pa_source_assert_ref(s);
1814 pa_assert_ctl_context();
1815 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1816 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1818 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1820 old_real_volume = s->real_volume;
1821 update_real_volume(s, new_real_volume, &s->channel_map);
1822 propagate_real_volume(s, &old_real_volume);
1825 /* Called from main thread */
1826 void pa_source_set_mute(pa_source *s, bool mute, bool save) {
1829 pa_source_assert_ref(s);
1830 pa_assert_ctl_context();
1832 old_muted = s->muted;
1834 if (mute == old_muted) {
1835 s->save_muted |= save;
1840 s->save_muted = save;
1842 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute) {
1843 s->set_mute_in_progress = true;
1845 s->set_mute_in_progress = false;
1848 if (!PA_SOURCE_IS_LINKED(s->state))
1851 pa_log_debug("The mute of source %s changed from %s to %s.", s->name, pa_yes_no(old_muted), pa_yes_no(mute));
1852 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1853 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1854 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_MUTE_CHANGED], s);
1857 /* Called from main thread */
1858 bool pa_source_get_mute(pa_source *s, bool force_refresh) {
1860 pa_source_assert_ref(s);
1861 pa_assert_ctl_context();
1862 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1864 if ((s->refresh_muted || force_refresh) && s->get_mute) {
1867 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
1868 if (pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, &mute, 0, NULL) >= 0)
1869 pa_source_mute_changed(s, mute);
1871 if (s->get_mute(s, &mute) >= 0)
1872 pa_source_mute_changed(s, mute);
1879 /* Called from main thread */
1880 void pa_source_mute_changed(pa_source *s, bool new_muted) {
1881 pa_source_assert_ref(s);
1882 pa_assert_ctl_context();
1883 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1885 if (s->set_mute_in_progress)
1888 /* pa_source_set_mute() does this same check, so this may appear redundant,
1889 * but we must have this here also, because the save parameter of
1890 * pa_source_set_mute() would otherwise have unintended side effects
1891 * (saving the mute state when it shouldn't be saved). */
1892 if (new_muted == s->muted)
1895 pa_source_set_mute(s, new_muted, true);
1898 /* Called from main thread */
1899 bool pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1900 pa_source_assert_ref(s);
1901 pa_assert_ctl_context();
1904 pa_proplist_update(s->proplist, mode, p);
1906 if (PA_SOURCE_IS_LINKED(s->state)) {
1907 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1908 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1914 /* Called from main thread */
1915 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1916 void pa_source_set_description(pa_source *s, const char *description) {
1918 pa_source_assert_ref(s);
1919 pa_assert_ctl_context();
1921 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1924 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1926 if (old && description && pa_streq(old, description))
1930 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1932 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1934 if (PA_SOURCE_IS_LINKED(s->state)) {
1935 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1936 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1940 /* Called from main thread */
1941 unsigned pa_source_linked_by(pa_source *s) {
1942 pa_source_assert_ref(s);
1943 pa_assert_ctl_context();
1944 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1946 return pa_idxset_size(s->outputs);
1949 /* Called from main thread */
1950 unsigned pa_source_used_by(pa_source *s) {
1953 pa_source_assert_ref(s);
1954 pa_assert_ctl_context();
1955 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1957 ret = pa_idxset_size(s->outputs);
1958 pa_assert(ret >= s->n_corked);
1960 return ret - s->n_corked;
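/* Editorial note: pa_source_linked_by() counts every linked output,
 * pa_source_used_by() subtracts the corked ones, and pa_source_check_suspend()
 * below additionally skips outputs flagged with
 * PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND (and an optional `ignore` output),
 * which is what the auto-suspend logic uses to decide whether the source is idle. */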
1963 /* Called from main thread */
1964 unsigned pa_source_check_suspend(pa_source *s, pa_source_output *ignore) {
1966 pa_source_output *o;
1969 pa_source_assert_ref(s);
1970 pa_assert_ctl_context();
1972 if (!PA_SOURCE_IS_LINKED(s->state))
1977 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1978 pa_source_output_state_t st;
1983 st = pa_source_output_get_state(o);
1985 /* We do not assert here. It is perfectly valid for a source output to
1986 * be in the INIT state (i.e. created, marked done but not yet put)
1987 * and we should not care if it's unlinked as it won't contribute
1988 * towards our busy status. */
1990 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1993 if (st == PA_SOURCE_OUTPUT_CORKED)
1996 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
2005 /* Called from the IO thread */
2006 static void sync_output_volumes_within_thread(pa_source *s) {
2007 pa_source_output *o;
2010 pa_source_assert_ref(s);
2011 pa_source_assert_io_context(s);
2013 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
2014 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
2017 o->thread_info.soft_volume = o->soft_volume;
2018 //pa_source_output_request_rewind(o, 0, true, false, false);
2022 /* Called from the IO thread. Only called for the root source in volume sharing
2023 * cases, except for internal recursive calls. */
2024 static void set_shared_volume_within_thread(pa_source *s) {
2025 pa_source_output *o;
2028 pa_source_assert_ref(s);
2030 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
2032 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
2033 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
2034 set_shared_volume_within_thread(o->destination_source);
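/* Editorial note: the recursion above walks filter sources (outputs whose
 * destination_source shares volume with its master), so a single
 * SET_SHARED_VOLUME message on the root source refreshes the soft volumes of
 * the whole chain. */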
2038 /* Called from the IO thread, except for messages such as PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE that are handled in the main thread */
2039 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
2040 pa_source *s = PA_SOURCE(object);
2041 pa_source_assert_ref(s);
2043 switch ((pa_source_message_t) code) {
2045 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
2046 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
2048 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
2050 if (o->direct_on_input) {
2051 o->thread_info.direct_on_input = o->direct_on_input;
2052 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
2055 pa_source_output_attach(o);
2057 pa_source_output_set_state_within_thread(o, o->state);
2059 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
2060 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
2062 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2064 /* We don't just invalidate the requested latency here,
2065 * because if we are in a move we might need to fix up the
2066 * requested latency. */
2067 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
2069 /* In flat volume mode we need to update the volume as well. */
2071 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2074 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
2075 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
2077 pa_source_output_set_state_within_thread(o, o->state);
2079 pa_source_output_detach(o);
2081 if (o->thread_info.direct_on_input) {
2082 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
2083 o->thread_info.direct_on_input = NULL;
2086 pa_hashmap_remove_and_free(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index));
2087 pa_source_invalidate_requested_latency(s, true);
2089 /* In flat volume mode we need to update the volume as well. */
2091 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2094 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
2095 pa_source *root_source = pa_source_get_master(s);
2097 if (PA_LIKELY(root_source))
2098 set_shared_volume_within_thread(root_source);
2103 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
2105 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2107 pa_source_volume_change_push(s);
2109 /* Fall through ... */
2111 case PA_SOURCE_MESSAGE_SET_VOLUME:
2113 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2114 s->thread_info.soft_volume = s->soft_volume;
2117 /* Fall through ... */
2119 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
2120 sync_output_volumes_within_thread(s);
2123 case PA_SOURCE_MESSAGE_GET_VOLUME:
2125 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2127 pa_source_volume_change_flush(s);
2128 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2131 /* In case the source implementor reset the SW volume. */
2132 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2133 s->thread_info.soft_volume = s->soft_volume;
2138 case PA_SOURCE_MESSAGE_SET_MUTE:
2140 if (s->thread_info.soft_muted != s->muted) {
2141 s->thread_info.soft_muted = s->muted;
2144 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->set_mute)
2149 case PA_SOURCE_MESSAGE_GET_MUTE:
2151 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->get_mute)
2152 return s->get_mute(s, userdata);
2156 case PA_SOURCE_MESSAGE_SET_STATE: {
2158 bool suspend_change =
2159 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2160 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2162 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2164 if (suspend_change) {
2165 pa_source_output *o;
2168 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2169 if (o->suspend_within_thread)
2170 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2176 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2178 pa_usec_t *usec = userdata;
2179 *usec = pa_source_get_requested_latency_within_thread(s);
2181 /* Yes, that's right: the IO thread will see -1 when no
2182 * explicit requested latency is configured, while the main
2183 * thread will see max_latency */
2184 if (*usec == (pa_usec_t) -1)
2185 *usec = s->thread_info.max_latency;
2190 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2191 pa_usec_t *r = userdata;
2193 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2198 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2199 pa_usec_t *r = userdata;
2201 r[0] = s->thread_info.min_latency;
2202 r[1] = s->thread_info.max_latency;
2207 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2209 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2212 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2214 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2217 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2219 *((size_t*) userdata) = s->thread_info.max_rewind;
2222 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2224 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2227 case PA_SOURCE_MESSAGE_GET_LATENCY:
2229 if (s->monitor_of) {
2230 *((int64_t*) userdata) = -pa_sink_get_latency_within_thread(s->monitor_of, true);
2234 /* Implementors need to override this implementation! */
2237 case PA_SOURCE_MESSAGE_SET_PORT:
2239 pa_assert(userdata);
2241 struct source_message_set_port *msg_data = userdata;
2242 msg_data->ret = s->set_port(s, msg_data->port);
2246 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2247 /* This message is sent from the IO thread and handled in the main thread. */
2248 pa_assert_ctl_context();
2250 /* Make sure we're not doing work on the main thread once the source is no longer linked */
2251 if (!PA_SOURCE_IS_LINKED(s->state))
2254 pa_source_get_volume(s, true);
2255 pa_source_get_mute(s, true);
2258 case PA_SOURCE_MESSAGE_SET_PORT_LATENCY_OFFSET:
2259 s->thread_info.port_latency_offset = offset;
2262 case PA_SOURCE_MESSAGE_MAX:
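/* Editorial sketch: concrete source implementations normally wrap this handler,
 * defining their own message codes starting at PA_SOURCE_MESSAGE_MAX and falling
 * back to pa_source_process_msg() for everything else. Hypothetical driver code
 * (names are illustrative only):
 *
 *     enum { SOURCE_MESSAGE_MY_EVENT = PA_SOURCE_MESSAGE_MAX };
 *
 *     static int my_process_msg(pa_msgobject *o, int code, void *data,
 *                               int64_t offset, pa_memchunk *chunk) {
 *         switch (code) {
 *             case SOURCE_MESSAGE_MY_EVENT:
 *                 // driver-specific handling in the IO thread
 *                 return 0;
 *         }
 *         return pa_source_process_msg(o, code, data, offset, chunk);
 *     }
 */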
2269 /* Called from main thread */
2270 int pa_source_suspend_all(pa_core *c, bool suspend, pa_suspend_cause_t cause) {
2275 pa_core_assert_ref(c);
2276 pa_assert_ctl_context();
2277 pa_assert(cause != 0);
2279 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2282 if (source->monitor_of)
2285 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2292 /* Called from IO thread */
2293 void pa_source_detach_within_thread(pa_source *s) {
2294 pa_source_output *o;
2297 pa_source_assert_ref(s);
2298 pa_source_assert_io_context(s);
2299 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2301 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2302 pa_source_output_detach(o);
2305 /* Called from IO thread */
2306 void pa_source_attach_within_thread(pa_source *s) {
2307 pa_source_output *o;
2310 pa_source_assert_ref(s);
2311 pa_source_assert_io_context(s);
2312 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2314 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2315 pa_source_output_attach(o);
2318 /* Called from IO thread */
2319 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2320 pa_usec_t result = (pa_usec_t) -1;
2321 pa_source_output *o;
2324 pa_source_assert_ref(s);
2325 pa_source_assert_io_context(s);
2327 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2328 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2330 if (s->thread_info.requested_latency_valid)
2331 return s->thread_info.requested_latency;
2333 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2334 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2335 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2336 result = o->thread_info.requested_source_latency;
2338 if (result != (pa_usec_t) -1)
2339 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2341 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2342 /* Only cache this if we are fully set up */
2343 s->thread_info.requested_latency = result;
2344 s->thread_info.requested_latency_valid = true;
2350 /* Called from main thread */
2351 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2354 pa_source_assert_ref(s);
2355 pa_assert_ctl_context();
2356 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2358 if (s->state == PA_SOURCE_SUSPENDED)
2361 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2366 /* Called from IO thread */
2367 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2368 pa_source_output *o;
2371 pa_source_assert_ref(s);
2372 pa_source_assert_io_context(s);
2374 if (max_rewind == s->thread_info.max_rewind)
2377 s->thread_info.max_rewind = max_rewind;
2379 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2380 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2381 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2384 /* Called from main thread */
2385 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2386 pa_source_assert_ref(s);
2387 pa_assert_ctl_context();
2389 if (PA_SOURCE_IS_LINKED(s->state))
2390 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2392 pa_source_set_max_rewind_within_thread(s, max_rewind);
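/* Editorial note: this is the recurring pattern for thread-safe setters in this
 * file. Once the source is linked, the new value is handed to the IO thread via
 * an asyncmsgq message; before pa_source_put() the IO thread is not running yet,
 * so the *_within_thread() variant can be called directly from the main thread.
 * pa_source_set_latency_range(), pa_source_set_fixed_latency() and
 * pa_source_set_port_latency_offset() below follow the same scheme. */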
2395 /* Called from IO thread */
2396 void pa_source_invalidate_requested_latency(pa_source *s, bool dynamic) {
2397 pa_source_output *o;
2400 pa_source_assert_ref(s);
2401 pa_source_assert_io_context(s);
2403 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2404 s->thread_info.requested_latency_valid = false;
2408 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2410 if (s->update_requested_latency)
2411 s->update_requested_latency(s);
2413 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2414 if (o->update_source_requested_latency)
2415 o->update_source_requested_latency(o);
2419 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2422 /* Called from main thread */
2423 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2424 pa_source_assert_ref(s);
2425 pa_assert_ctl_context();
2427 /* min_latency == 0: no limit
2428 * min_latency anything else: specified limit
2430 * Similar for max_latency */
2432 if (min_latency < ABSOLUTE_MIN_LATENCY)
2433 min_latency = ABSOLUTE_MIN_LATENCY;
2435 if (max_latency <= 0 ||
2436 max_latency > ABSOLUTE_MAX_LATENCY)
2437 max_latency = ABSOLUTE_MAX_LATENCY;
2439 pa_assert(min_latency <= max_latency);
2441 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2442 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2443 max_latency == ABSOLUTE_MAX_LATENCY) ||
2444 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2446 if (PA_SOURCE_IS_LINKED(s->state)) {
2452 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2454 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
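/* A minimal usage sketch (hypothetical driver code, typically run before
 * pa_source_put()), assuming the source advertises PA_SOURCE_DYNAMIC_LATENCY:
 *
 *     pa_source_set_latency_range(s, 5 * PA_USEC_PER_MSEC, 200 * PA_USEC_PER_MSEC);
 *
 * Passing 0 for either bound means "no limit"; values are clamped to
 * ABSOLUTE_MIN_LATENCY and ABSOLUTE_MAX_LATENCY. */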
2457 /* Called from main thread */
2458 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2459 pa_source_assert_ref(s);
2460 pa_assert_ctl_context();
2461 pa_assert(min_latency);
2462 pa_assert(max_latency);
2464 if (PA_SOURCE_IS_LINKED(s->state)) {
2465 pa_usec_t r[2] = { 0, 0 };
2467 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2469 *min_latency = r[0];
2470 *max_latency = r[1];
2472 *min_latency = s->thread_info.min_latency;
2473 *max_latency = s->thread_info.max_latency;
2477 /* Called from IO thread, and from main thread before pa_source_put() is called */
2478 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2479 pa_source_assert_ref(s);
2480 pa_source_assert_io_context(s);
2482 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2483 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2484 pa_assert(min_latency <= max_latency);
2486 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2487 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2488 max_latency == ABSOLUTE_MAX_LATENCY) ||
2489 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) || s->monitor_of);
2492 if (s->thread_info.min_latency == min_latency &&
2493 s->thread_info.max_latency == max_latency)
2496 s->thread_info.min_latency = min_latency;
2497 s->thread_info.max_latency = max_latency;
2499 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2500 pa_source_output *o;
2503 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2504 if (o->update_source_latency_range)
2505 o->update_source_latency_range(o);
2508 pa_source_invalidate_requested_latency(s, false);
2511 /* Called from main thread, before the source is put */
2512 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2513 pa_source_assert_ref(s);
2514 pa_assert_ctl_context();
2516 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2517 pa_assert(latency == 0);
2521 if (latency < ABSOLUTE_MIN_LATENCY)
2522 latency = ABSOLUTE_MIN_LATENCY;
2524 if (latency > ABSOLUTE_MAX_LATENCY)
2525 latency = ABSOLUTE_MAX_LATENCY;
2527 if (PA_SOURCE_IS_LINKED(s->state))
2528 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2530 s->thread_info.fixed_latency = latency;
2533 /* Called from main thread */
2534 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2537 pa_source_assert_ref(s);
2538 pa_assert_ctl_context();
2540 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2543 if (PA_SOURCE_IS_LINKED(s->state))
2544 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2546 latency = s->thread_info.fixed_latency;
2551 /* Called from IO thread */
2552 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2553 pa_source_assert_ref(s);
2554 pa_source_assert_io_context(s);
2556 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2557 pa_assert(latency == 0);
2558 s->thread_info.fixed_latency = 0;
2563 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2564 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2566 if (s->thread_info.fixed_latency == latency)
2569 s->thread_info.fixed_latency = latency;
2571 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2572 pa_source_output *o;
2575 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2576 if (o->update_source_fixed_latency)
2577 o->update_source_fixed_latency(o);
2580 pa_source_invalidate_requested_latency(s, false);
2583 /* Called from main thread */
2584 void pa_source_set_port_latency_offset(pa_source *s, int64_t offset) {
2585 pa_source_assert_ref(s);
2587 s->port_latency_offset = offset;
2589 if (PA_SOURCE_IS_LINKED(s->state))
2590 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT_LATENCY_OFFSET, NULL, offset, NULL) == 0);
2592 s->thread_info.port_latency_offset = offset;
2594 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_LATENCY_OFFSET_CHANGED], s);
2597 /* Called from main thread */
2598 size_t pa_source_get_max_rewind(pa_source *s) {
2600 pa_assert_ctl_context();
2601 pa_source_assert_ref(s);
2603 if (!PA_SOURCE_IS_LINKED(s->state))
2604 return s->thread_info.max_rewind;
2606 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2611 /* Called from main context */
2612 int pa_source_set_port(pa_source *s, const char *name, bool save) {
2613 pa_device_port *port;
2616 pa_source_assert_ref(s);
2617 pa_assert_ctl_context();
2620 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2621 return -PA_ERR_NOTIMPLEMENTED;
2625 return -PA_ERR_NOENTITY;
2627 if (!(port = pa_hashmap_get(s->ports, name)))
2628 return -PA_ERR_NOENTITY;
2630 if (s->active_port == port) {
2631 s->save_port = s->save_port || save;
2635 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2636 struct source_message_set_port msg = { .port = port, .ret = 0 };
2637 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2641 ret = s->set_port(s, port);
2644 return -PA_ERR_NOENTITY;
2646 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2648 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2650 s->active_port = port;
2651 s->save_port = save;
2653 /* The active port affects the default source selection. */
2654 pa_core_update_default_source(s->core);
2656 pa_source_set_port_latency_offset(s, s->active_port->latency_offset);
2658 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
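/* Editorial note: with PA_SOURCE_DEFERRED_VOLUME the port switch above is routed
 * through the IO thread (PA_SOURCE_MESSAGE_SET_PORT), presumably so that it runs
 * in the same thread as the deferred volume callbacks; otherwise set_port() is
 * called directly from the main thread. */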
2663 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2665 /* Called from the IO thread. */
2666 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2667 pa_source_volume_change *c;
2668 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2669 c = pa_xnew(pa_source_volume_change, 1);
2671 PA_LLIST_INIT(pa_source_volume_change, c);
2673 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2677 /* Called from the IO thread. */
2678 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2680 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2684 /* Called from the IO thread. */
2685 void pa_source_volume_change_push(pa_source *s) {
2686 pa_source_volume_change *c = NULL;
2687 pa_source_volume_change *nc = NULL;
2688 pa_source_volume_change *pc = NULL;
2689 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2691 const char *direction = NULL;
2694 nc = pa_source_volume_change_new(s);
2696 /* NOTE: There are already more different volumes in pa_source than I can remember.
2697 * Adding one more volume for HW would get rid of this, but I am trying
2698 * to survive with the ones we already have. */
2699 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2701 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2702 pa_log_debug("Volume not changing");
2703 pa_source_volume_change_free(nc);
2707 nc->at = pa_source_get_latency_within_thread(s, false);
2708 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2710 if (s->thread_info.volume_changes_tail) {
2711 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2712 /* If volume is going up let's do it a bit late. If it is going
2713 * down let's do it a bit early. */
2714 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2715 if (nc->at + safety_margin > c->at) {
2716 nc->at += safety_margin;
2721 else if (nc->at - safety_margin > c->at) {
2722 nc->at -= safety_margin;
2730 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2731 nc->at += safety_margin;
2734 nc->at -= safety_margin;
2737 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2740 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2743 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2745 /* We can ignore volume events that came earlier but should happen later than this. */
2746 PA_LLIST_FOREACH_SAFE(c, pc, nc->next) {
2747 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2748 pa_source_volume_change_free(c);
2751 s->thread_info.volume_changes_tail = nc;
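/* Worked example (editorial, made-up numbers): with a 30 ms source latency,
 * now = 1 000 000 us and no extra delay, a new change is first scheduled at
 * at = 1 030 000 us. With a safety margin of 8 000 us, a change whose average
 * volume is higher than the one it is compared against may be pushed back to
 * 1 038 000 us, while a lower one may be pulled forward to 1 022 000 us,
 * matching the "up a bit late, down a bit early" rule in the loop above. */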
2754 /* Called from the IO thread. */
2755 static void pa_source_volume_change_flush(pa_source *s) {
2756 pa_source_volume_change *c = s->thread_info.volume_changes;
2758 s->thread_info.volume_changes = NULL;
2759 s->thread_info.volume_changes_tail = NULL;
2761 pa_source_volume_change *next = c->next;
2762 pa_source_volume_change_free(c);
2767 /* Called from the IO thread. */
2768 bool pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2774 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2780 pa_assert(s->write_volume);
2782 now = pa_rtclock_now();
2784 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2785 pa_source_volume_change *c = s->thread_info.volume_changes;
2786 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2787 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2788 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2790 s->thread_info.current_hw_volume = c->hw_volume;
2791 pa_source_volume_change_free(c);
2797 if (s->thread_info.volume_changes) {
2799 *usec_to_next = s->thread_info.volume_changes->at - now;
2800 if (pa_log_ratelimit(PA_LOG_DEBUG))
2801 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2806 s->thread_info.volume_changes_tail = NULL;
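/* Editorial note: drivers that set PA_SOURCE_DEFERRED_VOLUME are expected to call
 * pa_source_volume_change_apply() from their IO loop; it applies every queued
 * change whose timestamp has passed (updating current_hw_volume for the
 * write_volume() path) and, through usec_to_next, tells the caller how long to
 * wait before the next pending change is due. */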
2811 /* Called from the main thread */
2812 /* Gets the list of formats supported by the source. The members and idxset must
2813 * be freed by the caller. */
2814 pa_idxset* pa_source_get_formats(pa_source *s) {
2819 if (s->get_formats) {
2820 /* Source supports format query, all is good */
2821 ret = s->get_formats(s);
2823 /* Source doesn't support format query, so assume it does PCM */
2824 pa_format_info *f = pa_format_info_new();
2825 f->encoding = PA_ENCODING_PCM;
2827 ret = pa_idxset_new(NULL, NULL);
2828 pa_idxset_put(ret, f, NULL);
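/* A minimal usage sketch (hypothetical caller code):
 *
 *     pa_idxset *formats = pa_source_get_formats(s);
 *     pa_format_info *f;
 *     uint32_t idx;
 *
 *     PA_IDXSET_FOREACH(f, formats, idx)
 *         pa_log_debug("supported encoding: %d", (int) f->encoding);
 *
 *     pa_idxset_free(formats, (pa_free_cb_t) pa_format_info_free);
 */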
2834 /* Called from the main thread */
2835 /* Checks if the source can accept this format */
2836 bool pa_source_check_format(pa_source *s, pa_format_info *f) {
2837 pa_idxset *formats = NULL;
2843 formats = pa_source_get_formats(s);
2846 pa_format_info *finfo_device;
2849 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2850 if (pa_format_info_is_compatible(finfo_device, f)) {
2856 pa_idxset_free(formats, (pa_free_cb_t) pa_format_info_free);
2862 /* Called from the main thread */
2863 /* Calculates the intersection between formats supported by the source and
2864 * in_formats, and returns these, in the order of the source's formats. */
2865 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2866 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2867 pa_format_info *f_source, *f_in;
2872 if (!in_formats || pa_idxset_isempty(in_formats))
2875 source_formats = pa_source_get_formats(s);
2877 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2878 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2879 if (pa_format_info_is_compatible(f_source, f_in))
2880 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2886 pa_idxset_free(source_formats, (pa_free_cb_t) pa_format_info_free);
2891 /* Called from the main thread. */
2892 void pa_source_set_reference_volume_direct(pa_source *s, const pa_cvolume *volume) {
2893 pa_cvolume old_volume;
2894 char old_volume_str[PA_CVOLUME_SNPRINT_VERBOSE_MAX];
2895 char new_volume_str[PA_CVOLUME_SNPRINT_VERBOSE_MAX];
2900 old_volume = s->reference_volume;
2902 if (pa_cvolume_equal(volume, &old_volume))
2905 s->reference_volume = *volume;
2906 pa_log_debug("The reference volume of source %s changed from %s to %s.", s->name,
2907 pa_cvolume_snprint_verbose(old_volume_str, sizeof(old_volume_str), &old_volume, &s->channel_map,
2908 s->flags & PA_SOURCE_DECIBEL_VOLUME),
2909 pa_cvolume_snprint_verbose(new_volume_str, sizeof(new_volume_str), volume, &s->channel_map,
2910 s->flags & PA_SOURCE_DECIBEL_VOLUME));
2912 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2913 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_VOLUME_CHANGED], s);