2 This file is part of PulseAudio.
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/mix.h>
44 #include <pulsecore/flist.h>
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
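/* Illustrative note (not part of the original file): these values are in
 * microseconds. Latencies requested by clients are eventually clamped into
 * [ABSOLUTE_MIN_LATENCY, ABSOLUTE_MAX_LATENCY], roughly like this sketch
 * (assuming the PA_CLAMP helper from pulsecore/macro.h):
 *
 *     pa_usec_t clamped = PA_CLAMP(requested, ABSOLUTE_MIN_LATENCY,
 *                                  ABSOLUTE_MAX_LATENCY);
 *
 * Sources without PA_SOURCE_DYNAMIC_LATENCY fall back to the 250 ms
 * DEFAULT_FIXED_LATENCY instead (see the fixed_latency initialization in
 * pa_source_new() below). */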
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
54 struct pa_source_volume_change {
58 PA_LLIST_FIELDS(pa_source_volume_change);
61 struct source_message_set_port {
66 static void source_free(pa_object *o);
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
75 data->proplist = pa_proplist_new();
76 data->ports = pa_hashmap_new(pa_idxset_string_hash_func, pa_idxset_string_compare_func);
81 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
85 data->name = pa_xstrdup(name);
88 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
91 if ((data->sample_spec_is_set = !!spec))
92 data->sample_spec = *spec;
95 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
98 if ((data->channel_map_is_set = !!map))
99 data->channel_map = *map;
102 void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
105 data->alternate_sample_rate_is_set = TRUE;
106 data->alternate_sample_rate = alternate_sample_rate;
109 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
112 if ((data->volume_is_set = !!volume))
113 data->volume = *volume;
116 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
119 data->muted_is_set = TRUE;
120 data->muted = !!mute;
123 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
126 pa_xfree(data->active_port);
127 data->active_port = pa_xstrdup(port);
130 void pa_source_new_data_done(pa_source_new_data *data) {
133 pa_proplist_free(data->proplist);
136 pa_device_port_hashmap_free(data->ports);
138 pa_xfree(data->name);
139 pa_xfree(data->active_port);
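/* Illustrative sketch (an assumption about typical usage, not code from this
 * file): a device module normally drives the pa_source_new_data helpers
 * above in this order when creating a source:
 *
 *     pa_source_new_data data;
 *     pa_source *s;
 *
 *     pa_source_new_data_init(&data);
 *     data.driver = __FILE__;
 *     data.module = m;
 *     pa_source_new_data_set_name(&data, "my_source");      // hypothetical name
 *     pa_source_new_data_set_sample_spec(&data, &ss);
 *     pa_source_new_data_set_channel_map(&data, &map);
 *
 *     s = pa_source_new(m->core, &data, 0);
 *     pa_source_new_data_done(&data);   // always clean up, even on failure
 *
 *     if (!s)
 *         goto fail;
 */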
142 /* Called from main context */
143 static void reset_callbacks(pa_source *s) {
147 s->get_volume = NULL;
148 s->set_volume = NULL;
149 s->write_volume = NULL;
152 s->update_requested_latency = NULL;
154 s->get_formats = NULL;
155 s->update_rate = NULL;
158 /* Called from main context */
159 pa_source* pa_source_new(
161 pa_source_new_data *data,
162 pa_source_flags_t flags) {
166 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
171 pa_assert(data->name);
172 pa_assert_ctl_context();
174 s = pa_msgobject_new(pa_source);
176 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
177 pa_log_debug("Failed to register name %s.", data->name);
182 pa_source_new_data_set_name(data, name);
184 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
186 pa_namereg_unregister(core, name);
190 /* FIXME, need to free s here on failure */
192 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
193 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
195 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
197 if (!data->channel_map_is_set)
198 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
200 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
201 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
203 /* FIXME: There should probably be a general function for checking whether
204 * the source volume is allowed to be set, like there is for source outputs. */
205 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
207 if (!data->volume_is_set) {
208 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
209 data->save_volume = FALSE;
212 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
213 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
215 if (!data->muted_is_set)
219 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
221 pa_device_init_description(data->proplist);
222 pa_device_init_icon(data->proplist, FALSE);
223 pa_device_init_intended_roles(data->proplist);
225 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
227 pa_namereg_unregister(core, name);
231 s->parent.parent.free = source_free;
232 s->parent.process_msg = pa_source_process_msg;
235 s->state = PA_SOURCE_INIT;
238 s->suspend_cause = data->suspend_cause;
239 pa_source_set_mixer_dirty(s, FALSE);
240 s->name = pa_xstrdup(name);
241 s->proplist = pa_proplist_copy(data->proplist);
242 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
243 s->module = data->module;
244 s->card = data->card;
246 s->priority = pa_device_init_priority(s->proplist);
248 s->sample_spec = data->sample_spec;
249 s->channel_map = data->channel_map;
250 s->default_sample_rate = s->sample_spec.rate;
252 if (data->alternate_sample_rate_is_set)
253 s->alternate_sample_rate = data->alternate_sample_rate;
255 s->alternate_sample_rate = s->core->alternate_sample_rate;
257 if (s->sample_spec.rate == s->alternate_sample_rate) {
258 pa_log_warn("Default and alternate sample rates are the same.");
259 s->alternate_sample_rate = 0;
262 s->outputs = pa_idxset_new(NULL, NULL);
264 s->monitor_of = NULL;
265 s->output_from_master = NULL;
267 s->reference_volume = s->real_volume = data->volume;
268 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
269 s->base_volume = PA_VOLUME_NORM;
270 s->n_volume_steps = PA_VOLUME_NORM+1;
271 s->muted = data->muted;
272 s->refresh_volume = s->refresh_muted = FALSE;
279 /* As a minor optimization we just steal the list instead of copying it. */
281 s->ports = data->ports;
284 s->active_port = NULL;
285 s->save_port = FALSE;
287 if (data->active_port)
288 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
289 s->save_port = data->save_port;
291 if (!s->active_port) {
295 PA_HASHMAP_FOREACH(p, s->ports, state)
296 if (!s->active_port || p->priority > s->active_port->priority)
301 s->latency_offset = s->active_port->latency_offset;
303 s->latency_offset = 0;
305 s->save_volume = data->save_volume;
306 s->save_muted = data->save_muted;
308 pa_silence_memchunk_get(
309 &core->silence_cache,
315 s->thread_info.rtpoll = NULL;
316 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
317 s->thread_info.soft_volume = s->soft_volume;
318 s->thread_info.soft_muted = s->muted;
319 s->thread_info.state = s->state;
320 s->thread_info.max_rewind = 0;
321 s->thread_info.requested_latency_valid = FALSE;
322 s->thread_info.requested_latency = 0;
323 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
324 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
325 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
327 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
328 s->thread_info.volume_changes_tail = NULL;
329 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
330 s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
331 s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
332 s->thread_info.latency_offset = s->latency_offset;
334 /* FIXME: This should probably be moved to pa_source_put() */
335 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
338 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
340 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
341 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
344 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
345 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
352 /* Called from main context */
353 static int source_set_state(pa_source *s, pa_source_state_t state) {
355 pa_bool_t suspend_change;
356 pa_source_state_t original_state;
359 pa_assert_ctl_context();
361 if (s->state == state)
364 original_state = s->state;
367 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
368 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
371 if ((ret = s->set_state(s, state)) < 0)
375 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
378 s->set_state(s, original_state);
385 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
386 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
387 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
390 if (suspend_change) {
394 /* We're suspending or resuming, tell everyone about it */
396 PA_IDXSET_FOREACH(o, s->outputs, idx)
397 if (s->state == PA_SOURCE_SUSPENDED &&
398 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
399 pa_source_output_kill(o);
401 o->suspend(o, state == PA_SOURCE_SUSPENDED);
407 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
413 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
414 pa_source_flags_t flags;
417 pa_assert(!s->write_volume || cb);
421 /* Save the current flags so we can tell if they've changed */
425 /* The source implementor is responsible for setting decibel volume support */
426 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
428 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
429 /* See note below in pa_source_put() about volume sharing and decibel volumes */
430 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
433 /* If the flags have changed after init, let any clients know via a change event */
434 if (s->state != PA_SOURCE_INIT && flags != s->flags)
435 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
438 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
439 pa_source_flags_t flags;
442 pa_assert(!cb || s->set_volume);
444 s->write_volume = cb;
446 /* Save the current flags so we can tell if they've changed */
450 s->flags |= PA_SOURCE_DEFERRED_VOLUME;
452 s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;
454 /* If the flags have changed after init, let any clients know via a change event */
455 if (s->state != PA_SOURCE_INIT && flags != s->flags)
456 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
459 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
465 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
466 pa_source_flags_t flags;
472 /* Save the current flags so we can tell if they've changed */
476 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
478 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
480 /* If the flags have changed after init, let any clients know via a change event */
481 if (s->state != PA_SOURCE_INIT && flags != s->flags)
482 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
485 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
486 pa_source_flags_t flags;
490 /* Always follow the overall user preference here */
491 enable = enable && s->core->flat_volumes;
493 /* Save the current flags so we can tell if they've changed */
497 s->flags |= PA_SOURCE_FLAT_VOLUME;
499 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
501 /* If the flags have changed after init, let any clients know via a change event */
502 if (s->state != PA_SOURCE_INIT && flags != s->flags)
503 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
506 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
507 pa_source_flags_t flags;
511 /* Save the current flags so we can tell if they've changed */
515 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
516 enable_flat_volume(s, TRUE);
518 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
519 enable_flat_volume(s, FALSE);
522 /* If the flags have changed after init, let any clients know via a change event */
523 if (s->state != PA_SOURCE_INIT && flags != s->flags)
524 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
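/* Illustrative sketch (assumption, not from this file): the setter functions
 * above are meant to be called by the source implementor between
 * pa_source_new() and pa_source_put(), which is also when the related volume
 * flags get adjusted:
 *
 *     s = pa_source_new(core, &data, PA_SOURCE_HARDWARE|PA_SOURCE_LATENCY);
 *     pa_source_set_get_volume_callback(s, my_get_volume_cb);   // hypothetical callbacks
 *     pa_source_set_set_volume_callback(s, my_set_volume_cb);
 *     pa_source_set_set_mute_callback(s, my_set_mute_cb);
 *     pa_source_set_asyncmsgq(s, thread_mq.inq);
 *     pa_source_set_rtpoll(s, rtpoll);
 *     pa_source_put(s);
 */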
527 /* Called from main context */
528 void pa_source_put(pa_source *s) {
529 pa_source_assert_ref(s);
530 pa_assert_ctl_context();
532 pa_assert(s->state == PA_SOURCE_INIT);
533 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
535 /* The following fields must be initialized properly when calling _put() */
536 pa_assert(s->asyncmsgq);
537 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
539 /* Generally, flags should be initialized via pa_source_new(). As a
540 * special exception we allow some volume related flags to be set
541 * between _new() and _put() by the callback setter functions above.
543 * Thus we implement a couple of safeguards here which ensure the above
544 * setters were used (or at least the implementor made manual changes
545 * in a compatible way).
547 * Note: All of these flags set here can change over the life time
549 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
550 pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
551 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
553 /* XXX: Currently decibel volume is disabled for all sources that use volume
554 * sharing. When the master source supports decibel volume, it would be good
555 * to have the flag also in the filter source, but currently we don't do that
556 * so that the flags of the filter source never change when it's moved from
557 * a master source to another. One solution for this problem would be to
558 * remove user-visible volume altogether from filter sources when volume
559 * sharing is used, but the current approach was easier to implement... */
560 /* We always support decibel volumes in software, otherwise we leave it to
561 * the source implementor to set this flag as needed.
563 * Note: This flag can also change over the life time of the source. */
564 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
565 pa_source_enable_decibel_volume(s, TRUE);
567 /* If the source implementor supports dB volumes by itself, we should always
568 * try to enable flat volumes too */
569 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
570 enable_flat_volume(s, TRUE);
572 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
573 pa_source *root_source = pa_source_get_master(s);
575 pa_assert(PA_LIKELY(root_source));
577 s->reference_volume = root_source->reference_volume;
578 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
580 s->real_volume = root_source->real_volume;
581 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
583 /* We assume that if the source implementor changed the default
584 * volume he did so in real_volume, because that is the usual
585 * place where he is supposed to place his changes. */
586 s->reference_volume = s->real_volume;
588 s->thread_info.soft_volume = s->soft_volume;
589 s->thread_info.soft_muted = s->muted;
590 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
592 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
593 || (s->base_volume == PA_VOLUME_NORM
594 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
595 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
596 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
598 if (s->suspend_cause)
599 pa_assert_se(source_set_state(s, PA_SOURCE_SUSPENDED) == 0);
601 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
603 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
604 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
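/* Illustrative sketch (assumption): other modules can react to the
 * PA_CORE_HOOK_SOURCE_PUT hook fired above, e.g.:
 *
 *     pa_hook_connect(&core->hooks[PA_CORE_HOOK_SOURCE_PUT],
 *                     PA_HOOK_NORMAL,
 *                     (pa_hook_cb_t) my_source_put_hook_cb,   // hypothetical callback
 *                     userdata);
 */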
607 /* Called from main context */
608 void pa_source_unlink(pa_source *s) {
610 pa_source_output *o, *j = NULL;
613 pa_assert_ctl_context();
615 /* See pa_sink_unlink() for a couple of comments on how this function
618 linked = PA_SOURCE_IS_LINKED(s->state);
621 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
623 if (s->state != PA_SOURCE_UNLINKED)
624 pa_namereg_unregister(s->core, s->name);
625 pa_idxset_remove_by_data(s->core->sources, s, NULL);
628 pa_idxset_remove_by_data(s->card->sources, s, NULL);
630 while ((o = pa_idxset_first(s->outputs, NULL))) {
632 pa_source_output_kill(o);
637 source_set_state(s, PA_SOURCE_UNLINKED);
639 s->state = PA_SOURCE_UNLINKED;
644 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
645 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
649 /* Called from main context */
650 static void source_free(pa_object *o) {
651 pa_source_output *so;
652 pa_source *s = PA_SOURCE(o);
655 pa_assert_ctl_context();
656 pa_assert(pa_source_refcnt(s) == 0);
658 if (PA_SOURCE_IS_LINKED(s->state))
661 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
663 pa_idxset_free(s->outputs, NULL, NULL);
665 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
666 pa_source_output_unref(so);
668 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
670 if (s->silence.memblock)
671 pa_memblock_unref(s->silence.memblock);
677 pa_proplist_free(s->proplist);
680 pa_device_port_hashmap_free(s->ports);
685 /* Called from main context, and not while the IO thread is active, please */
686 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
687 pa_source_assert_ref(s);
688 pa_assert_ctl_context();
693 /* Called from main context, and not while the IO thread is active, please */
694 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
695 pa_source_assert_ref(s);
696 pa_assert_ctl_context();
701 /* For now, allow only a minimal set of flags to be changed. */
702 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
704 s->flags = (s->flags & ~mask) | (value & mask);
707 /* Called from IO context, or before _put() from main context */
708 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
709 pa_source_assert_ref(s);
710 pa_source_assert_io_context(s);
712 s->thread_info.rtpoll = p;
715 /* Called from main context */
716 int pa_source_update_status(pa_source*s) {
717 pa_source_assert_ref(s);
718 pa_assert_ctl_context();
719 pa_assert(PA_SOURCE_IS_LINKED(s->state));
721 if (s->state == PA_SOURCE_SUSPENDED)
724 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
727 /* Called from any context - must be threadsafe */
728 void pa_source_set_mixer_dirty(pa_source *s, pa_bool_t is_dirty)
730 pa_atomic_store(&s->mixer_dirty, is_dirty ? 1 : 0);
733 /* Called from main context */
734 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
735 pa_source_assert_ref(s);
736 pa_assert_ctl_context();
737 pa_assert(PA_SOURCE_IS_LINKED(s->state));
738 pa_assert(cause != 0);
740 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
741 return -PA_ERR_NOTSUPPORTED;
744 s->suspend_cause |= cause;
746 s->suspend_cause &= ~cause;
748 if (!(s->suspend_cause & PA_SUSPEND_SESSION) && (pa_atomic_load(&s->mixer_dirty) != 0)) {
749 /* This might look racy but isn't: If somebody sets mixer_dirty exactly here,
750 it'll be handled just fine. */
751 pa_source_set_mixer_dirty(s, FALSE);
752 pa_log_debug("Mixer is now accessible. Updating alsa mixer settings.");
753 if (s->active_port && s->set_port) {
754 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
755 struct source_message_set_port msg = { .port = s->active_port, .ret = 0 };
756 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
759 s->set_port(s, s->active_port);
769 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
772 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
774 if (s->suspend_cause)
775 return source_set_state(s, PA_SOURCE_SUSPENDED);
777 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
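/* Illustrative note (assumption): suspend causes form a bitmask, so several
 * independent reasons can keep a source suspended at once and it only
 * resumes when the last cause is cleared:
 *
 *     pa_source_suspend(s, TRUE,  PA_SUSPEND_USER);   // suspend_cause |= USER
 *     pa_source_suspend(s, TRUE,  PA_SUSPEND_IDLE);   // suspend_cause |= IDLE
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_USER);   // still suspended, IDLE remains
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_IDLE);   // all causes gone -> resumes
 */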
780 /* Called from main context */
781 int pa_source_sync_suspend(pa_source *s) {
782 pa_sink_state_t state;
784 pa_source_assert_ref(s);
785 pa_assert_ctl_context();
786 pa_assert(PA_SOURCE_IS_LINKED(s->state));
787 pa_assert(s->monitor_of);
789 state = pa_sink_get_state(s->monitor_of);
791 if (state == PA_SINK_SUSPENDED)
792 return source_set_state(s, PA_SOURCE_SUSPENDED);
794 pa_assert(PA_SINK_IS_OPENED(state));
796 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
799 /* Called from main context */
800 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
801 pa_source_output *o, *n;
804 pa_source_assert_ref(s);
805 pa_assert_ctl_context();
806 pa_assert(PA_SOURCE_IS_LINKED(s->state));
811 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
812 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
814 pa_source_output_ref(o);
816 if (pa_source_output_start_move(o) >= 0)
819 pa_source_output_unref(o);
825 /* Called from main context */
826 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
829 pa_source_assert_ref(s);
830 pa_assert_ctl_context();
831 pa_assert(PA_SOURCE_IS_LINKED(s->state));
834 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
835 if (pa_source_output_finish_move(o, s, save) < 0)
836 pa_source_output_fail_move(o);
838 pa_source_output_unref(o);
841 pa_queue_free(q, NULL);
844 /* Called from main context */
845 void pa_source_move_all_fail(pa_queue *q) {
848 pa_assert_ctl_context();
851 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
852 pa_source_output_fail_move(o);
853 pa_source_output_unref(o);
856 pa_queue_free(q, NULL);
859 /* Called from IO thread context */
860 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
864 pa_source_assert_ref(s);
865 pa_source_assert_io_context(s);
866 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
871 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
874 pa_log_debug("Processing rewind...");
876 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
877 pa_source_output_assert_ref(o);
878 pa_source_output_process_rewind(o, nbytes);
882 /* Called from IO thread context */
883 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
887 pa_source_assert_ref(s);
888 pa_source_assert_io_context(s);
889 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
892 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
895 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
896 pa_memchunk vchunk = *chunk;
898 pa_memblock_ref(vchunk.memblock);
899 pa_memchunk_make_writable(&vchunk, 0);
901 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
902 pa_silence_memchunk(&vchunk, &s->sample_spec);
904 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
906 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
907 pa_source_output_assert_ref(o);
909 if (!o->thread_info.direct_on_input)
910 pa_source_output_push(o, &vchunk);
913 pa_memblock_unref(vchunk.memblock);
916 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
917 pa_source_output_assert_ref(o);
919 if (!o->thread_info.direct_on_input)
920 pa_source_output_push(o, chunk);
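/* Illustrative sketch (assumption, not from this file): a source
 * implementation typically calls pa_source_post() from its IO thread once it
 * has captured a block of audio:
 *
 *     pa_memchunk chunk;
 *
 *     chunk.memblock = pa_memblock_new(s->core->mempool, block_size);
 *     chunk.index = 0;
 *     chunk.length = block_size;
 *     // ... copy captured samples into pa_memblock_acquire(chunk.memblock),
 *     //     then pa_memblock_release(chunk.memblock) ...
 *
 *     pa_source_post(s, &chunk);
 *     pa_memblock_unref(chunk.memblock);
 */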
925 /* Called from IO thread context */
926 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
927 pa_source_assert_ref(s);
928 pa_source_assert_io_context(s);
929 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
930 pa_source_output_assert_ref(o);
931 pa_assert(o->thread_info.direct_on_input);
934 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
937 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
938 pa_memchunk vchunk = *chunk;
940 pa_memblock_ref(vchunk.memblock);
941 pa_memchunk_make_writable(&vchunk, 0);
943 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
944 pa_silence_memchunk(&vchunk, &s->sample_spec);
946 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
948 pa_source_output_push(o, &vchunk);
950 pa_memblock_unref(vchunk.memblock);
952 pa_source_output_push(o, chunk);
955 /* Called from main thread */
956 pa_bool_t pa_source_update_rate(pa_source *s, uint32_t rate, pa_bool_t passthrough)
958 if (s->update_rate) {
959 uint32_t desired_rate = rate;
960 uint32_t default_rate = s->default_sample_rate;
961 uint32_t alternate_rate = s->alternate_sample_rate;
964 pa_bool_t use_alternate = FALSE;
966 if (PA_UNLIKELY(default_rate == alternate_rate)) {
967 pa_log_warn("Default and alternate sample rates are the same.");
971 if (PA_SOURCE_IS_RUNNING(s->state)) {
972 pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
973 s->sample_spec.rate);
977 if (PA_UNLIKELY (desired_rate < 8000 ||
978 desired_rate > PA_RATE_MAX))
982 pa_assert(default_rate % 4000 || default_rate % 11025);
983 pa_assert(alternate_rate % 4000 || alternate_rate % 11025);
985 if (default_rate % 4000) {
986 /* default is an 11025 multiple */
987 if ((alternate_rate % 4000 == 0) && (desired_rate % 4000 == 0))
990 /* default is a 4000 multiple */
991 if ((alternate_rate % 11025 == 0) && (desired_rate % 11025 == 0))
996 desired_rate = alternate_rate;
998 desired_rate = default_rate;
1000 desired_rate = rate; /* use stream sampling rate, discard default/alternate settings */
1003 if (desired_rate == s->sample_spec.rate)
1006 if (!passthrough && pa_source_used_by(s) > 0)
1009 pa_log_debug("Suspending source %s due to changing the sample rate.", s->name);
1010 pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE); /* needed before rate update, will be resumed automatically */
1012 if (s->update_rate(s, desired_rate) == TRUE) {
1013 pa_log_info("Changed sampling rate successfully");
1015 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1016 if (o->state == PA_SOURCE_OUTPUT_CORKED)
1017 pa_source_output_update_rate(o);
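/* Worked example (illustrative, derived from the rate-selection logic above):
 * with default_rate = 44100 (44100 % 4000 != 0, i.e. an 11025 multiple) and
 * alternate_rate = 48000 (a 4000 multiple), a stream asking for 48000 Hz
 * makes both the alternate and the desired rate 4000 multiples, so
 * use_alternate becomes TRUE and the source switches to 48000 Hz. A stream
 * asking for 22050 Hz keeps the 44100 Hz default. In passthrough mode the
 * stream rate is used directly and default/alternate are ignored. */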
1025 /* Called from main thread */
1026 pa_usec_t pa_source_get_latency(pa_source *s) {
1029 pa_source_assert_ref(s);
1030 pa_assert_ctl_context();
1031 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1033 if (s->state == PA_SOURCE_SUSPENDED)
1036 if (!(s->flags & PA_SOURCE_LATENCY))
1039 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
1041 /* usec is unsigned, so check that the offset can be added to usec without underflowing. */
1043 if (-s->latency_offset <= (int64_t) usec)
1044 usec += s->latency_offset;
1051 /* Called from IO thread */
1052 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
1056 pa_source_assert_ref(s);
1057 pa_source_assert_io_context(s);
1058 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1060 /* The returned value is supposed to be in the time domain of the sound card! */
1062 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
1065 if (!(s->flags & PA_SOURCE_LATENCY))
1068 o = PA_MSGOBJECT(s);
1070 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
1072 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
1075 /* usec is unsigned, so check that the offset can be added to usec without underflowing. */
1077 if (-s->thread_info.latency_offset <= (int64_t) usec)
1078 usec += s->thread_info.latency_offset;
1085 /* Called from the main thread (and also from the IO thread while the main
1086 * thread is waiting).
1088 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
1089 * set. Instead, flat volume mode is detected by checking whether the root source
1090 * has the flag set. */
1091 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
1092 pa_source_assert_ref(s);
1094 s = pa_source_get_master(s);
1097 return (s->flags & PA_SOURCE_FLAT_VOLUME);
1102 /* Called from the main thread (and also from the IO thread while the main
1103 * thread is waiting). */
1104 pa_source *pa_source_get_master(pa_source *s) {
1105 pa_source_assert_ref(s);
1107 while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1108 if (PA_UNLIKELY(!s->output_from_master))
1111 s = s->output_from_master->source;
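/* Illustrative note (assumption): with volume sharing, filter sources form a
 * chain that this loop walks up to the hardware ("root") source:
 *
 *     hw_source  <--output_from_master--  filter_A  <--output_from_master--  filter_B
 *
 *     pa_source_get_master(filter_B) == hw_source
 *
 * Only the root source may carry PA_SOURCE_FLAT_VOLUME and the real hardware
 * volume; the filter sources share it. */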
1117 /* Called from main context */
1118 pa_bool_t pa_source_is_passthrough(pa_source *s) {
1120 pa_source_assert_ref(s);
1122 /* NB Currently only monitor sources support passthrough mode */
1123 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
1126 /* Called from main context */
1127 void pa_source_enter_passthrough(pa_source *s) {
1130 /* set the volume to NORM */
1131 s->saved_volume = *pa_source_get_volume(s, TRUE);
1132 s->saved_save_volume = s->save_volume;
1134 pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
1135 pa_source_set_volume(s, &volume, TRUE, FALSE);
1138 /* Called from main context */
1139 void pa_source_leave_passthrough(pa_source *s) {
1140 /* Restore source volume to what it was before we entered passthrough mode */
1141 pa_source_set_volume(s, &s->saved_volume, TRUE, s->saved_save_volume);
1143 pa_cvolume_init(&s->saved_volume);
1144 s->saved_save_volume = FALSE;
1147 /* Called from main context. */
1148 static void compute_reference_ratio(pa_source_output *o) {
1150 pa_cvolume remapped;
1153 pa_assert(pa_source_flat_volume_enabled(o->source));
1156 * Calculates the reference ratio from the source's reference
1157 * volume. This basically calculates:
1159 * o->reference_ratio = o->volume / o->source->reference_volume
1162 remapped = o->source->reference_volume;
1163 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
1165 o->reference_ratio.channels = o->sample_spec.channels;
1167 for (c = 0; c < o->sample_spec.channels; c++) {
1169 /* We don't update when the source volume is 0 anyway */
1170 if (remapped.values[c] <= PA_VOLUME_MUTED)
1173 /* Don't update the reference ratio unless necessary */
1174 if (pa_sw_volume_multiply(
1175 o->reference_ratio.values[c],
1176 remapped.values[c]) == o->volume.values[c])
1179 o->reference_ratio.values[c] = pa_sw_volume_divide(
1180 o->volume.values[c],
1181 remapped.values[c]);
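/* Illustrative sketch (assumption, names are hypothetical): the reference
 * ratio is the stream volume divided by the source's reference volume in the
 * software volume domain, so that multiplying them back recovers the stream
 * volume (modulo rounding):
 *
 *     static pa_volume_t example_reference_ratio(pa_volume_t stream_volume,
 *                                                pa_volume_t source_reference) {
 *         return pa_sw_volume_divide(stream_volume, source_reference);
 *     }
 *
 *     // pa_sw_volume_multiply(example_reference_ratio(v, r), r) ~= v
 */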
1185 /* Called from main context. Only called for the root source in volume sharing
1186 * cases, except for internal recursive calls. */
1187 static void compute_reference_ratios(pa_source *s) {
1189 pa_source_output *o;
1191 pa_source_assert_ref(s);
1192 pa_assert_ctl_context();
1193 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1194 pa_assert(pa_source_flat_volume_enabled(s));
1196 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1197 compute_reference_ratio(o);
1199 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1200 compute_reference_ratios(o->destination_source);
1204 /* Called from main context. Only called for the root source in volume sharing
1205 * cases, except for internal recursive calls. */
1206 static void compute_real_ratios(pa_source *s) {
1207 pa_source_output *o;
1210 pa_source_assert_ref(s);
1211 pa_assert_ctl_context();
1212 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1213 pa_assert(pa_source_flat_volume_enabled(s));
1215 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1217 pa_cvolume remapped;
1219 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1220 /* The origin source uses volume sharing, so this output's real ratio
1221 * is handled as a special case - the real ratio must be 0 dB, and
1222 * as a result o->soft_volume must equal o->volume_factor. */
1223 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1224 o->soft_volume = o->volume_factor;
1226 compute_real_ratios(o->destination_source);
1232 * This basically calculates:
1234 * o->real_ratio := o->volume / s->real_volume
1235 * o->soft_volume := o->real_ratio * o->volume_factor
1238 remapped = s->real_volume;
1239 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1241 o->real_ratio.channels = o->sample_spec.channels;
1242 o->soft_volume.channels = o->sample_spec.channels;
1244 for (c = 0; c < o->sample_spec.channels; c++) {
1246 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1247 /* We leave o->real_ratio untouched */
1248 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1252 /* Don't lose accuracy unless necessary */
1253 if (pa_sw_volume_multiply(
1254 o->real_ratio.values[c],
1255 remapped.values[c]) != o->volume.values[c])
1257 o->real_ratio.values[c] = pa_sw_volume_divide(
1258 o->volume.values[c],
1259 remapped.values[c]);
1261 o->soft_volume.values[c] = pa_sw_volume_multiply(
1262 o->real_ratio.values[c],
1263 o->volume_factor.values[c]);
1266 /* We don't copy the soft_volume to the thread_info data
1267 * here. That must be done by the caller */
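/* Worked example (illustrative assumption): in flat volume mode the source's
 * real volume is the maximum of all stream volumes. If one stream sits at
 * 50% while the source's real (hw) volume is 100%, then for that stream
 *
 *     o->real_ratio  = 50% / 100% = 50%              (pa_sw_volume_divide)
 *     o->soft_volume = o->real_ratio * o->volume_factor
 *                                                    (pa_sw_volume_multiply)
 *
 * i.e. the part of the attenuation that the hardware does not provide for
 * this particular stream is applied in software. */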
1271 static pa_cvolume *cvolume_remap_minimal_impact(
1273 const pa_cvolume *template,
1274 const pa_channel_map *from,
1275 const pa_channel_map *to) {
1280 pa_assert(template);
1283 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1284 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1286 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1287 * mapping from source output to source volumes:
1289 * If template is a possible remapping from v it is used instead
1290 * of remapping anew.
1292 * If the channel maps don't match we set an all-channel volume on
1293 * the source to ensure that changing a volume on one stream has no
1294 * effect that cannot be compensated for in another stream that
1295 * does not have the same channel map as the source. */
1297 if (pa_channel_map_equal(from, to))
1301 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1306 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1310 /* Called from main thread. Only called for the root source in volume sharing
1311 * cases, except for internal recursive calls. */
1312 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1313 pa_source_output *o;
1316 pa_source_assert_ref(s);
1317 pa_assert(max_volume);
1318 pa_assert(channel_map);
1319 pa_assert(pa_source_flat_volume_enabled(s));
1321 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1322 pa_cvolume remapped;
1324 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1325 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1327 /* Ignore this output. The origin source uses volume sharing, so this
1328 * output's volume will be set to be equal to the root source's real
1329 * volume. Obviously this output's current volume must not then
1330 * affect what the root source's real volume will be. */
1334 remapped = o->volume;
1335 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1336 pa_cvolume_merge(max_volume, max_volume, &remapped);
1340 /* Called from main thread. Only called for the root source in volume sharing
1341 * cases, except for internal recursive calls. */
1342 static pa_bool_t has_outputs(pa_source *s) {
1343 pa_source_output *o;
1346 pa_source_assert_ref(s);
1348 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1349 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1356 /* Called from main thread. Only called for the root source in volume sharing
1357 * cases, except for internal recursive calls. */
1358 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1359 pa_source_output *o;
1362 pa_source_assert_ref(s);
1363 pa_assert(new_volume);
1364 pa_assert(channel_map);
1366 s->real_volume = *new_volume;
1367 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1369 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1370 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1371 if (pa_source_flat_volume_enabled(s)) {
1372 pa_cvolume old_volume = o->volume;
1374 /* Follow the root source's real volume. */
1375 o->volume = *new_volume;
1376 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1377 compute_reference_ratio(o);
1379 /* The volume changed, let's tell people so */
1380 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1381 if (o->volume_changed)
1382 o->volume_changed(o);
1384 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1388 update_real_volume(o->destination_source, new_volume, channel_map);
1393 /* Called from main thread. Only called for the root source in shared volume
1395 static void compute_real_volume(pa_source *s) {
1396 pa_source_assert_ref(s);
1397 pa_assert_ctl_context();
1398 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1399 pa_assert(pa_source_flat_volume_enabled(s));
1400 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1402 /* This determines the maximum volume of all streams and sets
1403 * s->real_volume accordingly. */
1405 if (!has_outputs(s)) {
1406 /* In the special case that we have no source outputs we leave the
1407 * volume unmodified. */
1408 update_real_volume(s, &s->reference_volume, &s->channel_map);
1412 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1414 /* First let's determine the new maximum volume of all outputs
1415 * connected to this source */
1416 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1417 update_real_volume(s, &s->real_volume, &s->channel_map);
1419 /* Then, let's update the real ratios/soft volumes of all outputs
1420 * connected to this source */
1421 compute_real_ratios(s);
1424 /* Called from main thread. Only called for the root source in shared volume
1425 * cases, except for internal recursive calls. */
1426 static void propagate_reference_volume(pa_source *s) {
1427 pa_source_output *o;
1430 pa_source_assert_ref(s);
1431 pa_assert_ctl_context();
1432 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1433 pa_assert(pa_source_flat_volume_enabled(s));
1435 /* This is called whenever the source volume changes for a reason other
1436 * than a source output volume change. We need to fix up the source
1437 * output volumes accordingly */
1439 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1440 pa_cvolume old_volume;
1442 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1443 propagate_reference_volume(o->destination_source);
1445 /* Since the origin source uses volume sharing, this output's volume
1446 * needs to be updated to match the root source's real volume, but
1447 * that will be done later in update_shared_real_volume(). */
1451 old_volume = o->volume;
1453 /* This basically calculates:
1455 * o->volume := o->reference_volume * o->reference_ratio */
1457 o->volume = s->reference_volume;
1458 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1459 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1461 /* The volume changed, let's tell people so */
1462 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1464 if (o->volume_changed)
1465 o->volume_changed(o);
1467 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1472 /* Called from main thread. Only called for the root source in volume sharing
1473 * cases, except for internal recursive calls. The return value indicates
1474 * whether any reference volume actually changed. */
1475 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1477 pa_bool_t reference_volume_changed;
1478 pa_source_output *o;
1481 pa_source_assert_ref(s);
1482 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1484 pa_assert(channel_map);
1485 pa_assert(pa_cvolume_valid(v));
1488 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1490 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1491 s->reference_volume = volume;
1493 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1495 if (reference_volume_changed)
1496 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1497 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1498 /* If the root source's volume doesn't change, then there can't be any
1499 * changes in the other sources in the source tree either.
1501 * It's probably theoretically possible that even if the root source's
1502 * volume changes slightly, some filter source doesn't change its volume
1503 * due to rounding errors. If that happens, we still want to propagate
1504 * the changed root source volume to the sources connected to the
1505 * intermediate source that didn't change its volume. This theoretical
1506 * possibility is the reason why we have that !(s->flags &
1507 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1508 * notice even if we always returned FALSE here when
1509 * reference_volume_changed is FALSE. */
1512 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1513 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1514 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1520 /* Called from main thread */
1521 void pa_source_set_volume(
1523 const pa_cvolume *volume,
1527 pa_cvolume new_reference_volume;
1528 pa_source *root_source;
1530 pa_source_assert_ref(s);
1531 pa_assert_ctl_context();
1532 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1533 pa_assert(!volume || pa_cvolume_valid(volume));
1534 pa_assert(volume || pa_source_flat_volume_enabled(s));
1535 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1537 /* make sure we don't change the volume in PASSTHROUGH mode ...
1538 * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
1539 if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
1540 pa_log_warn("Cannot change volume, source is monitor of a PASSTHROUGH sink");
1544 /* In case of volume sharing, the volume is set for the root source first,
1545 * from which it's then propagated to the sharing sources. */
1546 root_source = pa_source_get_master(s);
1548 if (PA_UNLIKELY(!root_source))
1551 /* As a special exception we accept mono volumes on all sources --
1552 * even on those with more complex channel maps */
1555 if (pa_cvolume_compatible(volume, &s->sample_spec))
1556 new_reference_volume = *volume;
1558 new_reference_volume = s->reference_volume;
1559 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1562 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1564 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1565 if (pa_source_flat_volume_enabled(root_source)) {
1566 /* OK, propagate this volume change back to the outputs */
1567 propagate_reference_volume(root_source);
1569 /* And now recalculate the real volume */
1570 compute_real_volume(root_source);
1572 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1576 /* If volume is NULL we synchronize the source's real and
1577 * reference volumes with the stream volumes. */
1579 pa_assert(pa_source_flat_volume_enabled(root_source));
1581 /* Ok, let's determine the new real volume */
1582 compute_real_volume(root_source);
1584 /* Let's 'push' the reference volume if necessary */
1585 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1586 /* If the source and its root don't have the same number of channels, we need to remap */
1587 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1588 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1589 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1591 /* Now that the reference volume is updated, we can update the streams'
1592 * reference ratios. */
1593 compute_reference_ratios(root_source);
1596 if (root_source->set_volume) {
1597 /* If we have a function set_volume(), then we do not apply a
1598 * soft volume by default. However, set_volume() is free to
1599 * apply one to root_source->soft_volume */
1601 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1602 if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
1603 root_source->set_volume(root_source);
1606 /* If we have no function set_volume(), then the soft volume
1607 * becomes the real volume */
1608 root_source->soft_volume = root_source->real_volume;
1610 /* This tells the source that soft volume and/or real volume changed */
1612 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
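/* Illustrative usage sketch (assumption, not from this file): a typical
 * caller sets an absolute reference volume and asks for it to be saved:
 *
 *     pa_cvolume v;
 *
 *     pa_cvolume_set(&v, s->sample_spec.channels, PA_VOLUME_NORM / 2);
 *     pa_source_set_volume(s, &v, TRUE, TRUE);
 *
 * Passing a NULL volume instead makes the source re-derive its reference and
 * real volume from the current stream volumes (flat volume mode only). */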
1615 /* Called from the IO thread if deferred volume is used, otherwise from the main thread.
1616 * Only to be called by the source implementor */
1617 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1619 pa_source_assert_ref(s);
1620 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1622 if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
1623 pa_source_assert_io_context(s);
1625 pa_assert_ctl_context();
1628 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1630 s->soft_volume = *volume;
1632 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
1633 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1635 s->thread_info.soft_volume = s->soft_volume;
1638 /* Called from the main thread. Only called for the root source in volume sharing
1639 * cases, except for internal recursive calls. */
1640 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1641 pa_source_output *o;
1644 pa_source_assert_ref(s);
1645 pa_assert(old_real_volume);
1646 pa_assert_ctl_context();
1647 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1649 /* This is called when the hardware's real volume changes due to
1650 * some external event. We copy the real volume into our
1651 * reference volume and then rebuild the stream volumes based on
1652 * o->real_ratio which should stay fixed. */
1654 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1655 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1658 /* 1. Make the real volume the reference volume */
1659 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1662 if (pa_source_flat_volume_enabled(s)) {
1664 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1665 pa_cvolume old_volume = o->volume;
1667 /* 2. Since the source's reference and real volumes are equal
1668 * now our ratios should be too. */
1669 o->reference_ratio = o->real_ratio;
1671 /* 3. Recalculate the new stream reference volume based on the
1672 * reference ratio and the source's reference volume.
1674 * This basically calculates:
1676 * o->volume = s->reference_volume * o->reference_ratio
1678 * This is identical to propagate_reference_volume() */
1679 o->volume = s->reference_volume;
1680 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1681 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1683 /* Notify if something changed */
1684 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1686 if (o->volume_changed)
1687 o->volume_changed(o);
1689 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1692 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1693 propagate_real_volume(o->destination_source, old_real_volume);
1697 /* Something got changed in the hardware. It probably makes sense
1698 * to save changed hw settings given that hw volume changes not
1699 * triggered by PA are almost certainly done by the user. */
1700 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1701 s->save_volume = TRUE;
1704 /* Called from io thread */
1705 void pa_source_update_volume_and_mute(pa_source *s) {
1707 pa_source_assert_io_context(s);
1709 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1712 /* Called from main thread */
1713 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1714 pa_source_assert_ref(s);
1715 pa_assert_ctl_context();
1716 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1718 if (s->refresh_volume || force_refresh) {
1719 struct pa_cvolume old_real_volume;
1721 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1723 old_real_volume = s->real_volume;
1725 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)
1728 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1730 update_real_volume(s, &s->real_volume, &s->channel_map);
1731 propagate_real_volume(s, &old_real_volume);
1734 return &s->reference_volume;
1737 /* Called from main thread. In volume sharing cases, only the root source may
1739 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1740 pa_cvolume old_real_volume;
1742 pa_source_assert_ref(s);
1743 pa_assert_ctl_context();
1744 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1745 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1747 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1749 old_real_volume = s->real_volume;
1750 update_real_volume(s, new_real_volume, &s->channel_map);
1751 propagate_real_volume(s, &old_real_volume);
1754 /* Called from main thread */
1755 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1756 pa_bool_t old_muted;
1758 pa_source_assert_ref(s);
1759 pa_assert_ctl_context();
1760 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1762 old_muted = s->muted;
1764 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1766 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute)
1769 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1771 if (old_muted != s->muted)
1772 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1775 /* Called from main thread */
1776 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1778 pa_source_assert_ref(s);
1779 pa_assert_ctl_context();
1780 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1782 if (s->refresh_muted || force_refresh) {
1783 pa_bool_t old_muted = s->muted;
1785 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_mute)
1788 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1790 if (old_muted != s->muted) {
1791 s->save_muted = TRUE;
1793 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1795 /* Make sure the soft mute status stays in sync */
1796 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1803 /* Called from main thread */
1804 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1805 pa_source_assert_ref(s);
1806 pa_assert_ctl_context();
1807 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1809 /* The source implementor may call this if the mute state changed to make sure everyone is notified */
1811 if (s->muted == new_muted)
1814 s->muted = new_muted;
1815 s->save_muted = TRUE;
1817 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1820 /* Called from main thread */
1821 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1822 pa_source_assert_ref(s);
1823 pa_assert_ctl_context();
1826 pa_proplist_update(s->proplist, mode, p);
1828 if (PA_SOURCE_IS_LINKED(s->state)) {
1829 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1830 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
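/* Illustrative usage sketch (assumption): callers update device properties
 * through a temporary proplist, e.g.:
 *
 *     pa_proplist *p = pa_proplist_new();
 *
 *     pa_proplist_sets(p, PA_PROP_DEVICE_DESCRIPTION, "My capture device");
 *     pa_source_update_proplist(s, PA_UPDATE_MERGE, p);
 *     pa_proplist_free(p);
 */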
1836 /* Called from main thread */
1837 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1838 void pa_source_set_description(pa_source *s, const char *description) {
1840 pa_source_assert_ref(s);
1841 pa_assert_ctl_context();
1843 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1846 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1848 if (old && description && pa_streq(old, description))
1852 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1854 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1856 if (PA_SOURCE_IS_LINKED(s->state)) {
1857 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1858 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1862 /* Called from main thread */
1863 unsigned pa_source_linked_by(pa_source *s) {
1864 pa_source_assert_ref(s);
1865 pa_assert_ctl_context();
1866 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1868 return pa_idxset_size(s->outputs);
1871 /* Called from main thread */
1872 unsigned pa_source_used_by(pa_source *s) {
1875 pa_source_assert_ref(s);
1876 pa_assert_ctl_context();
1877 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1879 ret = pa_idxset_size(s->outputs);
1880 pa_assert(ret >= s->n_corked);
1882 return ret - s->n_corked;
1885 /* Called from main thread */
1886 unsigned pa_source_check_suspend(pa_source *s) {
1888 pa_source_output *o;
1891 pa_source_assert_ref(s);
1892 pa_assert_ctl_context();
1894 if (!PA_SOURCE_IS_LINKED(s->state))
1899 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1900 pa_source_output_state_t st;
1902 st = pa_source_output_get_state(o);
1904 /* We do not assert here. It is perfectly valid for a source output to
1905 * be in the INIT state (i.e. created, marked done but not yet put)
1906 * and we should not care if it's unlinked as it won't contribute
1907 * towards our busy status.
1909 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1912 if (st == PA_SOURCE_OUTPUT_CORKED)
1915 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1924 /* Called from the IO thread */
1925 static void sync_output_volumes_within_thread(pa_source *s) {
1926 pa_source_output *o;
1929 pa_source_assert_ref(s);
1930 pa_source_assert_io_context(s);
1932 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1933 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1936 o->thread_info.soft_volume = o->soft_volume;
1937 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1941 /* Called from the IO thread. Only called for the root source in volume sharing
1942 * cases, except for internal recursive calls. */
1943 static void set_shared_volume_within_thread(pa_source *s) {
1944 pa_source_output *o;
1947 pa_source_assert_ref(s);
1949 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1951 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1952 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1953 set_shared_volume_within_thread(o->destination_source);
1957 /* Called from IO thread, except when it is not */
1958 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1959 pa_source *s = PA_SOURCE(object);
1960 pa_source_assert_ref(s);
1962 switch ((pa_source_message_t) code) {
1964 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1965 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1967 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1969 if (o->direct_on_input) {
1970 o->thread_info.direct_on_input = o->direct_on_input;
1971 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1974 pa_assert(!o->thread_info.attached);
1975 o->thread_info.attached = TRUE;
1980 pa_source_output_set_state_within_thread(o, o->state);
1982 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1983 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1985 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1987 /* We don't just invalidate the requested latency here,
1988 * because if we are in a move we might need to fix up the
1989 * requested latency. */
1990 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1992 /* In flat volume mode we need to update the volume as well. */
1994 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1997 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1998 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
2000 pa_source_output_set_state_within_thread(o, o->state);
2005 pa_assert(o->thread_info.attached);
2006 o->thread_info.attached = FALSE;
2008 if (o->thread_info.direct_on_input) {
2009 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
2010 o->thread_info.direct_on_input = NULL;
2013 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
2014 pa_source_output_unref(o);
2016 pa_source_invalidate_requested_latency(s, TRUE);
2018 /* In flat volume mode we need to update the volume as well. */
2020 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2023 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
2024 pa_source *root_source = pa_source_get_master(s);
2026 if (PA_LIKELY(root_source))
2027 set_shared_volume_within_thread(root_source);
2032 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
2034 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2036 pa_source_volume_change_push(s);
2038 /* Fall through ... */
2040 case PA_SOURCE_MESSAGE_SET_VOLUME:
2042 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2043 s->thread_info.soft_volume = s->soft_volume;
2046 /* Fall through ... */
2048 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
2049 sync_output_volumes_within_thread(s);
2052 case PA_SOURCE_MESSAGE_GET_VOLUME:
2054 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2056 pa_source_volume_change_flush(s);
2057 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2060 /* In case the source implementor has reset the SW volume. */
2061 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2062 s->thread_info.soft_volume = s->soft_volume;
2067 case PA_SOURCE_MESSAGE_SET_MUTE:
2069 if (s->thread_info.soft_muted != s->muted) {
2070 s->thread_info.soft_muted = s->muted;
2073 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->set_mute)
2078 case PA_SOURCE_MESSAGE_GET_MUTE:
2080 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->get_mute)
2085 case PA_SOURCE_MESSAGE_SET_STATE: {
2087 pa_bool_t suspend_change =
2088 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2089 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2091 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2093 if (suspend_change) {
2094 pa_source_output *o;
2097 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2098 if (o->suspend_within_thread)
2099 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2105 case PA_SOURCE_MESSAGE_DETACH:
2107 /* Detach all streams */
2108 pa_source_detach_within_thread(s);
2111 case PA_SOURCE_MESSAGE_ATTACH:
2113 /* Reattach all streams */
2114 pa_source_attach_within_thread(s);
2117 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2119 pa_usec_t *usec = userdata;
2120 *usec = pa_source_get_requested_latency_within_thread(s);
2122 /* Yes, that's right, the IO thread will see -1 when no
2123 * explicit requested latency is configured, the main
2124 * thread will see max_latency */
2125 if (*usec == (pa_usec_t) -1)
2126 *usec = s->thread_info.max_latency;
2131 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2132 pa_usec_t *r = userdata;
2134 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2139 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2140 pa_usec_t *r = userdata;
2142 r[0] = s->thread_info.min_latency;
2143 r[1] = s->thread_info.max_latency;
2148 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2150 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2153 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2155 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2158 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2160 *((size_t*) userdata) = s->thread_info.max_rewind;
2163 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2165 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2168 case PA_SOURCE_MESSAGE_GET_LATENCY:
2170 if (s->monitor_of) {
2171 *((pa_usec_t*) userdata) = 0;
2175 /* Implementors need to override this implementation! */
2178 case PA_SOURCE_MESSAGE_SET_PORT:
2180 pa_assert(userdata);
2182 struct source_message_set_port *msg_data = userdata;
2183 msg_data->ret = s->set_port(s, msg_data->port);
2187 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2188 /* This message is sent from the IO thread and handled in the main thread. */
2189 pa_assert_ctl_context();
2191 /* Make sure we're not messing with the main thread when we are no longer linked */
2192 if (!PA_SOURCE_IS_LINKED(s->state))
2195 pa_source_get_volume(s, TRUE);
2196 pa_source_get_mute(s, TRUE);
2199 case PA_SOURCE_MESSAGE_SET_LATENCY_OFFSET:
2200 s->thread_info.latency_offset = offset;
2203 case PA_SOURCE_MESSAGE_MAX:
2210 /* Called from main thread */
2211 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2216 pa_core_assert_ref(c);
2217 pa_assert_ctl_context();
2218 pa_assert(cause != 0);
2220 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2223 if (source->monitor_of)
2226 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2233 /* Called from main thread */
2234 void pa_source_detach(pa_source *s) {
2235 pa_source_assert_ref(s);
2236 pa_assert_ctl_context();
2237 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2239 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2242 /* Called from main thread */
2243 void pa_source_attach(pa_source *s) {
2244 pa_source_assert_ref(s);
2245 pa_assert_ctl_context();
2246 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2248 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2251 /* Called from IO thread */
2252 void pa_source_detach_within_thread(pa_source *s) {
2253 pa_source_output *o;
2256 pa_source_assert_ref(s);
2257 pa_source_assert_io_context(s);
2258 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2260 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2265 /* Called from IO thread */
2266 void pa_source_attach_within_thread(pa_source *s) {
2267 pa_source_output *o;
2270 pa_source_assert_ref(s);
2271 pa_source_assert_io_context(s);
2272 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2274 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2279 /* Called from IO thread */
2280 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2281 pa_usec_t result = (pa_usec_t) -1;
2282 pa_source_output *o;
2285 pa_source_assert_ref(s);
2286 pa_source_assert_io_context(s);
2288 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2289 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2291 if (s->thread_info.requested_latency_valid)
2292 return s->thread_info.requested_latency;
2294 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2295 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2296 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2297 result = o->thread_info.requested_source_latency;
2299 if (result != (pa_usec_t) -1)
2300 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2302 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2303 /* Only cache this if we are fully set up */
2304 s->thread_info.requested_latency = result;
2305 s->thread_info.requested_latency_valid = TRUE;
2311 /* Called from main thread */
2312 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2315 pa_source_assert_ref(s);
2316 pa_assert_ctl_context();
2317 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2319 if (s->state == PA_SOURCE_SUSPENDED)
2322 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
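/* Illustrative contrast (hypothetical call sites): for a running source with
 * PA_SOURCE_DYNAMIC_LATENCY and no output requesting a latency,
 *
 *     pa_usec_t a = pa_source_get_requested_latency(s);                main thread
 *     pa_usec_t b = pa_source_get_requested_latency_within_thread(s);  IO thread
 *
 * 'b' is (pa_usec_t) -1 while 'a' equals the configured max_latency, because the
 * PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY handler maps -1 to
 * thread_info.max_latency before handing the value back to the main thread. */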
2327 /* Called from IO thread */
2328 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2329 pa_source_output *o;
2332 pa_source_assert_ref(s);
2333 pa_source_assert_io_context(s);
2335 if (max_rewind == s->thread_info.max_rewind)
2338 s->thread_info.max_rewind = max_rewind;
2340 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2341 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2342 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2345 /* Called from main thread */
2346 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2347 pa_source_assert_ref(s);
2348 pa_assert_ctl_context();
2350 if (PA_SOURCE_IS_LINKED(s->state))
2351 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2353 pa_source_set_max_rewind_within_thread(s, max_rewind);
2356 /* Called from IO thread */
2357 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2358 pa_source_output *o;
2361 pa_source_assert_ref(s);
2362 pa_source_assert_io_context(s);
2364 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2365 s->thread_info.requested_latency_valid = FALSE;
2369 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2371 if (s->update_requested_latency)
2372 s->update_requested_latency(s);
2374 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2375 if (o->update_source_requested_latency)
2376 o->update_source_requested_latency(o);
2380 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2383 /* Called from main thread */
2384 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2385 pa_source_assert_ref(s);
2386 pa_assert_ctl_context();
2388 /* min_latency == 0: no limit
2389 * min_latency anything else: specified limit
2391 * Similar for max_latency */
2393 if (min_latency < ABSOLUTE_MIN_LATENCY)
2394 min_latency = ABSOLUTE_MIN_LATENCY;
2396 if (max_latency <= 0 ||
2397 max_latency > ABSOLUTE_MAX_LATENCY)
2398 max_latency = ABSOLUTE_MAX_LATENCY;
2400 pa_assert(min_latency <= max_latency);
2402 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2403 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2404 max_latency == ABSOLUTE_MAX_LATENCY) ||
2405 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2407 if (PA_SOURCE_IS_LINKED(s->state)) {
2413 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2415 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
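/* Illustrative usage (hypothetical driver code, values made up): a backend that
 * created its source with PA_SOURCE_DYNAMIC_LATENCY would typically announce its
 * range before pa_source_put(), e.g.
 *
 *     pa_source_set_latency_range(s, 3 * PA_USEC_PER_MSEC, 2 * PA_USEC_PER_SEC);
 *
 * Passing 0 for either bound means "no limit": a zero min_latency is raised to
 * ABSOLUTE_MIN_LATENCY and a zero max_latency falls back to ABSOLUTE_MAX_LATENCY,
 * as implemented above. */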
2418 /* Called from main thread */
2419 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2420 pa_source_assert_ref(s);
2421 pa_assert_ctl_context();
2422 pa_assert(min_latency);
2423 pa_assert(max_latency);
2425 if (PA_SOURCE_IS_LINKED(s->state)) {
2426 pa_usec_t r[2] = { 0, 0 };
2428 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2430 *min_latency = r[0];
2431 *max_latency = r[1];
2433 *min_latency = s->thread_info.min_latency;
2434 *max_latency = s->thread_info.max_latency;
2438 /* Called from IO thread, and from main thread before pa_source_put() is called */
2439 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2440 pa_source_assert_ref(s);
2441 pa_source_assert_io_context(s);
2443 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2444 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2445 pa_assert(min_latency <= max_latency);
2447 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2448 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2449 max_latency == ABSOLUTE_MAX_LATENCY) ||
2450 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2453 if (s->thread_info.min_latency == min_latency &&
2454 s->thread_info.max_latency == max_latency)
2457 s->thread_info.min_latency = min_latency;
2458 s->thread_info.max_latency = max_latency;
2460 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2461 pa_source_output *o;
2464 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2465 if (o->update_source_latency_range)
2466 o->update_source_latency_range(o);
2469 pa_source_invalidate_requested_latency(s, FALSE);
2472 /* Called from main thread, before the source is put */
2473 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2474 pa_source_assert_ref(s);
2475 pa_assert_ctl_context();
2477 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2478 pa_assert(latency == 0);
2482 if (latency < ABSOLUTE_MIN_LATENCY)
2483 latency = ABSOLUTE_MIN_LATENCY;
2485 if (latency > ABSOLUTE_MAX_LATENCY)
2486 latency = ABSOLUTE_MAX_LATENCY;
2488 if (PA_SOURCE_IS_LINKED(s->state))
2489 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2491 s->thread_info.fixed_latency = latency;
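/* Illustrative usage (hypothetical driver code, value made up): a backend without
 * PA_SOURCE_DYNAMIC_LATENCY announces a single fixed value instead, typically
 * before pa_source_put(), e.g.
 *
 *     pa_source_set_fixed_latency(s, 25 * PA_USEC_PER_MSEC);
 *
 * Values are clamped to [ABSOLUTE_MIN_LATENCY, ABSOLUTE_MAX_LATENCY]; for
 * dynamic-latency sources the only accepted value is 0. */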
2494 /* Called from main thread */
2495 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2498 pa_source_assert_ref(s);
2499 pa_assert_ctl_context();
2501 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2504 if (PA_SOURCE_IS_LINKED(s->state))
2505 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2507 latency = s->thread_info.fixed_latency;
2512 /* Called from IO thread */
2513 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2514 pa_source_assert_ref(s);
2515 pa_source_assert_io_context(s);
2517 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2518 pa_assert(latency == 0);
2522 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2523 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2525 if (s->thread_info.fixed_latency == latency)
2528 s->thread_info.fixed_latency = latency;
2530 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2531 pa_source_output *o;
2534 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2535 if (o->update_source_fixed_latency)
2536 o->update_source_fixed_latency(o);
2539 pa_source_invalidate_requested_latency(s, FALSE);
2542 /* Called from main thread */
2543 void pa_source_set_latency_offset(pa_source *s, int64_t offset) {
2544 pa_source_assert_ref(s);
2546 s->latency_offset = offset;
2548 if (PA_SOURCE_IS_LINKED(s->state))
2549 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_OFFSET, NULL, offset, NULL) == 0);
2551 s->thread_info.latency_offset = offset;
2554 /* Called from main thread */
2555 size_t pa_source_get_max_rewind(pa_source *s) {
2557 pa_assert_ctl_context();
2558 pa_source_assert_ref(s);
2560 if (!PA_SOURCE_IS_LINKED(s->state))
2561 return s->thread_info.max_rewind;
2563 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2568 /* Called from main context */
2569 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2570 pa_device_port *port;
2573 pa_source_assert_ref(s);
2574 pa_assert_ctl_context();
2577 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2578 return -PA_ERR_NOTIMPLEMENTED;
2582 return -PA_ERR_NOENTITY;
2584 if (!(port = pa_hashmap_get(s->ports, name)))
2585 return -PA_ERR_NOENTITY;
2587 if (s->active_port == port) {
2588 s->save_port = s->save_port || save;
2592 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2593 struct source_message_set_port msg = { .port = port, .ret = 0 };
2594 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2598 ret = s->set_port(s, port);
2601 return -PA_ERR_NOENTITY;
2603 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2605 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2607 s->active_port = port;
2608 s->save_port = save;
2610 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
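/* Illustrative usage (hypothetical caller, e.g. a protocol or CLI handler; the
 * port name shown is made up):
 *
 *     if (pa_source_set_port(s, "analog-input-microphone", TRUE) < 0)
 *         pa_log_warn("Failed to switch source port");
 *
 * The name must match an entry in s->ports; passing TRUE for save marks the
 * selection so that restore modules may persist it. */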
2615 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2617 /* Called from the IO thread. */
2618 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2619 pa_source_volume_change *c;
2620 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2621 c = pa_xnew(pa_source_volume_change, 1);
2623 PA_LLIST_INIT(pa_source_volume_change, c);
2625 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2629 /* Called from the IO thread. */
2630 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2632 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2636 /* Called from the IO thread. */
2637 void pa_source_volume_change_push(pa_source *s) {
2638 pa_source_volume_change *c = NULL;
2639 pa_source_volume_change *nc = NULL;
2640 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2642 const char *direction = NULL;
2645 nc = pa_source_volume_change_new(s);
2647 /* NOTE: There are already more different volumes in pa_source than I can remember.
2648 * Adding one more volume for HW would let us get rid of this, but I am trying
2649 * to survive with the ones we already have. */
2650 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2652 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2653 pa_log_debug("Volume not changing");
2654 pa_source_volume_change_free(nc);
2658 nc->at = pa_source_get_latency_within_thread(s);
2659 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2661 if (s->thread_info.volume_changes_tail) {
2662 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2663 /* If the volume is going up, apply it a bit late; if it is going down,
2664 * apply it a bit early (see the worked example after this function). */
2665 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2666 if (nc->at + safety_margin > c->at) {
2667 nc->at += safety_margin;
2672 else if (nc->at - safety_margin > c->at) {
2673 nc->at -= safety_margin;
2681 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2682 nc->at += safety_margin;
2685 nc->at -= safety_margin;
2688 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2691 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2694 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2696 /* We can drop any queued changes that were submitted earlier but are scheduled to take effect later than this one. */
2697 PA_LLIST_FOREACH(c, nc->next) {
2698 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2699 pa_source_volume_change_free(c);
2702 s->thread_info.volume_changes_tail = nc;
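/* Worked example (illustrative numbers): with a source latency of 10 ms, a safety
 * margin of 8 ms, no extra delay and an empty change queue, a change pushed "now"
 * is first scheduled for now + 10 ms.  If it raises the volume relative to the
 * current HW volume it is postponed to now + 18 ms; if it lowers it, it is advanced
 * to now + 2 ms -- i.e. increases are applied a bit late and decreases a bit early,
 * as described above. */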
2705 /* Called from the IO thread. */
2706 static void pa_source_volume_change_flush(pa_source *s) {
2707 pa_source_volume_change *c = s->thread_info.volume_changes;
2709 s->thread_info.volume_changes = NULL;
2710 s->thread_info.volume_changes_tail = NULL;
2712 pa_source_volume_change *next = c->next;
2713 pa_source_volume_change_free(c);
2718 /* Called from the IO thread. */
2719 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2721 pa_bool_t ret = FALSE;
2725 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2731 pa_assert(s->write_volume);
2733 now = pa_rtclock_now();
2735 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2736 pa_source_volume_change *c = s->thread_info.volume_changes;
2737 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2738 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2739 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2741 s->thread_info.current_hw_volume = c->hw_volume;
2742 pa_source_volume_change_free(c);
2748 if (s->thread_info.volume_changes) {
2750 *usec_to_next = s->thread_info.volume_changes->at - now;
2751 if (pa_log_ratelimit(PA_LOG_DEBUG))
2752 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2757 s->thread_info.volume_changes_tail = NULL;
2763 /* Called from the main thread */
2764 /* Gets the list of formats supported by the source. The idxset and its
2765 * members must be freed by the caller (see the example after this function). */
2766 pa_idxset* pa_source_get_formats(pa_source *s) {
2771 if (s->get_formats) {
2772 /* Source supports format query, all is good */
2773 ret = s->get_formats(s);
2775 /* Source doesn't support format query, so assume it does PCM */
2776 pa_format_info *f = pa_format_info_new();
2777 f->encoding = PA_ENCODING_PCM;
2779 ret = pa_idxset_new(NULL, NULL);
2780 pa_idxset_put(ret, f, NULL);
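/* Illustrative caller (hypothetical): the returned idxset and its members belong
 * to the caller, so a typical consumer releases them the same way the helpers
 * further below do, e.g.
 *
 *     pa_idxset *formats = pa_source_get_formats(s);
 *     unsigned n = pa_idxset_size(formats);
 *     pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
 */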
2786 /* Called from the main thread */
2787 /* Checks if the source can accept this format */
2788 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2790 pa_idxset *formats = NULL;
2791 pa_bool_t ret = FALSE;
2796 formats = pa_source_get_formats(s);
2799 pa_format_info *finfo_device;
2802 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2803 if (pa_format_info_is_compatible(finfo_device, f)) {
2809 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2815 /* Called from the main thread */
2816 /* Calculates the intersection of the formats supported by the source and in_formats,
2817 * returning the matches in the order of the source's formats (see the worked example after this function). */
2818 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2819 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2820 pa_format_info *f_source, *f_in;
2825 if (!in_formats || pa_idxset_isempty(in_formats))
2828 source_formats = pa_source_get_formats(s);
2830 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2831 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2832 if (pa_format_info_is_compatible(f_source, f_in))
2833 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2839 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
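/* Worked example (illustrative): if the source reports { PCM, AC3 } and in_formats
 * contains { AC3, DTS, PCM }, the returned out_formats is { PCM, AC3 } -- DTS is
 * dropped and the ordering follows the source's own format list, because the outer
 * loop above iterates the source's formats first. */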