// SPDX-License-Identifier: GPL-2.0+
/*
 * Generic bounce buffer implementation
 *
 * Copyright (C) 2012 Marek Vasut <marex@denx.de>
 */
#include <bouncebuf.h>
#include <errno.h>
#include <malloc.h>
#include <asm/cache.h>
16 static int addr_aligned(struct bounce_buffer *state)
18 const ulong align_mask = ARCH_DMA_MINALIGN - 1;
20 /* Check if start is aligned */
21 if ((ulong)state->user_buffer & align_mask) {
22 debug("Unaligned buffer address %p\n", state->user_buffer);
26 /* Check if length is aligned */
27 if (state->len != state->len_aligned) {
28 debug("Unaligned buffer length %zu\n", state->len);
36 int bounce_buffer_start_extalign(struct bounce_buffer *state, void *data,
37 size_t len, unsigned int flags,
39 int (*addr_is_aligned)(struct bounce_buffer *state))
41 state->user_buffer = data;
42 state->bounce_buffer = data;
44 state->len_aligned = roundup(len, alignment);
47 if (!addr_is_aligned(state)) {
48 state->bounce_buffer = memalign(alignment,
50 if (!state->bounce_buffer)
53 if (state->flags & GEN_BB_READ)
54 memcpy(state->bounce_buffer, state->user_buffer,
59 * Flush data to RAM so DMA reads can pick it up,
60 * and any CPU writebacks don't race with DMA writes
62 flush_dcache_range((unsigned long)state->bounce_buffer,
63 (unsigned long)(state->bounce_buffer) +
69 int bounce_buffer_start(struct bounce_buffer *state, void *data,
70 size_t len, unsigned int flags)
72 return bounce_buffer_start_extalign(state, data, len, flags,
77 int bounce_buffer_stop(struct bounce_buffer *state)
79 if (state->flags & GEN_BB_WRITE) {
80 /* Invalidate cache so that CPU can see any newly DMA'd data */
81 invalidate_dcache_range((unsigned long)state->bounce_buffer,
82 (unsigned long)(state->bounce_buffer) +
86 if (state->bounce_buffer == state->user_buffer)
89 if (state->flags & GEN_BB_WRITE)
90 memcpy(state->user_buffer, state->bounce_buffer, state->len);
92 free(state->bounce_buffer);