1 // SPDX-License-Identifier: GPL-2.0+
/*
 * Generic bounce buffer implementation
 *
 * Copyright (C) 2012 Marek Vasut <marex@denx.de>
 */
#include <errno.h>
#include <bouncebuf.h>
#include <asm/cache.h>
15 static int addr_aligned(struct bounce_buffer *state)
17 const ulong align_mask = ARCH_DMA_MINALIGN - 1;
19 /* Check if start is aligned */
20 if ((ulong)state->user_buffer & align_mask) {
21 debug("Unaligned buffer address %p\n", state->user_buffer);
25 /* Check if length is aligned */
26 if (state->len != state->len_aligned) {
27 debug("Unaligned buffer length %zu\n", state->len);
35 int bounce_buffer_start_extalign(struct bounce_buffer *state, void *data,
36 size_t len, unsigned int flags,
38 int (*addr_is_aligned)(struct bounce_buffer *state))
40 state->user_buffer = data;
41 state->bounce_buffer = data;
43 state->len_aligned = roundup(len, alignment);
46 if (!addr_is_aligned(state)) {
47 state->bounce_buffer = memalign(alignment,
49 if (!state->bounce_buffer)
52 if (state->flags & GEN_BB_READ)
53 memcpy(state->bounce_buffer, state->user_buffer,
58 * Flush data to RAM so DMA reads can pick it up,
59 * and any CPU writebacks don't race with DMA writes
61 flush_dcache_range((unsigned long)state->bounce_buffer,
62 (unsigned long)(state->bounce_buffer) +
68 int bounce_buffer_start(struct bounce_buffer *state, void *data,
69 size_t len, unsigned int flags)
71 return bounce_buffer_start_extalign(state, data, len, flags,
76 int bounce_buffer_stop(struct bounce_buffer *state)
78 if (state->flags & GEN_BB_WRITE) {
79 /* Invalidate cache so that CPU can see any newly DMA'd data */
80 invalidate_dcache_range((unsigned long)state->bounce_buffer,
81 (unsigned long)(state->bounce_buffer) +
85 if (state->bounce_buffer == state->user_buffer)
88 if (state->flags & GEN_BB_WRITE)
89 memcpy(state->user_buffer, state->bounce_buffer, state->len);
91 free(state->bounce_buffer);