// SPDX-License-Identifier: GPL-2.0+
/*
 * Generic bounce buffer implementation
 *
 * Copyright (C) 2012 Marek Vasut <marex@denx.de>
 */
#include <common.h>
#include <malloc.h>
#include <errno.h>
#include <bouncebuf.h>
14 static int addr_aligned(struct bounce_buffer *state)
16 const ulong align_mask = ARCH_DMA_MINALIGN - 1;
18 /* Check if start is aligned */
19 if ((ulong)state->user_buffer & align_mask) {
20 debug("Unaligned buffer address %p\n", state->user_buffer);
24 /* Check if length is aligned */
25 if (state->len != state->len_aligned) {
26 debug("Unaligned buffer length %zu\n", state->len);
34 int bounce_buffer_start_extalign(struct bounce_buffer *state, void *data,
35 size_t len, unsigned int flags,
37 int (*addr_is_aligned)(struct bounce_buffer *state))
39 state->user_buffer = data;
40 state->bounce_buffer = data;
42 state->len_aligned = roundup(len, alignment);
45 if (!addr_is_aligned(state)) {
46 state->bounce_buffer = memalign(alignment,
48 if (!state->bounce_buffer)
51 if (state->flags & GEN_BB_READ)
52 memcpy(state->bounce_buffer, state->user_buffer,
57 * Flush data to RAM so DMA reads can pick it up,
58 * and any CPU writebacks don't race with DMA writes
60 flush_dcache_range((unsigned long)state->bounce_buffer,
61 (unsigned long)(state->bounce_buffer) +
67 int bounce_buffer_start(struct bounce_buffer *state, void *data,
68 size_t len, unsigned int flags)
70 return bounce_buffer_start_extalign(state, data, len, flags,
75 int bounce_buffer_stop(struct bounce_buffer *state)
77 if (state->flags & GEN_BB_WRITE) {
78 /* Invalidate cache so that CPU can see any newly DMA'd data */
79 invalidate_dcache_range((unsigned long)state->bounce_buffer,
80 (unsigned long)(state->bounce_buffer) +
84 if (state->bounce_buffer == state->user_buffer)
87 if (state->flags & GEN_BB_WRITE)
88 memcpy(state->user_buffer, state->bounce_buffer, state->len);
90 free(state->bounce_buffer);