// SPDX-License-Identifier: GPL-2.0+
/*
 * Generic bounce buffer implementation
 *
 * Copyright (C) 2012 Marek Vasut <marex@denx.de>
 */
#include <bouncebuf.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <asm/cache.h>
#include <linux/dma-mapping.h>
17 static int addr_aligned(struct bounce_buffer *state)
19 const ulong align_mask = ARCH_DMA_MINALIGN - 1;
21 /* Check if start is aligned */
22 if ((ulong)state->user_buffer & align_mask) {
23 debug("Unaligned buffer address %p\n", state->user_buffer);
27 /* Check if length is aligned */
28 if (state->len != state->len_aligned) {
29 debug("Unaligned buffer length %zu\n", state->len);
37 int bounce_buffer_start_extalign(struct bounce_buffer *state, void *data,
38 size_t len, unsigned int flags,
40 int (*addr_is_aligned)(struct bounce_buffer *state))
42 state->user_buffer = data;
43 state->bounce_buffer = data;
45 state->len_aligned = roundup(len, alignment);
48 if (!addr_is_aligned(state)) {
49 state->bounce_buffer = memalign(alignment,
51 if (!state->bounce_buffer)
54 if (state->flags & GEN_BB_READ)
55 memcpy(state->bounce_buffer, state->user_buffer,
60 * Flush data to RAM so DMA reads can pick it up,
61 * and any CPU writebacks don't race with DMA writes
63 dma_map_single(state->bounce_buffer,
70 int bounce_buffer_start(struct bounce_buffer *state, void *data,
71 size_t len, unsigned int flags)
73 return bounce_buffer_start_extalign(state, data, len, flags,
78 int bounce_buffer_stop(struct bounce_buffer *state)
80 if (state->flags & GEN_BB_WRITE) {
81 /* Invalidate cache so that CPU can see any newly DMA'd data */
82 dma_unmap_single((dma_addr_t)state->bounce_buffer,
87 if (state->bounce_buffer == state->user_buffer)
90 if (state->flags & GEN_BB_WRITE)
91 memcpy(state->user_buffer, state->bounce_buffer, state->len);
93 free(state->bounce_buffer);