 	if (!entry)
 		return;
 
+	/* handle vmalloc and linear addresses */
+	if (!is_vmalloc_addr(virt) && !virt_to_page(virt))
+		return;
+
 	entry->type = dma_debug_coherent;
 	entry->dev = dev;
-	entry->pfn = page_to_pfn(virt_to_page(virt));
 	entry->offset = offset_in_page(virt);
 	entry->size = size;
 	entry->dev_addr = dma_addr;
 	entry->direction = DMA_BIDIRECTIONAL;
 
+	if (is_vmalloc_addr(virt))
+		entry->pfn = vmalloc_to_pfn(virt);
+	else
+		entry->pfn = page_to_pfn(virt_to_page(virt));
+
 	add_dma_entry(entry);
 }
 EXPORT_SYMBOL(debug_dma_alloc_coherent);
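The second hunk applies the same vmalloc-aware pfn lookup in the coherent free path. As an aside, the logic added in both places amounts to the sketch below; dma_debug_virt_to_pfn() is a hypothetical helper used only for illustration and is not part of the patch. The point is that a vmalloc-range address has to be resolved through the page tables with vmalloc_to_pfn(), while a linear-map address can keep using page_to_pfn(virt_to_page()).

/*
 * Illustrative sketch only: dma_debug_virt_to_pfn() is a hypothetical
 * helper, not part of this patch.
 */
#include <linux/mm.h>
#include <linux/vmalloc.h>

static unsigned long dma_debug_virt_to_pfn(const void *virt)
{
	/* vmalloc/remapped addresses: walk the page tables */
	if (is_vmalloc_addr(virt))
		return vmalloc_to_pfn(virt);

	/* linear-map addresses: direct virt-to-page arithmetic is valid */
	return page_to_pfn(virt_to_page(virt));
}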
 	struct dma_debug_entry ref = {
 		.type = dma_debug_coherent,
 		.dev = dev,
-		.pfn = page_to_pfn(virt_to_page(virt)),
 		.offset = offset_in_page(virt),
 		.dev_addr = addr,
 		.size = size,
 		.direction = DMA_BIDIRECTIONAL,
 	};
 
+	/* handle vmalloc and linear addresses */
+	if (!is_vmalloc_addr(virt) && !virt_to_page(virt))
+		return;
+
+	if (is_vmalloc_addr(virt))
+		ref.pfn = vmalloc_to_pfn(virt);
+	else
+		ref.pfn = page_to_pfn(virt_to_page(virt));
+
 	if (unlikely(dma_debug_disabled()))
 		return;
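For context, a minimal sketch of the case these checks guard against, assuming a platform where dma_alloc_coherent() can return a remapped (vmalloc-range) virtual address: feeding such an address to virt_to_page()/page_to_pfn() yields a meaningless pfn, whereas vmalloc_to_pfn() resolves the real backing page. The function and variable names below are placeholders, not taken from the patch.

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/vmalloc.h>

/* Hypothetical example, not part of the patch. */
static void pfn_of_coherent_buffer(struct device *dev, size_t size)
{
	dma_addr_t dma_handle;
	void *virt = dma_alloc_coherent(dev, size, &dma_handle, GFP_KERNEL);

	if (!virt)
		return;

	if (is_vmalloc_addr(virt))
		/* remapped buffer: virt_to_page() would be meaningless here */
		dev_info(dev, "coherent pfn: %lx\n", vmalloc_to_pfn(virt));
	else
		dev_info(dev, "coherent pfn: %lx\n",
			 page_to_pfn(virt_to_page(virt)));

	dma_free_coherent(dev, size, virt, dma_handle);
}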