@@ -1495,14 +1495,22 @@ void debug_dma_alloc_coherent(struct device *dev, size_t size,
if (!entry)
return;

+ /* handle vmalloc and linear addresses */
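+ /* anything else cannot be translated to a pfn here, so skip tracking it */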
+ if (!is_vmalloc_addr(virt) && !virt_addr_valid(virt))
+ return;
+
entry->type = dma_debug_coherent;
entry->dev = dev;
- entry->pfn = page_to_pfn(virt_to_page(virt));
entry->offset = offset_in_page(virt);
entry->size = size;
entry->dev_addr = dma_addr;
entry->direction = DMA_BIDIRECTIONAL;

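+ /* vmalloc'ed buffers are not in the linear mapping, so derive the pfn from the page tables */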
+ if (is_vmalloc_addr(virt))
+ entry->pfn = vmalloc_to_pfn(virt);
+ else
+ entry->pfn = page_to_pfn(virt_to_page(virt));
+
add_dma_entry(entry);
}
EXPORT_SYMBOL(debug_dma_alloc_coherent);
@@ -1513,13 +1521,21 @@ void debug_dma_free_coherent(struct device *dev, size_t size,
struct dma_debug_entry ref = {
.type = dma_debug_coherent,
.dev = dev,
- .pfn = page_to_pfn(virt_to_page(virt)),
.offset = offset_in_page(virt),
.dev_addr = addr,
.size = size,
.direction = DMA_BIDIRECTIONAL,
};

+ /* handle vmalloc and linear addresses */
+ if (!is_vmalloc_addr(virt) && !virt_addr_valid(virt))
+ return;
+
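+ /* use the same pfn lookup as debug_dma_alloc_coherent() */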
+ if (is_vmalloc_addr(virt))
+ ref.pfn = vmalloc_to_pfn(virt);
+ else
+ ref.pfn = page_to_pfn(virt_to_page(virt));
+
if (unlikely(dma_debug_disabled()))
return;