 		dma_cap_set(DMA_XOR_VAL, dma->cap_mask);
 		dma->device_prep_dma_xor_val = ioat3_prep_xor_val;
 	}
+
 	if (cap & IOAT_CAP_PQ) {
 		is_raid_device = true;
 		dma_set_maxpq(dma, 8, 0);
-		dma->pq_align = 6;
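+		/*
+		 * advertise cacheline (1 << 6 = 64 byte) alignment for raid
+		 * operands only on cb3.2; other engines have no alignment
+		 * requirement
+		 */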
+		if (is_xeon_cb32(pdev))
+			dma->pq_align = 6;
+		else
+			dma->pq_align = 0;
 		dma_cap_set(DMA_PQ, dma->cap_mask);
 		dma->device_prep_dma_pq = ioat3_prep_pq;
 		if (!(cap & IOAT_CAP_XOR)) {
 			dma->max_xor = 8;
-			dma->xor_align = 6;
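+			/* same cb3.2-only alignment rule for the pq-backed xor path */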
+			if (is_xeon_cb32(pdev))
+				dma->xor_align = 6;
+			else
+				dma->xor_align = 0;
 			dma_cap_set(DMA_XOR, dma->cap_mask);
 			dma->device_prep_dma_xor = ioat3_prep_pqxor;
 			dma->device_prep_dma_xor_val = ioat3_prep_pqxor_val;
 		}
 	}
+
 	if (is_raid_device && (cap & IOAT_CAP_FILL_BLOCK)) {
 		dma_cap_set(DMA_MEMSET, dma->cap_mask);
 		dma->device_prep_dma_memset = ioat3_prep_memset_lock;