 	buffer->access->set_bytes_per_datum(buffer, bytes);
 }
 
+static void iio_free_scan_mask(struct iio_dev *indio_dev,
+	const unsigned long *mask)
+{
+	/* If the mask is dynamically allocated free it, otherwise do nothing */
+	if (!indio_dev->available_scan_masks)
+		kfree(mask);
+}
+
 static int __iio_update_buffers(struct iio_dev *indio_dev,
 			struct iio_buffer *insert_buffer,
 			struct iio_buffer *remove_buffer)
 	/* If no buffers in list, we are done */
 	if (list_empty(&indio_dev->buffer_list)) {
 		indio_dev->currentmode = INDIO_DIRECT_MODE;
-		if (indio_dev->available_scan_masks == NULL)
-			kfree(old_mask);
+		iio_free_scan_mask(indio_dev, old_mask);
 		return 0;
 	}
 	compound_mask = kcalloc(BITS_TO_LONGS(indio_dev->masklength),
 				sizeof(long), GFP_KERNEL);
 	if (compound_mask == NULL) {
-		if (indio_dev->available_scan_masks == NULL)
-			kfree(old_mask);
+		iio_free_scan_mask(indio_dev, old_mask);
 		return -ENOMEM;
 	}
 	indio_dev->scan_timestamp = 0;
 			iio_scan_mask_match(indio_dev->available_scan_masks,
 					    indio_dev->masklength,
 					    compound_mask);
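+		/*
+		 * A match, if found, points into available_scan_masks, so the
+		 * temporary compound mask can be freed right away.
+		 */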
+		kfree(compound_mask);
 		if (indio_dev->active_scan_mask == NULL) {
 			/*
 			 * Roll back.
 				success = -EINVAL;
 			}
 			else {
-				kfree(compound_mask);
 				ret = -EINVAL;
 				return ret;
 			}
 		}
 	}
-	if (indio_dev->available_scan_masks)
-		kfree(compound_mask);
-	else
-		kfree(old_mask);
+	iio_free_scan_mask(indio_dev, old_mask);
 
 	return success;
 error_remove_inserted:
 	if (insert_buffer)
 		iio_buffer_deactivate(insert_buffer);
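+	/* Drop the mask installed by this update before restoring the old one */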
+	iio_free_scan_mask(indio_dev, indio_dev->active_scan_mask);
 	indio_dev->active_scan_mask = old_mask;
-	kfree(compound_mask);
 	return ret;
 }