* @pwr_down: bitmask indicating which channels are powered down
* @ctrl: software shadow of the channel ctrl registers
* @channels: iio channel spec for the device
+ * @lock: lock to protect the data buffer during SPI ops
* @data: spi transfer buffers
*/
struct ad5755_state {
unsigned int pwr_down;
unsigned int ctrl[AD5755_NUM_CHANNELS];
struct iio_chan_spec channels[AD5755_NUM_CHANNELS];
+ struct mutex lock;
/*
* DMA (thus cache coherency maintenance) requires the
* transfer buffers to live in their own cache lines.
*/
static int ad5755_write(struct iio_dev *indio_dev, unsigned int reg,
unsigned int val)
{
+ struct ad5755_state *st = iio_priv(indio_dev);
int ret;
- mutex_lock(&indio_dev->mlock);
+ mutex_lock(&st->lock);
ret = ad5755_write_unlocked(indio_dev, reg, val);
- mutex_unlock(&indio_dev->mlock);
+ mutex_unlock(&st->lock);
return ret;
}
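/*
* ad5755_write_ctrl() below follows the same pattern: the plain helper
* takes st->lock around its _unlocked counterpart, which callers that
* already hold the lock can invoke directly.
*/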
static int ad5755_write_ctrl(struct iio_dev *indio_dev, unsigned int channel,
unsigned int reg, unsigned int val)
{
+ struct ad5755_state *st = iio_priv(indio_dev);
int ret;
- mutex_lock(&indio_dev->mlock);
+ mutex_lock(&st->lock);
ret = ad5755_write_ctrl_unlocked(indio_dev, channel, reg, val);
- mutex_unlock(&indio_dev->mlock);
+ mutex_unlock(&st->lock);
return ret;
}
},
};
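/*
* Register read-back takes two transfers: the first shifts out the
* address with AD5755_READ_FLAG set, the second clocks out a NOOP
* command while the previous response is shifted back in.
*/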
- mutex_lock(&indio_dev->mlock);
+ mutex_lock(&st->lock);
st->data[0].d32 = cpu_to_be32(AD5755_READ_FLAG | (addr << 16));
st->data[1].d32 = cpu_to_be32(AD5755_NOOP);
ret = spi_sync_transfer(st->spi, t, ARRAY_SIZE(t));
if (ret >= 0)
ret = be32_to_cpu(st->data[1].d32) & 0xffff;
- mutex_unlock(&indio_dev->mlock);
+ mutex_unlock(&st->lock);
return ret;
}
struct ad5755_state *st = iio_priv(indio_dev);
unsigned int mask = BIT(channel);
- mutex_lock(&indio_dev->mlock);
+ mutex_lock(&st->lock);
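/* Nothing to do if the channel is already in the requested state. */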
if ((bool)(st->pwr_down & mask) == pwr_down)
goto out_unlock;
}
out_unlock:
- mutex_unlock(&indio_dev->mlock);
+ mutex_unlock(&st->lock);
return 0;
}
indio_dev->modes = INDIO_DIRECT_MODE;
indio_dev->num_channels = AD5755_NUM_CHANNELS;
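+ /* Initialize the driver-local lock before the first register access below. */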
+ mutex_init(&st->lock);
+
if (spi->dev.of_node)
pdata = ad5755_parse_dt(&spi->dev);
else