* lock, let's make sure it's still not referenced before freeing it.
*/
if (p_atomic_read(&bo->refcnt) == 0) {
- assert(!bo->writer_syncobj);
+ assert(!p_atomic_read_relaxed(&bo->writer_syncobj));
if (dev->debug & AGX_DBG_TRACE)
agxdecode_track_free(bo);
int prime_fd;
/* Syncobj handle of the current writer, if any */
- int writer_syncobj;
+ uint32_t writer_syncobj;
/* Globally unique value (system wide) for tracing. Exists for resources,
 * command buffers, GPU submissions, segments, segment lists, encoders,
/* If there is a pending writer to this BO, import it into the buffer
* for implicit sync.
*/
- if (bo->writer_syncobj) {
+ uint32_t writer_syncobj = p_atomic_read_relaxed(&bo->writer_syncobj);
+ if (writer_syncobj) {
int out_sync_fd = -1;
- int ret = drmSyncobjExportSyncFile(bo->dev->fd, bo->writer_syncobj,
- &out_sync_fd);
+ int ret =
+ drmSyncobjExportSyncFile(bo->dev->fd, writer_syncobj, &out_sync_fd);
assert(ret >= 0);
assert(out_sync_fd >= 0);
/* There is no more writer on this context for anything we wrote */
struct agx_batch *writer = agx_writer_get(ctx, handle);
- if (writer == batch) {
- assert(bo->writer_syncobj == batch->syncobj);
+ if (writer == batch)
agx_writer_remove(ctx, handle);
- }
- if (bo->writer_syncobj == batch->syncobj)
- bo->writer_syncobj = 0;
+ p_atomic_cmpxchg(&bo->writer_syncobj, batch->syncobj, 0);
agx_bo_unreference(agx_lookup_bo(dev, handle));
}
/* But any BOs written by active batches are ours */
assert(writer == batch && "exclusive writer");
- bo->writer_syncobj = batch->syncobj;
+ p_atomic_set(&bo->writer_syncobj, batch->syncobj);
}
free(in_syncs);