mlir::shape::ToExtentTensorOp::areCastCompatible did not accept inputs with a
static dimension, but such inputs are allowed.
return false;
if (auto inputTensor = inputs[0].dyn_cast<RankedTensorType>()) {
if (!inputTensor.getElementType().isa<IndexType>() ||
- inputTensor.getRank() != 1 || !inputTensor.isDynamicDim(0))
+ inputTensor.getRank() != 1)
return false;
} else if (!inputs[0].isa<ShapeType>()) {
return false;
}
+// Identity cast: source and result are both tensor<3xindex>, exercising the
+// case where the input has a static dimension (now accepted by
+// areCastCompatible).
+func @test_identity_to_extent_tensor(%arg: tensor<3xindex>) -> tensor<3xindex> {
+ %0 = shape.to_extent_tensor %arg : tensor<3xindex> -> tensor<3xindex>
+ return %0 : tensor<3xindex>
+}
+
func @test_from_extent_tensor(%arg: tensor<?xindex>) -> !shape.shape {
%0 = shape.from_extent_tensor %arg : tensor<?xindex>
return %0 : !shape.shape