As a convenience to the user, top-level sequence ops can optionally be used as matchers: the op type is specified by the type of the block argument.
This is similar to how pass pipeline targets can be specified on the command line (`-pass-pipeline='builtin.module(func.func(...))'`).
Differential Revision: https://reviews.llvm.org/D153121
dialect. Operand omission is only allowed for sequences not contained in
another sequence.
+ The type of the block argument must match the type of the operand. If the
+ sequence is a top-level transform (without an operand), it can be used for
+ matching operations of the specified type within the top-level container of
+ the payload IR (including the container op itself). E.g.:
+
+ ```mlir
+ transform.sequence failures(propagate) {
+ ^bb1(%arg1: !transform.any_op):
+ // %arg1 is mapped to the top-level container of the payload IR, which is
+ // typically a module
+ }
+
+ transform.sequence failures(propagate) {
+ ^bb1(%arg1: !transform.op<"func.func">):
+ // %arg1 is mapped to all "func.func" ops within and including the
+ // top-level container of the payload IR. Nested operations that have the
+ // specified op type are not included.
+ }
+ ```
+
The body of the sequence terminates with an implicit or explicit
`transform.yield` op. The operands of the terminator are returned as the
results of the sequence op.
<< " were provided to the interpreter";
}
- targets.push_back(state.getTopLevel());
+ // Top-level transforms can be used for matching. If no concrete operation
+ // type is specified, the block argument is mapped to the top-level op.
+ // Otherwise, it is mapped to all ops of the specified type within the
+ // top-level op (including the top-level op itself). Once an op is added as
+ // a target, its descendants are not explored any further.
+ BlockArgument bbArg = region.front().getArgument(0);
+ if (auto bbArgType = dyn_cast<transform::OperationType>(bbArg.getType())) {
+ state.getTopLevel()->walk<WalkOrder::PreOrder>([&](Operation *op) {
+ if (op->getName().getStringRef() == bbArgType.getOperationName()) {
+ targets.push_back(op);
+ return WalkResult::skip();
+ }
+ return WalkResult::advance();
+ });
+ } else {
+ targets.push_back(state.getTopLevel());
+ }
+
for (unsigned i = 0, e = state.getNumTopLevelMappings(); i < e; ++i)
extraMappings.push_back(llvm::to_vector(state.getTopLevelMapping(i)));
}
// RUN: mlir-opt -split-input-file -test-transform-dialect-interpreter %s | FileCheck %s
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.tensor.fold_tensor_empty
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// CHECK: #[[$MAP:.+]] = affine_map<()[s0] -> (s0 floordiv 28)>
// -----
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.tensor.fold_tensor_empty
{fold_single_use_only = true}
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
func.func @double_use_of_tensor_empty(%arg0: index, %arg1: index)
// RUN: mlir-opt -split-input-file -test-transform-dialect-interpreter %s | FileCheck %s
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.tensor.rewrite_as_constant
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// CHECK-LABEL: func @tensor_generate_constant(
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_contraction lowering_strategy = "outerproduct"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// CHECK: vector.transpose %[[INPUT]], [1, 0, 2] : vector<3x4x5xf32> to vector<4x3x5xf32>
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_multi_reduction lowering_strategy = "innerreduction"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// CHECK: return %{{.+}}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_multi_reduction lowering_strategy = "innerparallel"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// CHECK: vector.transfer_write %[[SHCAST]], %[[SUBVIEW]]{{.*}} : vector<f32>, memref<f32>
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.rank_reducing_subview_patterns
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "linalg-copy"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
// CHECK: }
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "linalg-copy"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
// CHECK: }
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "linalg-copy"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "vector-transfer"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "vector-transfer"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
// CHECK: }
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "vector-transfer"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.split_transfer_full_partial split_transfer_strategy = "vector-transfer"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// RUN: mlir-opt -split-input-file -test-transform-dialect-interpreter %s | FileCheck %s
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.fold_tensor_slice_into_transfer
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// CHECK-LABEL: func @transfer_read_of_extract_slice(
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transfer max_transfer_rank = 99
transform.apply_patterns.vector.transfer_permutation_patterns
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transfer max_transfer_rank = 99
transform.apply_patterns.vector.transfer_permutation_patterns
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose lowering_strategy = "eltwise"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose lowering_strategy = "shuffle_1d"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose lowering_strategy = "flat_transpose"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose avx2_lowering_strategy = true
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose lowering_strategy = "shuffle_16x16"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
// -----
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose lowering_strategy = "shuffle_16x16"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}
}
transform.sequence failures(propagate) {
-^bb1(%module_op: !transform.any_op):
- %func_op = transform.structured.match ops{["func.func"]} in %module_op
- : (!transform.any_op) -> !transform.any_op
+^bb1(%func_op: !transform.op<"func.func">):
transform.apply_patterns to %func_op {
transform.apply_patterns.vector.lower_transpose lowering_strategy = "shuffle_16x16"
- } : !transform.any_op
+ } : !transform.op<"func.func">
}