path: root/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
author     Nicolas Vasilache <nicolas.vasilache@gmail.com>  2022-07-13 08:09:38 -0700
committer  Nicolas Vasilache <nicolas.vasilache@gmail.com>  2022-07-14 05:29:27 -0700
commit     5a0011360c9cea0fdc182c8c66ed3bc774a50835 (patch)
tree       f97fcafc88626be7bbf827b981920dbc010186b2 /mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
parent     b7e77ff25fb2412f6ab6d6cc756666b0e2f97bd3 (diff)
[mlir][Linalg] Retire LinalgPromotion pattern
This revision removes the LinalgPromotion pattern and adds a `transform.structured.promotion` op.

Since LinalgPromotion allows the injection of arbitrary C++ via lambdas, the current transform op does not cover that capability; deciding what the right transform op control is for those cases is left for future work. Note that the underlying implementation remains unchanged and the mechanism is still controllable by lambdas from the C++ API.

During this refactoring it was also determined that the `dynamicBuffers` option does not actually change the behavior of the algorithm. This also shows that the related test is wrong (and dangerous). Both the option and the test are therefore removed.

Lastly, a test that connects patterns using the filter-based mechanism is removed: all the independent pieces are already tested separately.

Context: https://discourse.llvm.org/t/psa-retire-linalg-filter-based-patterns/63785

Differential Revision: https://reviews.llvm.org/D129649
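Although the new transform op does not yet expose the lambda hooks, the C++ entry points keep them. Below is a minimal sketch of building promotion options with copy callbacks injected as lambdas; the setter names (`setUseAlloca`, `setAlignment`, `setCopyInOutFns`), the callback signatures, and the header paths are assumptions based on the existing API, not part of this revision.

// Sketch only: assemble LinalgPromotionOptions with custom copy callbacks
// injected as lambdas (assumed setters, illustrative callbacks).
#include "mlir/Dialect/Linalg/Transforms/Transforms.h"
#include "mlir/Dialect/MemRef/IR/MemRef.h"

using namespace mlir;
using namespace mlir::linalg;

static LinalgPromotionOptions makePromotionOptions(bool useAlloca) {
  LinalgPromotionOptions options;
  // Promote into stack allocations and request a fixed alignment.
  options.setUseAlloca(useAlloca).setAlignment(16);
  // Arbitrary C++ remains injectable through the copy callbacks.
  options.setCopyInOutFns(
      /*copyIn=*/[](OpBuilder &b, Value src, Value dst) -> LogicalResult {
        b.create<memref::CopyOp>(src.getLoc(), src, dst);
        return success();
      },
      /*copyOut=*/[](OpBuilder &b, Value src, Value dst) -> LogicalResult {
        b.create<memref::CopyOp>(dst.getLoc(), src, dst);
        return success();
      });
  return options;
}

Options built this way feed directly into `mlir::linalg::promoteSubViews`, the entry point left untouched by this change.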
Diffstat (limited to 'mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp')
-rw-r--r--  mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp | 39
1 file changed, 1 insertion(+), 38 deletions(-)
diff --git a/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp b/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
index 1ae858d470fe..8ce2a348c951 100644
--- a/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
+++ b/mlir/lib/Dialect/Linalg/Transforms/Promotion.cpp
@@ -135,9 +135,6 @@ struct LinalgOpInstancePromotionOptions {
   CopyCallbackFn copyInFn;
   CopyCallbackFn copyOutFn;

-  /// Allow the use of dynamically-sized buffers.
-  bool dynamicBuffers;
-
   /// Alignment of promoted buffer.
   Optional<unsigned> alignment;
 };
@@ -145,8 +142,7 @@ struct LinalgOpInstancePromotionOptions {
 LinalgOpInstancePromotionOptions::LinalgOpInstancePromotionOptions(
     LinalgOp linalgOp, const LinalgPromotionOptions &options)
-    : subViews(), dynamicBuffers(options.dynamicBuffers),
-      alignment(options.alignment) {
+    : subViews(), alignment(options.alignment) {
   assert(linalgOp.hasBufferSemantics() && "revisit usage of shaped operand");
   auto vUseFullTileBuffers =
       options.useFullTileBuffers.value_or(llvm::SmallBitVector());
@@ -394,36 +390,3 @@ mlir::linalg::promoteSubViews(OpBuilder &builder, LinalgOp linalgOp,
     return failure();
   return res;
 }
-
-namespace {
-struct LinalgPromotionPass : public LinalgPromotionBase<LinalgPromotionPass> {
-  LinalgPromotionPass() = default;
-  LinalgPromotionPass(bool dynamicBuffers, bool useAlloca) {
-    this->dynamicBuffers = dynamicBuffers;
-    this->useAlloca = useAlloca;
-  }
-
-  void runOnOperation() override {
-    getOperation().walk([&](LinalgOp op) {
-      auto options = LinalgPromotionOptions()
-                         .setDynamicBuffers(dynamicBuffers)
-                         .setUseAlloca(useAlloca);
-      if (failed(promoteSubviewsPrecondition(op, options)))
-        return;
-      LLVM_DEBUG(llvm::dbgs() << "Promote: " << *(op.getOperation()) << "\n");
-      ImplicitLocOpBuilder b(op.getLoc(), op);
-      // TODO: signalPassFailure() ?
-      (void)promoteSubViews(b, op, options);
-    });
-  }
-};
-} // namespace
-
-// TODO: support more transformation options in the pass.
-std::unique_ptr<OperationPass<func::FuncOp>>
-mlir::createLinalgPromotionPass(bool dynamicBuffers, bool useAlloca) {
-  return std::make_unique<LinalgPromotionPass>(dynamicBuffers, useAlloca);
-}
-std::unique_ptr<OperationPass<func::FuncOp>> mlir::createLinalgPromotionPass() {
-  return std::make_unique<LinalgPromotionPass>();
-}
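For clients that relied on the retired pass, a rough sketch of an equivalent driver, modeled on the code removed above (header paths and the `setUseAlloca` setter are assumptions; `setDynamicBuffers` goes away along with the option):

// Sketch: walk a function and promote each linalg op directly from client
// code, mirroring the behavior of the removed LinalgPromotionPass.
#include "mlir/Dialect/Func/IR/FuncOps.h"
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/Linalg/Transforms/Transforms.h"
#include "mlir/IR/ImplicitLocOpBuilder.h"

using namespace mlir;
using namespace mlir::linalg;

static void promoteAllLinalgOps(func::FuncOp func, bool useAlloca) {
  func.walk([&](LinalgOp op) {
    auto options = LinalgPromotionOptions().setUseAlloca(useAlloca);
    // Skip ops that cannot be promoted rather than aborting the walk.
    if (failed(promoteSubviewsPrecondition(op, options)))
      return;
    ImplicitLocOpBuilder b(op.getLoc(), op);
    // Failure is deliberately ignored here, as it was in the removed pass.
    (void)promoteSubViews(b, op, options);
  });
}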