Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/relax/analysis/graph_partitioner.cc
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,7 @@ size_t GraphPartitioner::CountFusedArgs(const IndexedForwardGraph& graph,
}

void GraphPartitioner::InitGroups(const IndexedForwardGraph& graph) {
auto args_counter = [this](const tvm::Object* obj) {
auto args_counter = [](const tvm::Object* obj) {
size_t args_num = 0;
if (auto call_node = GetRef<ObjectRef>(obj).as<CallNode>()) {
for (auto& it : call_node->args) {
Expand Down
2 changes: 1 addition & 1 deletion src/relax/ir/expr.cc
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,7 @@ Function::Function(Array<Var> params, Expr body, Optional<StructInfo> ret_struct
ObjectPtr<FunctionNode> n = make_object<FunctionNode>();
n->params = std::move(params);
n->body = std::move(body);
n->ret_struct_info = std::move(ret_struct_info.value());
n->ret_struct_info = ret_struct_info.value();
n->is_pure = is_pure;
n->struct_info_ = std::move(func_sinfo);
n->attrs = std::move(attrs);
Expand Down
5 changes: 2 additions & 3 deletions src/relax/op/nn/attention.cc
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
#include "attention.h"

#include <utility>
#include <vector>

namespace tvm {
namespace relax {
Expand All @@ -37,8 +36,8 @@ Expr attention(Expr query, Expr key, Expr value, Optional<Expr> bias, Optional<F

if (bias) {
return Call(Op::Get("relax.nn.attention_bias"),
{std::move(query), std::move(key), std::move(value), std::move(bias.value())},
Attrs(attrs), {});
{std::move(query), std::move(key), std::move(value), bias.value()}, Attrs(attrs),
{});
}
return Call(Op::Get("relax.nn.attention"), {std::move(query), std::move(key), std::move(value)},
Attrs(attrs), {});
Expand Down
4 changes: 2 additions & 2 deletions src/relax/op/nn/convolution.cc
Original file line number Diff line number Diff line change
Expand Up @@ -595,7 +595,7 @@ Expr conv1d_transpose(Expr data, Expr weight, Array<IntImm> strides, Array<IntIm
attrs->groups = groups;
attrs->data_layout = data_layout;
attrs->kernel_layout = std::move(kernel_layout);
attrs->out_layout = std::move(out_layout.value_or(data_layout));
attrs->out_layout = out_layout.value_or(data_layout);
attrs->out_dtype = std::move(out_dtype.value_or(DataType::Void()));
const Op& op = Op::Get("relax.nn.conv1d_transpose");
return Call(op, {data, weight}, Attrs(attrs), {});
Expand Down Expand Up @@ -732,7 +732,7 @@ Expr conv2d_transpose(Expr data, Expr weight, Array<IntImm> strides, Array<IntIm
attrs->groups = groups;
attrs->data_layout = data_layout;
attrs->kernel_layout = std::move(kernel_layout);
attrs->out_layout = std::move(out_layout.value_or(data_layout));
attrs->out_layout = out_layout.value_or(data_layout);
attrs->out_dtype = std::move(out_dtype.value_or(DataType::Void()));
const Op& op = Op::Get("relax.nn.conv2d_transpose");
return Call(op, {data, weight}, Attrs(attrs), {});
Expand Down
4 changes: 2 additions & 2 deletions src/relax/op/nn/nn.cc
Original file line number Diff line number Diff line change
Expand Up @@ -905,8 +905,8 @@ Expr nll_loss(Expr predictions, Expr targets, Optional<Expr> weights, String red

static const Op& op = Op::Get("relax.nn.nll_loss");
if (weights.defined()) {
return Call(op, {std::move(predictions), std::move(targets), std::move(weights.value())},
Attrs{attrs}, {});
return Call(op, {std::move(predictions), std::move(targets), weights.value()}, Attrs{attrs},
{});
} else {
return Call(op, {std::move(predictions), std::move(targets)}, Attrs{attrs}, {});
}
Expand Down
7 changes: 3 additions & 4 deletions src/relax/op/tensor/grad.cc
Original file line number Diff line number Diff line change
Expand Up @@ -101,10 +101,9 @@ Expr nll_loss_backward(Expr output_grad, Expr predictions, Expr targets, Optiona

static const Op& op = Op::Get("relax.grad.nll_loss_backward");
if (weights.defined()) {
return Call(op,
{std::move(output_grad), std::move(predictions), std::move(targets),
std::move(weights.value())},
Attrs{attrs}, {});
return Call(
op, {std::move(output_grad), std::move(predictions), std::move(targets), weights.value()},
Attrs{attrs}, {});
} else {
return Call(op, {std::move(output_grad), std::move(predictions), std::move(targets)},
Attrs{attrs}, {});
Expand Down
2 changes: 1 addition & 1 deletion src/relax/transform/convert_layout.cc
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ class LayoutConvertMutator : public ExprMutator {
ObjectPtr<LayoutTransformAttrs> attrs = make_object<LayoutTransformAttrs>();
Array<IntImm> axis_separator;
Array<IntImm> input_axis_separator;
attrs->index_map = std::move(Downcast<IndexMap>(LoadJSON(SaveJSON(index_map))));
attrs->index_map = Downcast<IndexMap>(LoadJSON(SaveJSON(index_map)));
attrs->axis_separators = std::move(axis_separator);
attrs->input_axis_separators = std::move(input_axis_separator);
const Op& layout_transform_op_ = Op::Get("relax.layout_transform");
Expand Down
2 changes: 1 addition & 1 deletion src/relax/transform/fuse_tir.cc
Original file line number Diff line number Diff line change
Expand Up @@ -846,7 +846,7 @@ class FusedTIRConstructor : public ExprVisitor {
if (is_inplace) {
const auto* attrs = call->attrs.as<CallTIRInplaceAttrs>();
CHECK(attrs) << "Must have CallTIRInplaceAttrs for an in-place call";
output_idxs = std::move(GetInplaceOutputIndices(attrs->inplace_indices, num_inputs));
output_idxs = GetInplaceOutputIndices(attrs->inplace_indices, num_inputs);
} else {
for (size_t i = 0; i < output_size; i++) {
output_idxs.push_back(num_inputs + i);
Expand Down
2 changes: 1 addition & 1 deletion src/relax/transform/legalize_ops.cc
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ class LegalizeMutator : public ExprMutator {
bool enable_warning)
: ExprMutator(mod), mod_(std::move(mod)), enable_warning_(enable_warning) {
if (cmap) {
cmap_ = std::move(cmap.value());
cmap_ = cmap.value();
}
}

Expand Down
2 changes: 1 addition & 1 deletion src/relax/transform/remove_purity_checking.cc
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ class PurityRemover : public ExprMutator {
bool purity = func->is_pure;
auto ret = func;
if (purity) {
ret = std::move(WithAttr<Function>(func, relax::attr::kForcePure, true));
ret = WithAttr<Function>(func, relax::attr::kForcePure, true);
}
auto new_body = VisitExpr(ret->body);
if (!new_body.same_as(ret->body)) {
Expand Down
4 changes: 2 additions & 2 deletions src/relax/transform/to_mixed_precision.cc
Original file line number Diff line number Diff line change
Expand Up @@ -532,7 +532,7 @@ class ToMixedPrecisionRewriter : public ExprMutator {
return;
}
ObjectPtr<TupleNode> new_tuple = make_object<TupleNode>(*tuple_node);
new_tuple->fields = std::move(RemapArgs(tuple_node->fields));
new_tuple->fields = RemapArgs(tuple_node->fields);
new_tuple->struct_info_ = std::nullopt;
Expr new_value = builder_->Normalize(Tuple(new_tuple));
if (!binding->var->IsInstance<DataflowVarNode>()) {
Expand Down Expand Up @@ -600,7 +600,7 @@ class ToMixedPrecisionRewriter : public ExprMutator {

Expr ToMixedPrecision(const Function& f, const DataType& out_dtype,
Optional<Array<String>> fp16_input_names) {
VarDTypeMap only_fp16_map = std::move(DTypeDecisionCollector::Collect(f, out_dtype));
VarDTypeMap only_fp16_map = DTypeDecisionCollector::Collect(f, out_dtype);
std::unordered_set<std::string> fp16_input_names_set;
if (fp16_input_names) {
fp16_input_names_set.insert(fp16_input_names.value().begin(), fp16_input_names.value().end());
Expand Down
6 changes: 3 additions & 3 deletions src/script/printer/relax/function.cc
Original file line number Diff line number Diff line change
Expand Up @@ -50,9 +50,9 @@ TVM_STATIC_IR_FUNCTOR(IRDocsifier, vtable)
// if we are binding a local definition, then calling d->Define
// will result in a repeated definition and an incorrect displayed name
if (Optional<String> name = GetBindingName(d)) {
func_name = std::move(IdDoc(name.value()));
func_name = IdDoc(name.value());
} else {
func_name = std::move(IdDoc(FindFunctionName(d, n).value_or("main")));
func_name = IdDoc(FindFunctionName(d, n).value_or("main"));
}
(*f)->AddDispatchToken(d, "relax");
(*f)->is_func = true;
Expand Down Expand Up @@ -118,7 +118,7 @@ TVM_STATIC_IR_FUNCTOR(IRDocsifier, vtable)
dec_values.push_back(LiteralDoc::Boolean(true, Optional<ObjectPath>()));
}
if (dec_keys.size()) {
decorator = std::move(decorator->Call(pos_args, dec_keys, dec_values));
decorator = decorator->Call(pos_args, dec_keys, dec_values);
}

// Step 6. Print body
Expand Down
4 changes: 2 additions & 2 deletions src/script/printer/tir/function.cc
Original file line number Diff line number Diff line change
Expand Up @@ -190,8 +190,8 @@ TVM_STATIC_IR_FUNCTOR(IRDocsifier, vtable)
// mark private if there is no global symbol
if (!func->attrs.defined() || !func->attrs->dict.count(tvm::attr::kGlobalSymbol)) {
Array<ExprDoc> pos_args;
decorator = std::move(decorator->Call(pos_args, {"private"},
{LiteralDoc::Boolean(true, Optional<ObjectPath>())}));
decorator = decorator->Call(pos_args, {"private"},
{LiteralDoc::Boolean(true, Optional<ObjectPath>())});
}

return HeaderWrapper(d, FunctionDoc(
Expand Down
2 changes: 1 addition & 1 deletion src/tir/ir/data_type_rewriter.cc
Original file line number Diff line number Diff line change
Expand Up @@ -557,7 +557,7 @@ Stmt IndexDataTypeRewriter::VisitStmt_(const ForNode* op) {
auto old_thread_binding = op->thread_binding.value();
auto* ptr = old_thread_binding.CopyOnWrite();
ptr->var = old_thread_binding->var.copy_with_dtype(new_loop_var.dtype());
n->thread_binding = std::move(Optional<IterVar>(std::move(old_thread_binding)));
n->thread_binding = Optional<IterVar>(std::move(old_thread_binding));
}
n->body = new_body;
return new_for;
Expand Down
6 changes: 3 additions & 3 deletions src/tir/schedule/primitive/cache_read_write.cc
Original file line number Diff line number Diff line change
Expand Up @@ -986,7 +986,7 @@ class CacheReadRewriter : public StmtExprMutator {
ObjectPtr<BufferLoadNode> n = make_object<BufferLoadNode>(*load);
n->buffer = info_->write_buffer;
if (!cache_full_region_) {
n->indices = std::move(RewriteIndices(load->indices));
n->indices = RewriteIndices(load->indices);
}
return PrimExpr(n);
}
Expand Down Expand Up @@ -1257,7 +1257,7 @@ class CacheWriteRewriter : public StmtExprMutator {
auto n = CopyOnWrite(stmt.get());
n->buffer = info_->read_buffer;
if (!cache_full_region_) {
n->indices = std::move(RewriteIndices(n->indices));
n->indices = RewriteIndices(n->indices);
}
return Stmt(n);
} else {
Expand All @@ -1270,7 +1270,7 @@ class CacheWriteRewriter : public StmtExprMutator {
ObjectPtr<BufferLoadNode> n = make_object<BufferLoadNode>(*load);
n->buffer = info_->read_buffer;
if (!cache_full_region_) {
n->indices = std::move(RewriteIndices(n->indices));
n->indices = RewriteIndices(n->indices);
}
return PrimExpr(n);
}
Expand Down
4 changes: 2 additions & 2 deletions src/tir/schedule/primitive/compute_inline.cc
Original file line number Diff line number Diff line change
Expand Up @@ -378,8 +378,8 @@ class BaseInliner : public StmtExprMutator {
if (!is_scope_root && (std::any_of(reads.begin(), reads.end(), f_access_inline_buffer) ||
std::any_of(writes.begin(), writes.end(), f_access_inline_buffer))) {
Array<Array<BufferRegion>> inspected = GetBlockReadWriteRegion(block, buffer_var_map_);
reads = std::move(inspected[0]);
writes = std::move(inspected[1]);
reads = inspected[0];
writes = inspected[1];
}
// Step 3. Assemble the result
BlockNode* n = block.CopyOnWrite();
Expand Down
2 changes: 1 addition & 1 deletion src/tir/schedule/primitive/loop_transformation.cc
Original file line number Diff line number Diff line change
Expand Up @@ -616,7 +616,7 @@ class BlockMutator : public StmtExprMutator {

if (!op->loop_var.same_as(new_var)) {
// If the partitioned loop contains a nested for loop, then create a new iteration variable instance
res.CopyOnWrite()->body = std::move(tir::Substitute(res->body, {{op->loop_var, new_var}}));
res.CopyOnWrite()->body = tir::Substitute(res->body, {{op->loop_var, new_var}});
res.CopyOnWrite()->loop_var = new_var;
}
return res;
Expand Down
2 changes: 1 addition & 1 deletion src/tir/transforms/profile_instrumentation.cc
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ class InstrumentIntrin : public StmtMutator {

void GetLoopInfo(PrimFuncNode* op) {
LoopAnalyzer analzer;
loops_ = std::move(analzer.Analyze(op->body));
loops_ = analzer.Analyze(op->body);
}

Stmt VisitStmt_(const SeqStmtNode* op) final {
Expand Down
Loading