Commit 45bff9b

Delete unused tracing functions after codegen migration. (#9240)
1 parent b6d8468 commit 45bff9b
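
Context for the deletions below: these ops were hand-traced through GenericOp with inline lower_fn lambdas. After the codegen migration they are emitted from the full_codegen pipeline (driven, as the repository's conventions suggest, by codegen/xla_native_functions.yaml), which generates the IR node classes and leaves only a hand-written lowering per op. As a minimal sketch, assuming the generated torch_xla::Sgn node class and the conventions of torch_xla/csrc/ops/ops_lower_fn.cpp, the codegen-era counterpart of the deleted SgnOp looks roughly like this:

    // Sketch, not part of this diff: the Sgn node class itself is generated
    // by codegen; only the lowering body below is written by hand.
    torch_xla::XlaOpVector Sgn::Lower(LoweringContext* loctx) const {
      xla::XlaOp xla_input = loctx->GetOutputOp(operand(0));
      return ReturnOp(BuildSgn(xla_input), loctx);
    }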

4 files changed: 0 additions, 97 deletions

torch_xla/csrc/ops/ops.cpp

Lines changed: 0 additions & 71 deletions
@@ -116,26 +116,6 @@ torch::lazy::NodePtr Logit(const torch::lazy::Value& input,
                    torch::lazy::MHash(eps));
 }
 
-torch::lazy::NodePtr SgnOp(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSgn(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::sgn), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
-torch::lazy::NodePtr SignOp(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSign(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::sign), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
 torch::lazy::NodePtr Prelu(const torch::lazy::Value& input,
                            const torch::lazy::Value& weight) {
   auto lower_fn = [](const XlaNode& node,
@@ -169,57 +149,6 @@ torch::lazy::NodePtr PreluBackward(const torch::lazy::Value& grad,
                    std::move(lower_fn), /*num_outputs=*/2);
 }
 
-torch::lazy::NodePtr LogSigmoid(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOps(BuildLogSigmoid(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::log_sigmoid), {input},
-                   GetXlaShape(input), std::move(lower_fn), /*num_outputs=*/2);
-}
-
-torch::lazy::NodePtr SiLU(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(xla_input * BuildSigmoid(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::silu), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
-torch::lazy::NodePtr SiLUBackward(const torch::lazy::Value& grad_output,
-                                  const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_grad_output = loctx->GetOutputOp(node.operand(0));
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(1));
-    return node.ReturnOp(BuildSiLUBackward(xla_grad_output, xla_input), loctx);
-  };
-  auto lower_for_shape_fn =
-      [](absl::Span<const xla::XlaOp> operands) -> xla::XlaOp {
-    return BuildSiLUBackward(operands[0], operands[1]);
-  };
-  return GenericOp(
-      torch::lazy::OpKind(at::aten::silu_backward), {grad_output, input},
-      [&]() {
-        return InferOutputShape({GetXlaShape(grad_output), GetXlaShape(input)},
-                                lower_for_shape_fn);
-      },
-      std::move(lower_fn));
-}
-
-torch::lazy::NodePtr Sigmoid(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSigmoid(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::sigmoid), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
 torch::lazy::NodePtr SigmoidBackward(const torch::lazy::Value& grad_output,
                                      const torch::lazy::Value& output) {
   torch::lazy::Value scalar_1 = ScalarOp(1, GetXlaShape(output));
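
The deleted SiLUBackward above also bundled a lower_for_shape_fn for InferOutputShape; under codegen, shape inference moves into a standalone per-op shape function. A hedged sketch, assuming the <Op>OutputShape naming convention of torch_xla/csrc/ops/ops_xla_shape_fn.cpp:

    // Sketch under assumed naming: shape inference for the codegen'd
    // silu_backward, mirroring the lambda deleted in the hunk above.
    xla::Shape SiluBackwardOutputShape(const torch::lazy::Value& grad_output,
                                       const torch::lazy::Value& input) {
      auto shape_fn = [](absl::Span<const xla::XlaOp> operands) -> xla::XlaOp {
        return BuildSiLUBackward(operands[0], operands[1]);
      };
      return InferOutputShape({GetXlaShape(grad_output), GetXlaShape(input)},
                              shape_fn);
    }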

torch_xla/csrc/ops/ops.h

Lines changed: 0 additions & 13 deletions
@@ -80,10 +80,6 @@ torch::lazy::NodePtr Tan(const torch::lazy::Value& input);
 
 torch::lazy::NodePtr Neg(const torch::lazy::Value& input);
 
-torch::lazy::NodePtr SgnOp(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr SignOp(const torch::lazy::Value& input);
-
 torch::lazy::NodePtr Min(const torch::lazy::Value& input,
                          const torch::lazy::Value& other);
 
@@ -114,15 +110,6 @@ torch::lazy::NodePtr Pow(const torch::lazy::Value& input,
 torch::lazy::NodePtr Fmod(const torch::lazy::Value& dividend,
                           const torch::lazy::Value& divisor);
 
-torch::lazy::NodePtr LogSigmoid(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr Sigmoid(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr SiLU(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr SiLUBackward(const torch::lazy::Value& grad_output,
-                                  const torch::lazy::Value& input);
-
 torch::lazy::NodePtr SigmoidBackward(const torch::lazy::Value& grad_output,
                                      const torch::lazy::Value& output);
 

torch_xla/csrc/tensor_methods.cpp

Lines changed: 0 additions & 9 deletions
@@ -2000,11 +2000,6 @@ XLATensorPtr log_base(const XLATensorPtr& input, torch::lazy::OpKind op,
                       std::nullopt);
 }
 
-XLATensorPtr log_sigmoid(const XLATensorPtr& input) {
-  torch::lazy::NodePtr node = LogSigmoid(input->GetIrValue());
-  return input->CreateFrom(torch::lazy::Value(node, 0));
-}
-
 XLATensorPtr log_softmax(const XLATensorPtr& input, int64_t dim,
                          std::optional<at::ScalarType> dtype,
                          std::vector<torch::lazy::Shape>&& shapes) {
@@ -3075,10 +3070,6 @@ void selu_(XLATensorPtr& input) {
   input->SetInPlaceIrValue(Selu(input->GetIrValue()));
 }
 
-XLATensorPtr sigmoid(const XLATensorPtr& input) {
-  return input->CreateFrom(Sigmoid(input->GetIrValue()));
-}
-
 XLATensorPtr sigmoid_backward(const XLATensorPtr& grad_output,
                               const XLATensorPtr& output) {
   return grad_output->CreateFrom(
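
With the tracing functions gone, the tensor_methods wrappers that forwarded to them have no callers: the generated ATen bindings construct the generated IR nodes directly. A simplified sketch of that path (names and plumbing assumed, not taken from this diff):

    // Sketch only: roughly what the generated binding does in place of the
    // deleted tensor_methods::sigmoid wrapper.
    at::Tensor XLANativeFunctions::sigmoid(const at::Tensor& self) {
      XLATensorPtr xla_self = bridge::GetXlaTensor(self);
      torch::lazy::NodePtr node =
          torch::lazy::MakeNode<Sigmoid>(xla_self->GetIrValue());
      return bridge::AtenFromXlaTensor(
          xla_self->CreateFrom(torch::lazy::Value(node, 0)));
    }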

torch_xla/csrc/tensor_methods.h

Lines changed: 0 additions & 4 deletions
@@ -574,8 +574,6 @@ XLATensorPtr logit(const XLATensorPtr& input, std::optional<double> eps);
 XLATensorPtr log_base(const XLATensorPtr& input, torch::lazy::OpKind op,
                       double base);
 
-XLATensorPtr log_sigmoid(const XLATensorPtr& input);
-
 XLATensorPtr log_softmax(const XLATensorPtr& input, int64_t dim,
                          std::optional<at::ScalarType> dtype,
                          std::vector<torch::lazy::Shape>&& shapes);
@@ -865,10 +863,8 @@ XLATensorPtr scatter_reduce(const XLATensorPtr& input, int64_t dim,
 XLATensorPtr select(const XLATensorPtr& input, int64_t dim, int64_t index);
 
 void selu_(XLATensorPtr& input);
-
 XLATensorPtr silu(const XLATensorPtr& input);
 XLATensorPtr silu_backward(XLATensorPtr& grad_output, XLATensorPtr& input);
-XLATensorPtr sigmoid(const XLATensorPtr& input);
 XLATensorPtr sigmoid_backward(const XLATensorPtr& grad_output,
                               const XLATensorPtr& output);
 