Commit d3edfb0

Delete unused tracing functions from ops.cpp.
- `SgnOp` and `SignOp`
  - Full codegen migration: #3577
  - Mistakenly re-introduced: #3572
- `LogSigmoid`
  - Introduced: #3539
  - Full codegen migration: #3743
- `SiLU`
  - Introduced: #2721
  - Full codegen migration: #3780
- `SiLUBackward`
  - Introduced: #3195
  - Full codegen migration: #3780
- `Selu`
  - Introduced: #3547
  - Full codegen migration: #3780
- `Sigmoid`
  - Introduced: 6a73deb (no PR record)
  - Full codegen migration: #6342
1 parent 6b00363 commit d3edfb0
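
All six deleted functions share the same hand-written tracing idiom that full codegen now generates: an IR node built through GenericOp, with a lower_fn lambda that maps the node's operands to an XLA op at lowering time. A minimal sketch of that shape, mirroring the deleted Sigmoid from the diff below (the comments are annotations added here, not original source):

torch::lazy::NodePtr Sigmoid(const torch::lazy::Value& input) {
  // At lowering time, fetch the XLA op already emitted for operand 0 and
  // register the builder helper's result as this node's output.
  auto lower_fn = [](const XlaNode& node,
                     LoweringContext* loctx) -> XlaOpVector {
    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
    return node.ReturnOp(BuildSigmoid(xla_input), loctx);
  };
  // GenericOp ties together the op kind, operands, output shape, and the
  // lowering; ops without a fixed output shape (e.g. SiLUBackward) pass a
  // shape-inference lambda instead of GetXlaShape(input).
  return GenericOp(torch::lazy::OpKind(at::aten::sigmoid), {input},
                   GetXlaShape(input), std::move(lower_fn));
}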

2 files changed: 0 additions, 96 deletions

torch_xla/csrc/ops/ops.cpp

Lines changed: 0 additions & 81 deletions
@@ -116,26 +116,6 @@ torch::lazy::NodePtr Logit(const torch::lazy::Value& input,
                    torch::lazy::MHash(eps));
 }
 
-torch::lazy::NodePtr SgnOp(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSgn(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::sgn), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
-torch::lazy::NodePtr SignOp(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSign(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::sign), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
 torch::lazy::NodePtr Prelu(const torch::lazy::Value& input,
                            const torch::lazy::Value& weight) {
   auto lower_fn = [](const XlaNode& node,
@@ -169,57 +149,6 @@ torch::lazy::NodePtr PreluBackward(const torch::lazy::Value& grad,
                    std::move(lower_fn), /*num_outputs=*/2);
 }
 
-torch::lazy::NodePtr LogSigmoid(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOps(BuildLogSigmoid(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::log_sigmoid), {input},
-                   GetXlaShape(input), std::move(lower_fn), /*num_outputs=*/2);
-}
-
-torch::lazy::NodePtr SiLU(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(xla_input * BuildSigmoid(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::silu), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
-torch::lazy::NodePtr SiLUBackward(const torch::lazy::Value& grad_output,
-                                  const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_grad_output = loctx->GetOutputOp(node.operand(0));
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(1));
-    return node.ReturnOp(BuildSiLUBackward(xla_grad_output, xla_input), loctx);
-  };
-  auto lower_for_shape_fn =
-      [](absl::Span<const xla::XlaOp> operands) -> xla::XlaOp {
-    return BuildSiLUBackward(operands[0], operands[1]);
-  };
-  return GenericOp(
-      torch::lazy::OpKind(at::aten::silu_backward), {grad_output, input},
-      [&]() {
-        return InferOutputShape({GetXlaShape(grad_output), GetXlaShape(input)},
-                                lower_for_shape_fn);
-      },
-      std::move(lower_fn));
-}
-
-torch::lazy::NodePtr Sigmoid(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSigmoid(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::sigmoid), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
 torch::lazy::NodePtr SigmoidBackward(const torch::lazy::Value& grad_output,
                                      const torch::lazy::Value& output) {
   torch::lazy::Value scalar_1 = ScalarOp(1, GetXlaShape(output));
@@ -958,16 +887,6 @@ torch::lazy::NodePtr Softplus(const torch::lazy::Value& input,
                    std::move(lower_fn));
 }
 
-torch::lazy::NodePtr Selu(const torch::lazy::Value& input) {
-  auto lower_fn = [](const XlaNode& node,
-                     LoweringContext* loctx) -> XlaOpVector {
-    xla::XlaOp xla_input = loctx->GetOutputOp(node.operand(0));
-    return node.ReturnOp(BuildSelu(xla_input), loctx);
-  };
-  return GenericOp(torch::lazy::OpKind(at::aten::selu), {input},
-                   GetXlaShape(input), std::move(lower_fn));
-}
-
 torch::lazy::NodePtr ViewAsComplexCopy(const torch::lazy::Value& input) {
   auto lower_fn = [](const XlaNode& node,
                      LoweringContext* loctx) -> XlaOpVector {

torch_xla/csrc/ops/ops.h

Lines changed: 0 additions & 15 deletions
@@ -80,10 +80,6 @@ torch::lazy::NodePtr Tan(const torch::lazy::Value& input);
 
 torch::lazy::NodePtr Neg(const torch::lazy::Value& input);
 
-torch::lazy::NodePtr SgnOp(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr SignOp(const torch::lazy::Value& input);
-
 torch::lazy::NodePtr Min(const torch::lazy::Value& input,
                          const torch::lazy::Value& other);
 
@@ -114,15 +110,6 @@ torch::lazy::NodePtr Pow(const torch::lazy::Value& input,
 torch::lazy::NodePtr Fmod(const torch::lazy::Value& dividend,
                           const torch::lazy::Value& divisor);
 
-torch::lazy::NodePtr LogSigmoid(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr Sigmoid(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr SiLU(const torch::lazy::Value& input);
-
-torch::lazy::NodePtr SiLUBackward(const torch::lazy::Value& grad_output,
-                                  const torch::lazy::Value& input);
-
 torch::lazy::NodePtr SigmoidBackward(const torch::lazy::Value& grad_output,
                                      const torch::lazy::Value& output);
 
@@ -245,8 +232,6 @@ torch::lazy::NodePtr Softplus(const torch::lazy::Value& input,
                               const torch::lazy::Value& beta,
                               const torch::lazy::Value& threshold);
 
-torch::lazy::NodePtr Selu(const torch::lazy::Value& input);
-
 torch::lazy::NodePtr ViewAsComplexCopy(const torch::lazy::Value& input);
 
 torch::lazy::NodePtr ViewAsRealCopy(const torch::lazy::Value& input);
