
Commit 2f9b449

update skiplists and code after rebase
Signed-off-by: Anatoly Myachev <[email protected]>
1 parent 4a6f708 commit 2f9b449

6 files changed (+19, -12 lines)


python/triton/language/target_info.py

Lines changed: 0 additions & 6 deletions
@@ -22,12 +22,6 @@ def is_cuda():
     return target is not None and target.backend == "cuda"
 
 
-@constexpr_function
-def is_xpu():
-    target = current_target()
-    return target is not None and target.backend == "xpu"
-
-
 @constexpr_function
 def cuda_capability_geq(major, minor=0):
     """

python/triton_kernels/tests/test_matmul.py

Lines changed: 0 additions & 2 deletions
@@ -297,8 +297,6 @@ def test_op(m, n, k, split_k, do_gather, do_scatter, fused_scatter, has_y_gammas
     elif is_xpu():
         if split_k > 1:
             pytest.skip("splitK hasn't been fully tested on INTEL GPU.")
-        if "float8_e4m3fn" in act_dtype_str and "float8_e4m3fn" in weight_dtype_str:
-            pytest.skip("FIXME")
 
     if "float8_e4m3fnuz" in (weight_dtype_str, act_dtype_str) and not is_hip_cdna3():
         pytest.skip("float8_e4m3fnuz only tested on AMD CDNA3 Platform")

python/triton_kernels/triton_kernels/matmul_ogs.py

Lines changed: 1 addition & 1 deletion
@@ -588,7 +588,7 @@ def matmul_ogs_torch(x, w, bias,
         else:
             idx = gather_indx.src_indx[lo:hi] // n_expts_act
         batch = i if is_input_batched else 0
-        out = torch.matmul(round_x(x[batch, idx, :], torch.arange(lo, hi, device="xpu")).float(),
+        out = torch.matmul(round_x(x[batch, idx, :], torch.arange(lo, hi, device=device)).float(),
                            w[i].float())
         if bias is not None:
            out += bias[i, :] if betas is None else bias[i, :] * betas[lo:hi, None]
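The one-line fix replaces the hardcoded "xpu" device string with the local device variable, so the reference path allocates its index tensor on whatever device the inputs live on. A minimal sketch of the same idea, assuming the device is derived from the input tensor (the helper name is hypothetical):

import torch

def row_indices(x: torch.Tensor, lo: int, hi: int) -> torch.Tensor:
    # Derive the device from the input tensor instead of hardcoding a backend
    # string such as "cuda" or "xpu"; the same code then runs on any backend.
    return torch.arange(lo, hi, device=x.device)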

python/triton_kernels/triton_kernels/target_info.py

Lines changed: 0 additions & 1 deletion
@@ -6,7 +6,6 @@
     cuda_capability_geq,
     is_cuda,
     is_hip,
-    is_xpu,
     is_hip_cdna3,
     is_hip_cdna4,
     is_xpu,
Lines changed: 9 additions & 1 deletion
@@ -1,3 +1,11 @@
-tests/test_matmul.py::test_op
+# https://github.com/intel/intel-xpu-backend-for-triton/issues/5074
+tests/test_matmul.py::test_op[False-False-False-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-False-False-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-False-True-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-False-True-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-False-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-False-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-True-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-True-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
 tests/test_matmul.py::test_fused_act
 tests/test_matmul.py::test_zero_reduction_dim

Lines changed: 9 additions & 1 deletion
@@ -1,3 +1,11 @@
-tests/test_matmul.py::test_op
+# https://github.com/intel/intel-xpu-backend-for-triton/issues/5074
+tests/test_matmul.py::test_op[False-False-False-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-False-False-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-False-True-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-False-True-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-False-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-False-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-True-True-False-16-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
+tests/test_matmul.py::test_op[False-True-True-True-False-128-1000-400-400-ragged-float8_e4m3fn-float8_e4m3fn-3-1-1-1-False-None]
 tests/test_matmul.py::test_fused_act
 tests/test_matmul.py::test_zero_reduction_dim
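Both skiplists previously excluded every tests/test_matmul.py::test_op parameterization; they now exclude only the float8_e4m3fn cases tracked in issue 5074. How these files are consumed is not shown in this commit; a minimal sketch, assuming one pytest node ID per line with '#' starting a comment, is to expand the list into --deselect arguments:

import subprocess
import sys

def deselect_args(skiplist_path: str) -> list[str]:
    # Turn each node ID in the skiplist into a pytest --deselect argument,
    # skipping blank lines and '#' comments. The file format is assumed here;
    # the real CI scripts in intel-xpu-backend-for-triton may work differently.
    args = []
    with open(skiplist_path) as f:
        for line in f:
            line = line.strip()
            if line and not line.startswith("#"):
                args += ["--deselect", line]
    return args

if __name__ == "__main__":
    skiplist = sys.argv[1]
    cmd = [sys.executable, "-m", "pytest", "tests/test_matmul.py", *deselect_args(skiplist)]
    subprocess.run(cmd, check=False)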
