@@ -990,6 +990,8 @@ func.func @test_slice_default_axes_and_slices(%arg0: !torch.vtensor<[20,10,5],f3
   return %0 : !torch.vtensor<[20,10,1],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_slice_default_steps
 func.func @test_slice_default_steps(%arg0: !torch.vtensor<[20,10,5],f32>, %arg1: !torch.vtensor<[3],si64>, %arg2: !torch.vtensor<[3],si64>, %arg3: !torch.vtensor<[3],si64>) -> !torch.vtensor<[20,10,1],f32> attributes {torch.onnx_meta.ir_version = 7 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[NONE:.*]] = torch.constant.none
@@ -1036,6 +1038,9 @@ func.func @test_slice_default_steps(%arg0: !torch.vtensor<[20,10,5],f32>, %arg1:
   %0 = torch.operator "onnx.Slice"(%arg0, %arg1, %arg2, %arg3) : (!torch.vtensor<[20,10,5],f32>, !torch.vtensor<[3],si64>, !torch.vtensor<[3],si64>, !torch.vtensor<[3],si64>) -> !torch.vtensor<[20,10,1],f32>
   return %0 : !torch.vtensor<[20,10,1],f32>
 }
+
+// -----
+
 // CHECK-LABEL: func.func @test_reshape_negative_dim
 func.func @test_reshape_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torch.vtensor<[3],si64>) -> !torch.vtensor<[2,6,2],f32> attributes {torch.onnx_meta.ir_version = 9 : si64, torch.onnx_meta.opset_version = 19 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[INT0:.+]] = torch.constant.int 0
@@ -1069,6 +1074,8 @@ func.func @test_reshape_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1:
   return %0 : !torch.vtensor<[2,6,2],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_reshape_negative_extended_dims
 func.func @test_reshape_negative_extended_dims(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torch.vtensor<[4],si64>) -> !torch.vtensor<[1,2,3,4],f32> attributes {torch.onnx_meta.ir_version = 9 : si64, torch.onnx_meta.opset_version = 19 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[INT0:.+]] = torch.constant.int 0
@@ -1109,6 +1116,8 @@ func.func @test_reshape_negative_extended_dims(%arg0: !torch.vtensor<[2,3,4],f32
   return %0 : !torch.vtensor<[1,2,3,4],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_reshape_one_dim
 func.func @test_reshape_one_dim(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torch.vtensor<[1],si64>) -> !torch.vtensor<[24],f32> attributes {torch.onnx_meta.ir_version = 9 : si64, torch.onnx_meta.opset_version = 19 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[INT0:.+]] = torch.constant.int 0
@@ -1126,6 +1135,8 @@ func.func @test_reshape_one_dim(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torc
   return %0 : !torch.vtensor<[24],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_reshape_reduced_dims
 func.func @test_reshape_reduced_dims(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torch.vtensor<[2],si64>) -> !torch.vtensor<[2,12],f32> attributes {torch.onnx_meta.ir_version = 9 : si64, torch.onnx_meta.opset_version = 19 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[INT0:.+]] = torch.constant.int 0
@@ -1151,6 +1162,8 @@ func.func @test_reshape_reduced_dims(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1:
   return %0 : !torch.vtensor<[2,12],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_reshape_reordered_all_dims
 func.func @test_reshape_reordered_all_dims(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torch.vtensor<[3],si64>) -> !torch.vtensor<[4,2,3],f32> attributes {torch.onnx_meta.ir_version = 9 : si64, torch.onnx_meta.opset_version = 19 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[INT0:.+]] = torch.constant.int 0
@@ -1184,6 +1197,8 @@ func.func @test_reshape_reordered_all_dims(%arg0: !torch.vtensor<[2,3,4],f32>, %
   return %0 : !torch.vtensor<[4,2,3],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_reshape_zero_and_negative_dim
 func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>, %arg1: !torch.vtensor<[4],si64>) -> !torch.vtensor<[2,3,1,4],f32> attributes {torch.onnx_meta.ir_version = 9 : si64, torch.onnx_meta.opset_version = 19 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[INT0:.+]] = torch.constant.int 0
@@ -1224,6 +1239,8 @@ func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>
   return %0 : !torch.vtensor<[2,3,1,4],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_range_float64_type
 func.func @test_range_float64_type(%arg0: !torch.vtensor<[],f64>, %arg1: !torch.vtensor<[],f64>, %arg2: !torch.vtensor<[],f64>) -> !torch.vtensor<[2],f64> attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[NONE:.*]] torch.constant.none
@@ -1235,6 +1252,8 @@ func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>
   return %0 : !torch.vtensor<[2],f64>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_range_float32_type
 func.func @test_range_float32_type(%arg0: !torch.vtensor<[],f32>, %arg1: !torch.vtensor<[],f32>, %arg2: !torch.vtensor<[],f32>) -> !torch.vtensor<[2],f32> attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[NONE:.*]] torch.constant.none
@@ -1246,6 +1265,8 @@ func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>
   return %0 : !torch.vtensor<[2],f32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_range_int64_type
 func.func @test_range_int64_type(%arg0: !torch.vtensor<[],si64>, %arg1: !torch.vtensor<[],si64>, %arg2: !torch.vtensor<[],si64>) -> !torch.vtensor<[2],si64> attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[NONE:.*]] torch.constant.none
@@ -1257,6 +1278,8 @@ func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>
   return %0 : !torch.vtensor<[2],si64>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_range_int32_type
 func.func @test_range_int32_type(%arg0: !torch.vtensor<[],si32>, %arg1: !torch.vtensor<[],si32>, %arg2: !torch.vtensor<[],si32>) -> !torch.vtensor<[2],si32> attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[NONE:.*]] torch.constant.none
@@ -1268,6 +1291,8 @@ func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>
   return %0 : !torch.vtensor<[2],si32>
 }

+// -----
+
 // CHECK-LABEL: func.func @test_range_int16_type
 func.func @test_range_int16_type(%arg0: !torch.vtensor<[],si16>, %arg1: !torch.vtensor<[],si16>, %arg2: !torch.vtensor<[],si16>) -> !torch.vtensor<[2],si16> attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 13 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
   // CHECK: %[[NONE:.*]] torch.constant.none
@@ -1277,4 +1302,34 @@ func.func @test_reshape_zero_and_negative_dim(%arg0: !torch.vtensor<[2,3,4],f32>
   // CHECK: torch.aten.arange.start_step %0, %1, %2, %none, %none, %none, %none : !torch.int, !torch.int, !torch.int, !torch.none, !torch.none, !torch.none, !torch.none -> !torch.vtensor<[2],si16>
   %0 = torch.operator "onnx.Range"(%arg0, %arg1, %arg2) : (!torch.vtensor<[],si16>, !torch.vtensor<[],si16>, !torch.vtensor<[],si16>) -> !torch.vtensor<[2],si16>
   return %0 : !torch.vtensor<[2],si16>
+}
+
+// -----
+
+// CHECK-LABEL: func.func @test_top_k
+func.func @test_top_k(%arg0: !torch.vtensor<[3,4],f32>, %arg1: !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>) attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 11 : si64} {
+  // CHECK: %[[RESULTS:.*]]:2 = torch.operator "onnx.TopK"(%arg0, %arg1) {torch.onnx.axis = 1 : si64} : (!torch.vtensor<[3,4],f32>, !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>)
+  // CHECK: return %[[RESULTS]]#0, %[[RESULTS]]#1 : !torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>
+  %0:2 = torch.operator "onnx.TopK"(%arg0, %arg1) {torch.onnx.axis = 1 : si64} : (!torch.vtensor<[3,4],f32>, !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>)
+  return %0#0, %0#1 : !torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>
 }
+
+// -----
+
+// CHECK-LABEL: func.func @test_top_k_smallest
+func.func @test_top_k_smallest(%arg0: !torch.vtensor<[3,4],f32>, %arg1: !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>) attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 11 : si64, torch.onnx_meta.producer_name = "backend-test", torch.onnx_meta.producer_version = ""} {
+  // CHECK: %[[RESULTS:.*]]:2 = torch.operator "onnx.TopK"(%arg0, %arg1) {torch.onnx.axis = 1 : si64, torch.onnx.largest = 0 : si64, torch.onnx.sorted = 1 : si64} : (!torch.vtensor<[3,4],f32>, !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>)
+  // CHECK: return %[[RESULTS]]#0, %[[RESULTS]]#1 : !torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>
+  %0:2 = torch.operator "onnx.TopK"(%arg0, %arg1) {torch.onnx.axis = 1 : si64, torch.onnx.largest = 0 : si64, torch.onnx.sorted = 1 : si64} : (!torch.vtensor<[3,4],f32>, !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>)
+  return %0#0, %0#1 : !torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>
+}
+
+// -----
+
+// CHECK-LABEL: func.func @test_top_k_negative_axis
+func.func @test_top_k_negative_axis(%arg0: !torch.vtensor<[3,4],f32>, %arg1: !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>) attributes {torch.onnx_meta.ir_version = 6 : si64, torch.onnx_meta.opset_version = 11 : si64} {
+  // CHECK: %[[RESULTS:.*]]:2 = torch.operator "onnx.TopK"(%arg0, %arg1) {torch.onnx.axis = -1 : si64} : (!torch.vtensor<[3,4],f32>, !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>)
+  // CHECK: return %[[RESULTS]]#0, %[[RESULTS]]#1 : !torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>
+  %0:2 = torch.operator "onnx.TopK"(%arg0, %arg1) {torch.onnx.axis = -1 : si64} : (!torch.vtensor<[3,4],f32>, !torch.vtensor<[1],si64>) -> (!torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>)
+  return %0#0, %0#1 : !torch.vtensor<[3,3],f32>, !torch.vtensor<[3,3],si64>
+}