6 files changed, +9 -9 lines changed

@@ -29,7 +29,7 @@ impl Model {
 
     /// Get the pointer to the underlying [`ov_model_t`].
     #[inline]
-    pub(crate) fn as_ptr(&self) -> *mut ov_model_t {
+    pub(crate) fn as_ptr(&self) -> *const ov_model_t {
         self.ptr
     }
 
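Returning `*const ov_model_t` from a `&self` getter makes the read-only intent explicit: callers receive a const pointer, and any binding that genuinely needs a mutable model pointer now has to cast visibly at the call site. A rough crate-internal sketch of the effect; the `ov_*` names in the comments are placeholders, not functions touched by this PR:

    // Sketch only: `model` is a `Model`, and the commented-out calls use
    // placeholder names to show read-only vs. mutating use.
    let raw: *const ov_model_t = model.as_ptr();
    // A read-only binding can take `raw` directly:
    //     try_unsafe!(ov_some_read_only_call(raw))?;
    // A binding that wants `*mut ov_model_t` now needs a visible cast:
    //     try_unsafe!(ov_some_mutating_call(raw.cast_mut()))?;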
@@ -55,7 +55,7 @@ impl Model {
             index,
             std::ptr::addr_of_mut!(node)
         ))?;
-        Ok(Node::new(node))
+        Ok(Node::from_ptr(node))
     }
 
     /// Retrieve the output node by index.
@@ -66,7 +66,7 @@ impl Model {
             index,
             std::ptr::addr_of_mut!(node)
         ))?;
-        Ok(Node::new(node))
+        Ok(Node::from_ptr(node))
     }
 
     /// Retrieve the constant output node by index.
@@ -77,7 +77,7 @@ impl Model {
             index,
             std::ptr::addr_of_mut!(node)
         ))?;
-        Ok(Node::new(node))
+        Ok(Node::from_ptr(node))
     }
 
     /// Returns `true` if the model contains dynamic shapes.
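All three by-index accessors on `Model` (input, output, and constant output) now build their result with `Node::from_ptr` instead of `Node::new`. The hunks cut off the surrounding method bodies, so the following is only a sketch of the shared pattern; the accessor name and the `ov_*` binding are paraphrased assumptions, not lines from this PR:

    pub fn get_output_by_index(&self, index: usize) -> Result<Node> {
        // Placeholder names; only the last line reflects the actual change.
        let mut node = std::ptr::null_mut();
        try_unsafe!(ov_model_const_output_by_index(
            self.ptr,
            index,
            std::ptr::addr_of_mut!(node)
        ))?;
        Ok(Node::from_ptr(node))
    }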

@@ -14,7 +14,7 @@ pub struct Node {
 impl Node {
     /// Create a new [`Port`] from [`ov_output_const_port_t`].
     #[inline]
-    pub(crate) fn new(ptr: *mut ov_output_const_port_t) -> Self {
+    pub(crate) fn from_ptr(ptr: *mut ov_output_const_port_t) -> Self {
         Self { ptr }
     }
 
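Renaming the constructor from `new` to `from_ptr` follows the common Rust convention for types that wrap a raw pointer: the name tells crate-internal callers that the pointer is stored as-is, without any validity check, and is expected to come straight from an OpenVINO C API out-parameter. A minimal usage sketch; `raw_port` and the debug assertion are illustrative, not part of this change:

    // `raw_port` stands in for a pointer just written by an `ov_*` call.
    debug_assert!(!raw_port.is_null());
    let node = Node::from_ptr(raw_port);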

@@ -26,7 +26,7 @@ impl InferRequest {
     }
 
     /// Assign a [`Tensor`] to the input on the model.
-    pub fn set_tensor(&mut self, name: &str, tensor: Tensor) -> Result<()> {
+    pub fn set_tensor(&mut self, name: &str, tensor: &Tensor) -> Result<()> {
         try_unsafe!(ov_infer_request_set_tensor(
             self.ptr,
             cstr!(name),
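Taking `&Tensor` instead of `Tensor` means the caller keeps ownership of the tensor and can continue using it after binding it to an input; previously the tensor was moved into `set_tensor` and lost to the caller. Usage after this change, mirroring the updated tests below:

    // The tensor is only borrowed, so it remains usable after the call.
    infer_request.set_tensor("data", &tensor)?;
    infer_request.infer()?;
    // `tensor` is still owned by the caller here and could, for example,
    // be bound again for another inference run.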

@@ -49,7 +49,7 @@ fn classify_alexnet() -> anyhow::Result<()> {
     // Compile the model and infer the results.
     let mut executable_model = core.compile_model(&new_model, DeviceType::CPU)?;
     let mut infer_request = executable_model.create_infer_request()?;
-    infer_request.set_tensor("data", tensor)?;
+    infer_request.set_tensor("data", &tensor)?;
     infer_request.infer()?;
     let mut results = infer_request.get_tensor(&output_port.get_name()?)?;
 

@@ -46,7 +46,7 @@ fn classify_inception() -> anyhow::Result<()> {
     // Compile the model and infer the results.
     let mut executable_model = core.compile_model(&new_model, DeviceType::CPU)?;
     let mut infer_request = executable_model.create_infer_request()?;
-    infer_request.set_tensor("input", tensor)?;
+    infer_request.set_tensor("input", &tensor)?;
     infer_request.infer()?;
     let mut results = infer_request.get_tensor(&output_port.get_name()?)?;
 

@@ -49,7 +49,7 @@ fn classify_mobilenet() -> anyhow::Result<()> {
     // Compile the model and infer the results.
     let mut executable_model = core.compile_model(&new_model, DeviceType::CPU)?;
     let mut infer_request = executable_model.create_infer_request()?;
-    infer_request.set_tensor("input", tensor)?;
+    infer_request.set_tensor("input", &tensor)?;
     infer_request.infer()?;
     let mut results = infer_request.get_tensor(&output_port.get_name()?)?;
 