Add CI
All checks were successful
continuous-integration/drone/push Build is passing

Andrey Tkachenko 2022-03-09 13:44:32 +04:00
parent 860d59f37c
commit 4cafc5c453
5 changed files with 53 additions and 28 deletions

.drone.yml Normal file

@@ -0,0 +1,13 @@
kind: pipeline
name: default
steps:
- name: build
image: hub.aidev.ru/rust-onnxruntime
commands:
- cargo build --verbose --all
- name: fmt-check
image: hub.aidev.ru/rust-onnxruntime
commands:
- cargo fmt --all -- --check
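
The hub.aidev.ru/rust-onnxruntime image is presumably a private registry image bundling the Rust toolchain with ONNX Runtime. The two pipeline steps can be reproduced locally with the same commands; a minimal sketch, assuming rustup manages the toolchain:

    rustup component add rustfmt    # the fmt-check step needs rustfmt available
    cargo build --verbose --all     # same command as the CI build step
    cargo fmt --all -- --check      # exits non-zero if any file is not rustfmt-clean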


@@ -25,7 +25,10 @@ fn main() -> Result<()> {
let so = SessionOptions::new()?;
println!("Available Providers: {:?}", SessionOptions::available_providers());
println!(
"Available Providers: {:?}",
SessionOptions::available_providers()
);
for path in &opt.onnx {
println!("model {:?}", path);


@@ -2,8 +2,8 @@ use std::ffi::CStr;
use std::time::{Duration, Instant};
use onnxruntime::*;
use structopt::{clap, StructOpt};
use std::path::PathBuf;
use structopt::{clap, StructOpt};
#[structopt(
name = "run",
@@ -73,9 +73,13 @@ fn tensor_size(
(info.elem_type(), dims)
}
fn tensor_mut(name: &str, elem_type: OnnxTensorElementDataType, dims: &[usize]) -> Box<dyn AsMut<Val>> {
fn tensor_mut(
name: &str,
elem_type: OnnxTensorElementDataType,
dims: &[usize],
) -> Box<dyn AsMut<Val>> {
use OnnxTensorElementDataType::*;
println!("{:?} {} {:?}", elem_type, name, dims);
match elem_type {
@@ -87,7 +91,8 @@ fn tensor_mut(name: &str, elem_type: OnnxTensorElementDataType, dims: &[usize])
}
fn load_image(filename: &str, height: usize, width: usize) -> Vec<f32> {
let img = image::open(filename).unwrap()
let img = image::open(filename)
.unwrap()
// .resize_exact(width as _, height as _, image::imageops::FilterType::Triangle)
.into_rgb();
@@ -110,7 +115,13 @@ fn tensor_with_size(
println!("{:?} {} {:?}", ty, name, dims);
match ty {
Float => match name {
"input" => Box::new(Tensor::<f32>::new(&dims, load_image("/data/andrey_/Images/me.jpg", dims[2], dims[3])).unwrap()),
"input" => Box::new(
Tensor::<f32>::new(
&dims,
load_image("/data/andrey_/Images/me.jpg", dims[2], dims[3]),
)
.unwrap(),
),
_ => Box::new(Tensor::<f32>::init(&dims, 0.0).unwrap()),
},
Int64 => Box::new(Tensor::<i64>::init(&dims, 0).unwrap()),
@@ -162,7 +173,11 @@ fn main() -> Result<()> {
for (i, input) in session.inputs().enumerate() {
if let Some(tensor_info) = input.tensor_info() {
input_names.push(input.name());
input_tensors.push(tensor_with_size(input.name().as_str(), &tensor_info, &mut map));
input_tensors.push(tensor_with_size(
input.name().as_str(),
&tensor_info,
&mut map,
));
} else {
println!("input {}: {:?} {:?}", i, &*input.name(), input.onnx_type());
}
@@ -199,7 +214,7 @@ fn main() -> Result<()> {
let tensor = match res.pop().unwrap().as_tensor::<f32>() {
Ok(t) => t,
_ => panic!("something went wrong")
_ => panic!("something went wrong"),
};
println!("[{:?}] {}", tensor.dims(), before.elapsed().as_millis())


@@ -7,8 +7,9 @@ use std::ptr;
pub mod sys;
// Re-export enums
pub use sys::{
AllocatorType, ErrorCode, ExecutionMode, GraphOptimizationLevel, LoggingLevel, MemType,
OnnxTensorElementDataType, OnnxType, CUDAProviderOptions, CudnnConvAlgoSearch, OpenVINOProviderOptions
AllocatorType, CUDAProviderOptions, CudnnConvAlgoSearch, ErrorCode, ExecutionMode,
GraphOptimizationLevel, LoggingLevel, MemType, OnnxTensorElementDataType, OnnxType,
OpenVINOProviderOptions,
};
#[macro_use]
@@ -160,7 +161,7 @@ impl SessionOptions {
let raw = call!(@unsafe @ptr CreateSessionOptions)?;
Ok(SessionOptions { raw })
}
pub fn available_providers() -> Vec<String> {
let mut providers_array: *mut *mut i8 = std::ptr::null_mut();
let mut providers_len: i32 = 0;
@@ -168,10 +169,10 @@ impl SessionOptions {
call!(@unsafe @expect GetAvailableProviders, &mut providers_array, &mut providers_len);
let slice = unsafe { std::slice::from_raw_parts(providers_array, providers_len as usize) };
let mut res = Vec::new();
for i in slice {
res.push(unsafe {CStr::from_ptr(*i)}.to_string_lossy().to_string());
res.push(unsafe { CStr::from_ptr(*i) }.to_string_lossy().to_string());
}
call!(@unsafe @expect ReleaseAvailableProviders, providers_array, providers_len);
@@ -221,9 +222,8 @@ impl SessionOptions {
pub fn add_tensorrt(&self, device_id: i32) {
let so = self.raw;
let status = unsafe {
crate::sys::SessionOptionsAppendExecutionProvider_Tensorrt(so, device_id)
};
let status =
unsafe { crate::sys::SessionOptionsAppendExecutionProvider_Tensorrt(so, device_id) };
if !status.is_null() {
panic!("!!!");
@@ -241,7 +241,7 @@ impl SessionOptions {
fn set_session_log_id(log_id: &str) { SetSessionLogId };
fn en_prof(path: &CStr | .as_ptr()) { EnableProfiling };
fn set_execution_mode(mode: ExecutionMode) { SetSessionExecutionMode };
fn set_session_log_verbosity_level(verbosity_level: i32) { SetSessionLogVerbosityLevel };
fn set_session_log_severity_level(severity_level: i32) { SetSessionLogSeverityLevel };
fn set_session_graph_optimization_level(graph_optimization_level: GraphOptimizationLevel)
@@ -493,7 +493,8 @@ impl Session {
assert_eq!(input_names.len(), inputs.len());
let output_size = output_names.len() as u64;
let mut raw_outputs: Box<[*mut sys::Value]> = (0..output_size).map(|_| ptr::null_mut()).collect();
let mut raw_outputs: Box<[*mut sys::Value]> =
(0..output_size).map(|_| ptr::null_mut()).collect();
call!(@unsafe
Run,
self.raw,
@@ -506,12 +507,7 @@ impl Session {
raw_outputs.as_mut_ptr() as *mut *mut sys::Value
)?;
Ok(
raw_outputs
.into_iter()
.map(|v| Value { raw: *v })
.collect()
)
Ok(raw_outputs.into_iter().map(|v| Value { raw: *v }).collect())
}
}


@@ -42,11 +42,9 @@ impl Val {
pub fn as_slice<T: OrtType>(&self) -> Option<&[T]> {
let st = self.shape_and_type();
if st.elem_type() == T::onnx_type() {
let len = st.dims().into_iter().map(|x|x as usize).product();
let len = st.dims().into_iter().map(|x| x as usize).product();
let data = self.tensor_data();
Some(unsafe {
&*std::ptr::slice_from_raw_parts(data as *mut T, len)
})
Some(unsafe { &*std::ptr::slice_from_raw_parts(data as *mut T, len) })
} else {
None
}