add csharp interface for vitisai #25827
base: rel-1.23.0
Changes from all commits
@@ -5,6 +5,7 @@
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
using System.Linq;

namespace Microsoft.ML.OnnxRuntime
{

@@ -257,6 +258,56 @@ public void AppendExecutionProvider_CUDA(OrtCUDAProviderOptions cudaProviderOpti
#endif
        }

        /// <summary>
        /// Append a VitisAI EP instance (configured based on given provider options) to the native OrtSessionOptions instance
        /// </summary>
        /// <param name="options">Native OrtSessionOptions instance</param>
        /// <param name="keys">Native keys instance</param>
        /// <param name="values">Native values instance</param>
        /// <param name="numEntries">Native numEntries instance</param>
        public void AppendExecutionProvider_VitisAI(Dictionary<string, string> config)
        {
#if __MOBILE__
            throw new NotSupportedException("The VitisAI Execution Provider is not supported in this build");
#else
            int count = config.Count;
            IntPtr[] keyPtrs = new IntPtr[count];
            IntPtr[] valuePtrs = new IntPtr[count];

            string[] keys = config.Keys.ToArray();
            string[] values = config.Values.ToArray();

            for (int i = 0; i < count; ++i) {
                keyPtrs[i] = Marshal.StringToHGlobalAnsi(keys[i]);
                valuePtrs[i] = Marshal.StringToHGlobalAnsi(values[i]);
            }

            IntPtr keysNative = Marshal.AllocHGlobal(IntPtr.Size * count);
Review comment on this line:

Global allocations are slow; their only advantage is that they do not need to be pinned. In this case, however, keysNative and valuesNative are automatically pinned when passed to the native method, so you will not need to allocate and copy into the *Native buffers at all. That also takes care of the memory leak if Copy() throws. The real issue here is that the strings need to be converted to UTF-8, not ANSI. Use the utility functions to accomplish that, then pin the resulting buffers and take the IntPtr addresses to place into keyPtrs and valuePtrs:

byte[] buffer = new byte[1024];
GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
IntPtr ptr = handle.AddrOfPinnedObject();
// Use ptr...
handle.Free();

You will need to unpin those (Free()) in the finally block. This way the Marshal functionality is no longer needed.
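To make that suggestion concrete, here is a minimal sketch of what the method body could look like rewritten along those lines. It is not code from this PR: it assumes the NativeMethods P/Invoke for SessionOptionsAppendExecutionProvider_VitisAI can be declared with IntPtr[] key/value parameters (blittable arrays are pinned by the interop marshaller for the duration of the call), uses plain Encoding.UTF8 in place of the internal string-conversion helpers the reviewer mentions, and omits the __MOBILE__ guard and XML docs for brevity.

        public void AppendExecutionProvider_VitisAI(Dictionary<string, string> config)
        {
            int count = config.Count;
            IntPtr[] keyPtrs = new IntPtr[count];
            IntPtr[] valuePtrs = new IntPtr[count];
            var pins = new List<GCHandle>(count * 2);
            try
            {
                int i = 0;
                foreach (var kvp in config)
                {
                    // Zero-terminated UTF-8 copies of key and value, pinned so the GC cannot move them.
                    var keyPin = GCHandle.Alloc(Encoding.UTF8.GetBytes(kvp.Key + "\0"), GCHandleType.Pinned);
                    pins.Add(keyPin);
                    keyPtrs[i] = keyPin.AddrOfPinnedObject();

                    var valuePin = GCHandle.Alloc(Encoding.UTF8.GetBytes(kvp.Value + "\0"), GCHandleType.Pinned);
                    pins.Add(valuePin);
                    valuePtrs[i] = valuePin.AddrOfPinnedObject();
                    ++i;
                }

                // keyPtrs and valuePtrs are blittable arrays, so the interop marshaller pins them
                // for the duration of the call; no AllocHGlobal/Copy step is required.
                NativeApiStatus.VerifySuccess(
                    NativeMethods.SessionOptionsAppendExecutionProvider_VitisAI(
                        handle, keyPtrs, valuePtrs, new UIntPtr((uint)count)));
            }
            finally
            {
                // Unpin every buffer, even if conversion or the native call threw.
                foreach (var pin in pins)
                {
                    pin.Free();
                }
            }
        }

Because every pinned handle is released in a single finally block, a failure during conversion or inside the native call cannot leak pinned buffers, which also addresses the leak concern raised above.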
            IntPtr valuesNative = Marshal.AllocHGlobal(IntPtr.Size * count);

            Marshal.Copy(keyPtrs, 0, keysNative, count);
            Marshal.Copy(valuePtrs, 0, valuesNative, count);

            try
            {
                UIntPtr numKeys = new UIntPtr((uint)count);
                NativeApiStatus.VerifySuccess(
                    NativeMethods.SessionOptionsAppendExecutionProvider_VitisAI(
                        handle, keysNative, valuesNative, numKeys));
            }
            finally
            {
                for (int i = 0; i < count; ++i)
                {
                    Marshal.FreeHGlobal(keyPtrs[i]);
                    Marshal.FreeHGlobal(valuePtrs[i]);
                }
                Marshal.FreeHGlobal(keysNative);
                Marshal.FreeHGlobal(valuesNative);
            }
#endif
        }

        /// <summary>
        /// Use only if you have the onnxruntime package specific to this Execution Provider.
        /// </summary>
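For reference, a call site for the new method would look roughly like the following sketch. The "config_file" key and the vaip_config.json path are illustrative placeholders, not option names documented by this PR.

using System.Collections.Generic;
using Microsoft.ML.OnnxRuntime;

var sessionOptions = new SessionOptions();
sessionOptions.AppendExecutionProvider_VitisAI(new Dictionary<string, string>
{
    // Placeholder option; actual VitisAI provider option names are not defined by this PR.
    { "config_file", "/path/to/vaip_config.json" }
});
using var session = new InferenceSession("model.onnx", sessionOptions);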
Review comment: We need to add a test for this functionality.
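A test along the following lines could serve as a starting point; the class name, the use of xunit, the placeholder option entry, and the expectation that a build without the VitisAI EP surfaces an OnnxRuntimeException are all assumptions rather than details taken from this PR.

using System.Collections.Generic;
using Microsoft.ML.OnnxRuntime;
using Xunit;

public class VitisAIExecutionProviderTests
{
    [Fact]
    public void AppendExecutionProviderVitisAI_AcceptsStringOptions()
    {
        using var options = new SessionOptions();
        var config = new Dictionary<string, string>
        {
            { "some_key", "some_value" } // placeholder entry, not a documented VitisAI option
        };

        try
        {
            // The interesting part is that the managed-to-native string marshaling
            // completes without corrupting memory; the call itself may be rejected.
            options.AppendExecutionProvider_VitisAI(config);
        }
        catch (OnnxRuntimeException)
        {
            // Expected on builds that do not include the VitisAI execution provider.
        }
    }
}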