diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.shared.cs
index 6d69f58d2041..a73bca7f3ec3 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.shared.cs
@@ -83,7 +83,7 @@ public class DisposableNamedOnnxValue : NamedOnnxValue, IDisposable
         /// Ctor
         ///
         /// Name of the output value
-        /// Managed object created to represent output value, such as DenseTensor
+        /// Managed object created to represent output value, such as DenseTensor{T};
         /// List or Dictionary
         ///
         /// Tensor element type if value type is a Tensor
@@ -133,7 +133,7 @@ private DisposableNamedOnnxValue(string name, Object value, MapHelper mapHelper,
         public TensorElementType ElementType { get; }

         ///
-        /// Overrides the base class method. With respect to pinnedMemoryHandle, it has no operation
+        /// Overrides the base class method. With respect to memoryHolder, it has no operation
         /// to do, as this class maintains a native buffer via _ortValueHolder and the memory will be
         /// disposed by it. This is the case when we are dealing with an OrtValue that is backed by native memory
         /// and not by pinned managed memory.
@@ -142,7 +142,7 @@ private DisposableNamedOnnxValue(string name, Object value, MapHelper mapHelper,
         /// but the interface (derived from NamedOnnxValue) allows it to be passed as output and one of the test
         /// cases does it. Unless we deprecate and re-do the interface, we must support it.
         ///
-        /// <param name="pinnedMemoryHandle">always set to null</param>
+        /// <param name="memoryHolder">always set to null</param>
         /// Native OrtValue handle
         internal override IntPtr InputToOrtValueHandle(NodeMetadata metadata, out IDisposable memoryHolder)
         {
@@ -150,7 +150,7 @@ internal override IntPtr InputToOrtValueHandle(NodeMetadata metadata, out IDispo
             {
                 throw new InvalidOperationException("The instance of this class does not own an OrtValue");
             }
-            // PinnedMemoryHandle holds the default value as DisposableNamedOnnxValue
+            // memoryHolder holds the default value as DisposableNamedOnnxValue
             // doesn't hold any managed buffer (that needs to be pinned)
             memoryHolder = null;
             // Return non-owning instance of OrtValue
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs
index b21d036f61be..b62a3c50bfda 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs
@@ -908,7 +908,7 @@ private static IntPtr ExtractOrtValueHandleForOutput(NamedOnnxValue output, Node
         ///
         /// names to convert to zero terminated utf8 and pin
         /// extractor functor that helps extracting names from inputs
-        /// inputs/outputs metadata
+        /// inputs/outputs metadata
         ///
         private static IntPtr[] LookupUtf8Names(IReadOnlyCollection values, NameExtractor nameExtractor,
             MetadataLookup metaLookup)
@@ -1222,7 +1222,6 @@ private void Init(byte[] modelData, SessionOptions options,
         /// Initializes the session object with a native session handle
         ///
         /// Value of a native session object
-        /// Session options
        private void InitWithSessionHandle(IntPtr session)
        {
            _nativeHandle = session;
@@ -2075,7 +2074,7 @@ public long Version
         ///
         /// Custom metadata key/value pairs
         ///
-        /// An instance of a Dictionary
+        /// An instance of a Dictionary{string,string}
         public Dictionary CustomMetadataMap
         {
             get
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/ManagedProjections.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/ManagedProjections.shared.cs
index e512a8c2612a..13117f23e8ef 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/ManagedProjections.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/ManagedProjections.shared.cs
@@ -65,7 +65,7 @@ internal static OrtValue CreateProjection(NamedOnnxValue namedOnnxValue, NodeMet
         /// The function creates OrtValue objects for each element of the sequence
         /// and then creates an OrtValue for the whole sequence.
         ///
-        /// NamedOnnxValue containing a IEnumerable
+        /// NamedOnnxValue containing a IEnumerable{NamedOnnxValue}
         /// sequence metadata
         /// OrtValue that represents a sequence
         ///
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.shared.cs
index d73c471bb9d7..48a10455588b 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.shared.cs
@@ -53,13 +53,13 @@ internal MapHelper(TensorBase keys, TensorBase values)
     /// Other sequences and maps. Although the OnnxValueType is exposed,
     /// the caller is supposed to know the actual data type contained.
     ///
-    /// The convention is that for tensors, it would contain a DenseTensor instance or
-    /// anything derived from Tensor.
+    /// The convention is that for tensors, it would contain a DenseTensor{T} instance or
+    /// anything derived from Tensor{T}.
     ///
-    /// For sequences, it would contain a IList where T is an instance of NamedOnnxValue that
+    /// For sequences, it would contain a IList{T} where T is an instance of NamedOnnxValue that
     /// would contain a tensor or another type.
     ///
-    /// For Maps, it would contain a IDictionary where K,V are primitive types or strings.
+    /// For Maps, it would contain a IDictionary{K, V} where K,V are primitive types or strings.
     ///
     ///
     public class NamedOnnxValue
@@ -153,7 +153,7 @@ public static NamedOnnxValue CreateFromSequence(string name, IEnumerable v
         }

         ///
-        /// Instantiates NamedOnnxValue that contains IDictionary
+        /// Instantiates NamedOnnxValue that contains IDictionary{K, V}
         ///
         /// Keys type
         /// Values type
@@ -225,7 +225,7 @@ public IDictionary AsDictionary()
         /// based on the pinned managed memory. The caller is responsible for Disposing
         /// both OrtValue and pinnedMemoryHandle
         ///
-        /// dispose after returned OrtValus is disposed
+        /// dispose after returned OrtValue is disposed
         /// The native OrtValue handle
        internal virtual IntPtr InputToOrtValueHandle(NodeMetadata metadata, out IDisposable memoryOwner)
        {
@@ -272,12 +272,6 @@ internal virtual IntPtr OutputToOrtValueHandle(NodeMetadata metadata, out IDispo
             $" Use Run() overloads that return DisposableNamedOnnxValue to get access to all Onnx value types that may be returned as output.");
         }

-        ///
-        /// This method is used internally to feed dictionary keys
-        /// to create an OrtValue for map keys
-        ///
-        ///
-        /// DenseTensor"
        internal TensorBase GetDictionaryKeys()
        {
            if (ValueType != OnnxValueType.ONNX_TYPE_MAP)
@@ -289,11 +283,6 @@ internal TensorBase GetDictionaryKeys()
             return _mapHelper.Keys;
         }

-        ///
-        ///
-        ///
-        ///
-        /// DenseTensor"
        internal TensorBase GetDictionaryValues()
        {
            if (ValueType != OnnxValueType.ONNX_TYPE_MAP)
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs
index 44d2222dbce1..b2a7b75891a2 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs
@@ -1506,7 +1506,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         ///
         /// Destroy OrtIoBinding instance created by OrtCreateIoBinding
         ///
-        /// instance of OrtIoBinding
+        /// instance of OrtIoBinding
         [UnmanagedFunctionPointer(CallingConvention.Winapi)]
         public delegate void DOrtReleaseIoBinding(IntPtr /*(OrtIoBinding)*/ io_binding);
@@ -1516,7 +1516,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         /// Bind OrtValue to the model input with the specified name
         /// If binding with the specified name already exists, it will be replaced
         ///
-        /// instance of OrtIoBinding
+        /// instance of OrtIoBinding
         /// model input name (utf-8)
         /// OrtValue that is used for input (may wrap arbitrary memory).
         /// The param instance is copied internally so this argument may be released.
@@ -1544,7 +1544,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         /// Bind OrtValue to the model output with the specified name
         /// If binding with the specified name already exists, it will be replaced
         ///
-        /// instance of OrtIoBinding
+        /// instance of OrtIoBinding
         /// model output name (utf-8)
         /// OrtValue that is used for output (may wrap arbitrary memory).
         /// The param instance is copied internally so this argument may be released.
@@ -1605,7 +1605,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         /// The function returns output values after the model has been run with RunWithBinding()
         /// It returns a natively allocated buffer of OrtValue pointers. All of the OrtValues must be individually
         /// released after no longer needed. You may use OrtValue disposable class to wrap the native handle and properly dispose it
-        /// in connection with DisposableList. All values are returned in the same order as they were bound.
+        /// in connection with DisposableList{T}. All values are returned in the same order as they were bound.
         /// The buffer that contains OrtValues must deallocated using the same allocator that was specified as an argument.
         /// You may use an instance OrtMemoryAllocation to properly dispose of the native memory.
         ///
@@ -1643,9 +1643,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         ///
         /// Provides element-level access into a tensor.
         ///
-        /// a pointer to an array of index values that specify an element's location in the tensor data blob
-        /// length of location_values
-        /// a pointer to the element specified by location_values
+        /// instance of OrtIoBinding
         [UnmanagedFunctionPointer(CallingConvention.Winapi)]
         public delegate void DOrtTensorAt(IntPtr /*(OrtIoBinding)*/ io_binding);
@@ -1656,10 +1654,11 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         /// sharing between multiple sessions that use the same env instance.
         /// Lifetime of the created allocator will be valid for the duration of the environment.
         /// Returns an error if an allocator with the same OrtMemoryInfo is already registered.
+        ///
         /// Native OrtEnv instance
         /// Native OrtMemoryInfo instance
         /// Native OrtArenaCfg instance
-        /// A pointer to native ortStatus indicating success/failure
+        /// A pointer to native ortStatus indicating success/failure
         [UnmanagedFunctionPointer(CallingConvention.Winapi)]
         public delegate IntPtr /*(OrtStatus*)*/ DOrtCreateAndRegisterAllocator(IntPtr /*(OrtEnv*)*/ env,
                                                                                IntPtr /*(const OrtMemoryInfo*)*/ memInfo,
@@ -1890,7 +1889,7 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
         public static DOrtFillStringTensor OrtFillStringTensor;

         /// \param value A tensor created from OrtCreateTensor... function.
-        /// \param index The index of the entry in the tensor to resize.
+        /// \param index The index of the entry in the tensor to resize.
         /// \param length_in_bytes Length to resize the string to.
         /// \param buffer The resized buffer.
         [UnmanagedFunctionPointer(CallingConvention.Winapi)]
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs
index 96afb48fcc35..fc14be00ee47 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs
@@ -226,7 +226,7 @@ internal MarshaledString(string input)
         }

         ///
-        // Native allocation (UTF8-8 string length with terminating zero)
+        /// Native allocation (UTF8-8 string length with terminating zero)
         ///
         internal int Length { get; private set; }
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtEnv.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtEnv.shared.cs
index 1a03338298fa..f4b2649f8d05 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtEnv.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtEnv.shared.cs
@@ -126,7 +126,7 @@ private OrtEnv(IntPtr handle, OrtLoggingLevel logLevel)
         ///
         ///
         ///
-        ///
+        ///
         ///
         private static void LoggingFunctionThunk(IntPtr param,
                                                  IntPtr severity,
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtFloat16.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtFloat16.shared.cs
index 20be2acacfc5..7c22e1b213b4 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtFloat16.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtFloat16.shared.cs
@@ -134,11 +134,11 @@ internal static uint BFloat16BitsToSingleBits(ushort bfloatBits)
         }

         ///
-        /// Creates float NaN with the given sign and fp16 significand shifted << 54
+        /// Creates float NaN with the given sign and fp16 significand shifted << 54
         ///
         /// true for negative
         /// should be shifted 54 bits left before calling the function
-        /// so only 8 bits of signidicand remains
+        /// so only 8 bits of significand remains
         ///
         internal static float CreateSingleNaN(bool sign, ulong significand)
         {
@@ -416,12 +416,11 @@ internal static ushort ExtractTrailingSignificandFromBits(ushort bits)
         ///
         /// Compares values of two Float16
-        ///
         ///
         /// left hand side
         /// right hand side
         /// returns true if left is greater or equal than right according to IEEE
-        ///
+        ///
         public static bool operator >=(Float16 left, Float16 right)
         {
             return right <= left;
         }
@@ -492,7 +491,7 @@ public static bool IsNaN(Float16 value)
         /// Determines whether the specified value is negative.
         ///
         /// Float16 instance
-        /// true if the value is negative
+        /// true if the value is negative
         public static bool IsNegative(Float16 value)
         {
             return (short)(value.value) < 0;
         }
@@ -1115,7 +1114,7 @@ public static bool IsNaN(BFloat16 value)
         /// Determines whether the specified value is negative.
         ///
         /// BFloat16 instance
-        /// true if the value is negative
+        /// true if the value is negative
         public static bool IsNegative(BFloat16 value)
         {
             return (short)(value.value) < 0;
         }
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/ProviderOptions.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/ProviderOptions.shared.cs
index e2c8a44d92dc..6a7922357aa3 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/ProviderOptions.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/ProviderOptions.shared.cs
@@ -318,9 +318,9 @@ public static void StringToDict(string s, Dictionary dict)
     }

     ///
-    /// CoreML flags for use with SessionOptions
+    /// CoreML flags for use with SessionOptions.
+    /// See https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/providers/coreml/coreml_provider_factory.h
     ///
-
     [Flags]
     public enum CoreMLFlags : uint
     {
@@ -332,9 +332,9 @@ public enum CoreMLFlags : uint
     }

     ///
-    /// NNAPI flags for use with SessionOptions
+    /// NNAPI flags for use with SessionOptions.
+    /// See https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/providers/nnapi/nnapi_provider_factory.h
     ///
-
     [Flags]
     public enum NnapiFlags
     {
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
index 6ecfee0a35b6..3acd84b3016d 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
@@ -768,8 +768,8 @@ public int LogVerbosityLevel
         private int _logVerbosityLevel = 0;

         ///
-        // Sets the number of threads used to parallelize the execution within nodes
-        // A value of 0 means ORT will pick a default
+        /// Sets the number of threads used to parallelize the execution within nodes
+        /// A value of 0 means ORT will pick a default
         ///
         /// returns _intraOpNumThreads value
         public int IntraOpNumThreads
@@ -787,9 +787,9 @@ public int IntraOpNumThreads
         private int _intraOpNumThreads = 0; // set to what is set in C++ SessionOptions by default;

         ///
-        // Sets the number of threads used to parallelize the execution of the graph (across nodes)
-        // If sequential execution is enabled this value is ignored
-        // A value of 0 means ORT will pick a default
+        /// Sets the number of threads used to parallelize the execution of the graph (across nodes)
+        /// If sequential execution is enabled this value is ignored
+        /// A value of 0 means ORT will pick a default
         ///
         /// returns _interOpNumThreads value
         public int InterOpNumThreads
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
index 0892e17fc97b..d63e1fc953b7 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
@@ -255,7 +255,7 @@ public void CanCreateAndDisposeSessionWithModel()
             {
                 Assert.NotNull(session);
                 Assert.NotNull(session.InputMetadata);
-                Assert.Equal(1, session.InputMetadata.Count); // 1 input node
+                Assert.Single(session.InputMetadata); // 1 input node
                 Assert.True(session.InputMetadata.ContainsKey("data_0")); // input node name
                 Assert.Equal(typeof(float), session.InputMetadata["data_0"].ElementType);
                 Assert.True(session.InputMetadata["data_0"].IsTensor);
@@ -267,7 +267,7 @@ public void CanCreateAndDisposeSessionWithModel()
             }

             Assert.NotNull(session.OutputMetadata);
-            Assert.Equal(1, session.OutputMetadata.Count); // 1 output node
+            Assert.Single(session.OutputMetadata); // 1 output node
             Assert.True(session.OutputMetadata.ContainsKey("softmaxout_1")); // output node name
             Assert.Equal(typeof(float), session.OutputMetadata["softmaxout_1"].ElementType);
             Assert.True(session.OutputMetadata["softmaxout_1"].IsTensor);
@@ -614,7 +614,7 @@ private void ValidateRunResults(IReadOnlyCollection results)
             // validate the results
             foreach (var r in results)
             {
-                Assert.Equal(1, results.Count);
+                Assert.Single(results);
                 Assert.Equal("softmaxout_1", r.Name);

                 float[] expectedOutput = TestDataLoader.LoadTensorFromEmbeddedResource("bench.expected_out");
@@ -798,7 +798,7 @@ private void ThrowInconsistentPinnedOutputs()
         }

         [Fact(DisplayName = "TestMultiThreads")]
-        private void TestMultiThreads()
+        private async Task TestMultiThreads()
         {
             var numThreads = 10;
             var loop = 10;
@@ -824,7 +824,7 @@ private void TestMultiThreads()
                 }
                 }));
             };
-            Task.WaitAll(tasks);
+            await Task.WhenAll(tasks);
             session.Dispose();
         }
@@ -838,7 +838,7 @@ private void TestOverridableInitializerMetadata()
             Assert.True(session.InputMetadata.ContainsKey("Label"));
             Assert.True(session.InputMetadata.ContainsKey("F2"));

-            Assert.Equal(1, session.OverridableInitializerMetadata.Count);
+            Assert.Single(session.OverridableInitializerMetadata);
             Assert.True(session.OverridableInitializerMetadata.ContainsKey("F1"));
             Assert.True(session.OverridableInitializerMetadata["F1"].IsTensor);
             Assert.Equal(typeof(float), session.OverridableInitializerMetadata["F1"].ElementType);
@@ -886,7 +886,7 @@ private void TestSymbolicDimsMetadata()
             var outputs = session.OutputMetadata;

             Assert.Equal(2, inputs.Count);
-            Assert.Equal(1, session.OutputMetadata.Count);
+            Assert.Single(session.OutputMetadata);
             Assert.True(inputs.ContainsKey("A"));
             Assert.True(inputs.ContainsKey("B"));
             Assert.True(outputs.ContainsKey("C"));
@@ -1432,6 +1432,7 @@ private void TestModelSequenceOfMapIntFloat()
             {
                 // first output is a tensor containing label
                 var outNode0 = outputs.ElementAtOrDefault(0);
+                Assert.NotNull(outNode0);
                 Assert.Equal("label", outNode0.Name);
                 Assert.Equal(OnnxValueType.ONNX_TYPE_TENSOR, outNode0.ValueType);
                 Assert.Equal(Tensors.TensorElementType.Int64, outNode0.ElementType);
@@ -1446,6 +1447,7 @@ private void TestModelSequenceOfMapIntFloat()
                 // second output is a sequence>
                 // try-cast to an sequence of NOV
                 var outNode1 = outputs.ElementAtOrDefault(1);
+                Assert.NotNull(outNode1);
                 Assert.Equal("probabilities", outNode1.Name);
                 Assert.Equal(OnnxValueType.ONNX_TYPE_SEQUENCE, outNode1.ValueType);
@@ -1525,6 +1527,7 @@ private void TestModelSequenceOfMapStringFloat()
             {
                 // first output is a tensor containing label
                 var outNode0 = outputs.ElementAtOrDefault(0);
+                Assert.NotNull(outNode0);
                 Assert.Equal("label", outNode0.Name);
                 Assert.Equal(OnnxValueType.ONNX_TYPE_TENSOR, outNode0.ValueType);
                 Assert.Equal(TensorElementType.String, outNode0.ElementType);
@@ -1539,6 +1542,7 @@ private void TestModelSequenceOfMapStringFloat()
                 // second output is a sequence>
                 // try-cast to an sequence of NOV
                 var outNode1 = outputs.ElementAtOrDefault(1);
+                Assert.NotNull(outNode1);
                 Assert.Equal("probabilities", outNode1.Name);
                 Assert.Equal(OnnxValueType.ONNX_TYPE_SEQUENCE, outNode1.ValueType);
@@ -1592,6 +1596,7 @@ private void TestModelSequenceOfTensors()
             // output is a sequence
             // try-cast to an sequence of NOV
             var outNode = outputs.ElementAtOrDefault(0);
+            Assert.NotNull(outNode);
             Assert.Equal("output_sequence", outNode.Name);
             Assert.Equal(OnnxValueType.ONNX_TYPE_SEQUENCE, outNode.ValueType);
@@ -2035,7 +2040,7 @@ public SkipNonPackageTests()
         }

         [Fact(DisplayName = "TestModelRunAsyncTask")]
-        private async void TestModelRunAsyncTask()
+        private async Task TestModelRunAsyncTask()
         {
             Float16[] inputData = { new Float16(15360), new Float16(16384), new Float16(16896), new Float16(17408), new Float16(17664) };
             long[] shape = { 1, 5 };
@@ -2070,7 +2075,7 @@ private async void TestModelRunAsyncTask()
         }

         [Fact(DisplayName = "TestModelRunAsyncTaskFail")]
-        private async void TestModelRunAsyncTaskFail()
+        private async Task TestModelRunAsyncTaskFail()
         {
             Float16[] inputData = { new Float16(15360), new Float16(16384), new Float16(16896), new Float16(17408), new Float16(17664) };
             long[] shape = { 1, 5 };
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj
index a26e17b9ee0f..3ce19ab2f1de 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Microsoft.ML.OnnxRuntime.Tests.Common.csproj
@@ -117,10 +117,10 @@
-
+
-
+
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs
index b07bcdeeb3b1..de7f0690c726 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs
@@ -33,6 +33,7 @@ public class OrtIoBindingAllocationTests : IDisposable
         private readonly DisposableListTest _dispList = new DisposableListTest();
         private bool _disposed = false;
+        private OrtEnv _env = OrtEnv.Instance();

         public OrtIoBindingAllocationTests()
         {
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs
index c3a6b1059ad8..27cde1dbe9ed 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/Tensors/TensorTests.cs
@@ -2220,7 +2220,9 @@ public void TestICollectionMembers(TensorConstructor constructor)
                 new[] { 0, 0, 1, 2, 3, 4, 5, 6 };
             Assert.Equal(expected, actual);

+#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type.
             Assert.Throws(() => tensorCollection.CopyTo(null, 0));
+#pragma warning restore CS8625 // Cannot convert null literal to non-nullable reference type.
             Assert.Throws(() => tensorCollection.CopyTo(new int[3, 4], 0));
             Assert.Throws(() => tensorCollection.CopyTo(new int[5], 0));
             Assert.Throws(() => tensorCollection.CopyTo(new int[6], 1));
@@ -2311,7 +2313,9 @@ public void TestICollectionTMembers(TensorConstructor constructor)
                 new[] { 0, 0, 1, 2, 3, 4, 5, 6 };
             Assert.Equal(expected, actual);

+#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type.
             Assert.Throws(() => tensorCollection.CopyTo(null, 0));
+#pragma warning restore CS8625 // Cannot convert null literal to non-nullable reference type.
             Assert.Throws(() => tensorCollection.CopyTo(new int[5], 0));
             Assert.Throws(() => tensorCollection.CopyTo(new int[6], 1));
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Droid/Microsoft.ML.OnnxRuntime.Tests.Droid.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Droid/Microsoft.ML.OnnxRuntime.Tests.Droid.csproj
index 7876f8181520..f65031b01cd8 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Droid/Microsoft.ML.OnnxRuntime.Tests.Droid.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Droid/Microsoft.ML.OnnxRuntime.Tests.Droid.csproj
@@ -126,7 +126,7 @@
-        2.4.1
+        2.9.0
         2.5.25
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/InferenceTest.netcore.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/InferenceTest.netcore.cs
index 7f3d5d6624b0..ad127c257929 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/InferenceTest.netcore.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/InferenceTest.netcore.cs
@@ -42,7 +42,7 @@ public void CanCreateAndDisposeSessionWithModelPath()
             {
                 Assert.NotNull(session);
                 Assert.NotNull(session.InputMetadata);
-                Assert.Equal(1, session.InputMetadata.Count); // 1 input nodeMeta
+                Assert.Single(session.InputMetadata); // 1 input nodeMeta
                 Assert.True(session.InputMetadata.ContainsKey("data_0")); // input nodeMeta name
                 Assert.Equal(typeof(float), session.InputMetadata["data_0"].ElementType);
                 Assert.True(session.InputMetadata["data_0"].IsTensor);
@@ -54,7 +54,7 @@ public void CanCreateAndDisposeSessionWithModelPath()
             }

             Assert.NotNull(session.OutputMetadata);
-            Assert.Equal(1, session.OutputMetadata.Count); // 1 output nodeMeta
+            Assert.Single(session.OutputMetadata); // 1 output nodeMeta
             Assert.True(session.OutputMetadata.ContainsKey("softmaxout_1")); // output nodeMeta name
             Assert.Equal(typeof(float), session.OutputMetadata["softmaxout_1"].ElementType);
             Assert.True(session.OutputMetadata["softmaxout_1"].IsTensor);
@@ -665,7 +665,7 @@ private void RunPretrainedModel(InferenceSession session,
                     }
                     break;
                 default:
-                    Assert.True(false, $"TestPreTrainedModels cannot handle Onnxtype: {outputValue.ValueType}");
+                    Assert.Fail($"TestPreTrainedModels cannot handle Onnxtype: {outputValue.ValueType}");
                     break;
             }
         }
@@ -720,7 +720,7 @@ private void RunPretrainedModel(InferenceSession session, RunOptions runOptions,
             }
             else
             {
-                Assert.True(false, $"TestPreTrainedModels cannot handle Onnxtype: {outputMeta.OnnxValueType}");
+                Assert.Fail($"TestPreTrainedModels cannot handle Onnxtype: {outputMeta.OnnxValueType}");
             }
         }
     }
@@ -843,7 +843,7 @@ private static void VerifySequenceResults(NamedOnnxValue result, NamedOnnxValue
                     }
                     break;
                 default:
-                    Assert.True(false, "VerifySequenceResults cannot handle Onnxtype: " + resultItem.ValueType.ToString());
+                    Assert.Fail("VerifySequenceResults cannot handle Onnxtype: " + resultItem.ValueType.ToString());
                     break;
             }
             Assert.Equal(resultItem.AsTensor(), expectedItem.AsTensor(), new FloatComparer());
@@ -897,7 +897,7 @@ private static void VerifyTensorResults(TensorElementType elementType, NamedOnnx
                     Assert.Equal(expectedValue.AsTensor(), result.AsTensor(), new ExactComparer());
                     break;
                 default:
-                    Assert.True(false, "TestPreTrainedModels does not yet support output of type: " + elementType.ToString());
+                    Assert.Fail("TestPreTrainedModels does not yet support output of type: " + elementType.ToString());
                     break;
             }
         }
@@ -937,7 +937,7 @@ private static void VerifySequenceResults(OrtValue resultSequence, OrtValue expe
                     }
                     break;
                 default:
-                    Assert.True(false, $"VerifySequenceResults cannot handle Onnxtype: {elementMeta.OnnxValueType}");
+                    Assert.Fail($"VerifySequenceResults cannot handle Onnxtype: {elementMeta.OnnxValueType}");
                     break;
             }
         }
@@ -1009,7 +1009,7 @@ private static void VerifyTensorResults(TensorElementType expectedElementType, O
                         new BFloat16Comparer { tolerance = 2 });
                     break;
                 default:
-                    Assert.True(false, "VerifyTensorResults cannot handle ElementType: " + expectedElementType.ToString());
+                    Assert.Fail("VerifyTensorResults cannot handle ElementType: " + expectedElementType.ToString());
                     break;
             }
         }
@@ -1077,7 +1077,7 @@ private static void VerifyContainerContent(IReadOnlyList results,
                     Assert.Equal(result.GetStringTensorAsArray(), expectedValue.AsTensor().ToArray(), new ExactComparer());
                     break;
                 default:
-                    Assert.True(false, $"VerifyTensorResults cannot handle ElementType: { resultTypeShape.ElementDataType}");
+                    Assert.Fail($"VerifyTensorResults cannot handle ElementType: { resultTypeShape.ElementDataType}");
                     break;
             }
         }
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj
index 9886f050fbd6..a10f93f8eacd 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp/Microsoft.ML.OnnxRuntime.Tests.NetCoreApp.csproj
@@ -53,9 +53,9 @@
-
-
-
+
+
+
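
Note on the InferenceTest.cs threading changes: the diff replaces blocking `Task.WaitAll(tasks)` with `await Task.WhenAll(tasks)` and converts `async void` test methods to `async Task`, which xUnit can await and report on. A minimal sketch of the resulting pattern; the class name and the task body are placeholders standing in for the real per-thread `session.Run(...)` loop:

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;
using Xunit;

public class MultiThreadsPatternTests
{
    // xUnit awaits `async Task` tests and surfaces their exceptions;
    // `async void` tests can crash the test host instead of failing cleanly.
    [Fact]
    public async Task RunsWorkConcurrently()
    {
        var tasks = new List<Task>();
        for (int i = 0; i < 10; i++)
        {
            // placeholder for the inference work done on each thread
            tasks.Add(Task.Run(() => { /* session.Run(...) in the real test */ }));
        }

        // WhenAll keeps the test thread unblocked (unlike Task.WaitAll)
        // and aggregates exceptions from all tasks for the framework to report.
        await Task.WhenAll(tasks);
    }
}
```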
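The assertion updates follow current xUnit analyzer guidance: `Assert.Equal(1, collection.Count)` becomes `Assert.Single(collection)`, and the `Assert.True(false, message)` idiom becomes `Assert.Fail(message)`, which newer xunit packages provide. A short illustration with a hypothetical output list:

```csharp
using System.Collections.Generic;
using Xunit;

public class AssertionStyleTests
{
    [Fact]
    public void UsesSpecializedAsserts()
    {
        var outputs = new List<string> { "softmaxout_1" };

        // Assert.Single verifies the count and returns the sole element,
        // giving a clearer failure message than Assert.Equal(1, outputs.Count).
        var only = Assert.Single(outputs);
        Assert.Equal("softmaxout_1", only);

        // Assert.Fail unconditionally fails a branch that should be unreachable,
        // replacing the older Assert.True(false, "...") idiom.
        if (only.Length == 0)
        {
            Assert.Fail("unexpected empty output name");
        }
    }
}
```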
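The `#pragma warning disable CS8625` guards added in TensorTests.cs exist because those tests deliberately pass a null literal to a non-nullable parameter to assert the thrown exception. A sketch of the same idea on the BCL `Array.CopyTo`, shown for illustration only, using the null-forgiving operator as an alternative to the pragma:

```csharp
using System;
using Xunit;

public class NullArgumentTests
{
    [Fact]
    public void CopyToRejectsNullDestination()
    {
        int[] data = { 1, 2, 3 };

        // Under <Nullable>enable</Nullable>, a bare `null` here raises CS8625;
        // `null!` suppresses it at the call site instead of via #pragma.
        Assert.Throws<ArgumentNullException>(() => data.CopyTo(null!, 0));
    }
}
```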