Further adjustments based on API compat #1409

Merged: 2 commits, Nov 12, 2024
2 changes: 1 addition & 1 deletion azure-pipelines.yml
@@ -448,7 +448,7 @@ jobs:
- script: dotnet restore pkg/pack.proj /p:Configuration=Release
displayName: Restore package projects

-   - script: dotnet pack -c $(BuildConfig) --no-build -v:n /p:SkipNative=true /p:SkipTests=true /p:IncludeTorchSharpPackage=false /p:IncludeLibTorchCpuPackages=false /p:IncludeLibTorchCudaPackages=true pkg/pack.proj
+   - script: dotnet pack -c $(BuildConfig) --no-build -v:n /p:SkipNative=true /p:SkipTests=true /p:ApiCompatGenerateSuppressionFile=true /p:IncludeTorchSharpPackage=false /p:IncludeLibTorchCpuPackages=false /p:IncludeLibTorchCudaPackages=true pkg/pack.proj
displayName: Create Packages

# We are 10GB space-constrained on the Azure Pipelines CI system so clean up what we can
2 changes: 1 addition & 1 deletion src/TorchSharp/NN/Activation/Sigmoid.cs
@@ -45,7 +45,7 @@ public static Sigmoid Sigmoid()
/// </summary>
/// <param name="inplace">Do the operation in-place. Default: False</param>
/// <returns></returns>
- public static Sigmoid Sigmoid(bool inplace = false)
+ public static Sigmoid Sigmoid(bool inplace)
{
return new Sigmoid(inplace);
}
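For context, a minimal usage sketch (not part of this diff) of the two Sigmoid factory overloads after the default value is removed: the parameterless form keeps the pre-existing signature, while the bool form now requires an explicit argument. It assumes the usual TorchSharp entry points (torch.randn, module.call):

    using TorchSharp;
    using static TorchSharp.torch;

    var x = randn(3);

    var s0 = nn.Sigmoid();               // original parameterless overload
    var s1 = nn.Sigmoid(inplace: true);  // argument must now be explicit

    var y = s0.call(x);   // returns a new tensor
    s1.call(x);           // modifies x in place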
10 changes: 9 additions & 1 deletion src/TorchSharp/NN/Activation/Softsign.cs
@@ -32,11 +32,19 @@ public static partial class torch
{
public static partial class nn
{
+ /// <summary>
+ /// Softsign
+ /// </summary>
+ public static Softsign Softsign()
+ {
+     return new Softsign(false);
+ }

/// <summary>
/// Softsign
/// </summary>
/// <param name="inplace">Do the operation in-place. Default: False</param>
- public static Softsign Softsign(bool inplace = false)
+ public static Softsign Softsign(bool inplace)
{
return new Softsign(inplace);
}
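Similarly, a small sketch (not from the PR) of the restored parameterless Softsign() next to the explicit-argument overload, for instance when composing a model; nn.Sequential and nn.Linear are assumed standard TorchSharp APIs:

    using TorchSharp;
    using static TorchSharp.torch;

    // Parameterless overload, e.g. inside a Sequential:
    var model = nn.Sequential(
        ("lin", nn.Linear(8, 4)),
        ("act", nn.Softsign()));

    // The bool overload now takes an explicit argument:
    var act = nn.Softsign(inplace: true);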
2 changes: 1 addition & 1 deletion src/TorchSharp/NN/Activation/Tanhshrink.cs
@@ -67,7 +67,7 @@ public static Tensor tanhshrink(Tensor x, bool inplace = false)
/// </summary>
/// <param name="x">The input tensor</param>
[Obsolete("Not using the PyTorch naming convention.",false)]
- public static Tensor tanhshrink(Tensor x) => tanhshrink(x, false);
+ public static Tensor Tanhshrink(Tensor x) => tanhshrink(x, false);
}
}
}
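For reference, a minimal sketch (not part of the diff) of calling the functional form once the obsolete PascalCase wrapper no longer shadows it; it assumes these helpers live under torch.nn.functional, as the surrounding hunk suggests:

    using TorchSharp;
    using static TorchSharp.torch;

    var x = randn(5);

    // PyTorch-style name, out-of-place by default:
    var y = nn.functional.tanhshrink(x);

    // Optional in-place variant:
    nn.functional.tanhshrink(x, inplace: true);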
3 changes: 2 additions & 1 deletion src/TorchSharp/NN/Activation/Threshold.cs
@@ -70,8 +70,9 @@ public static Tensor threshold(Tensor x, double threshold, double value, bool in
/// <param name="x">The input tensor</param>
/// <param name="threshold">The value to threshold at</param>
/// <param name="value">The value to replace with</param>
/// <param name="inplace">Do the operation in-place</param>
[Obsolete("Not using the PyTorch naming convention.",false)]
public static Tensor Threshold(Tensor x, double threshold, double value) => nn.functional.threshold(x, threshold, value, false);
public static Tensor Threshold(Tensor x, double threshold, double value, bool inplace = false) => nn.functional.threshold(x, threshold, value, inplace);
}
}
}
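A small usage sketch (not part of the diff) of the underlying functional form that the obsolete wrapper now forwards to, including the inplace flag; parameter names follow the signature shown in the hunk header above:

    using TorchSharp;
    using static TorchSharp.torch;

    var x = randn(6);

    // Out-of-place: elements not greater than 0.1 are replaced with 0.0 in the result.
    var y = nn.functional.threshold(x, threshold: 0.1, value: 0.0);

    // In-place variant:
    nn.functional.threshold(x, 0.1, 0.0, inplace: true);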
5 changes: 5 additions & 0 deletions src/TorchSharp/NN/Module.cs
@@ -818,6 +818,11 @@ public virtual void register_module(string name, Module submodule)
}
}

+ protected void ConditionallyRegisterParameter(string name, Tensor value)
+ {
+     ConditionallyRegisterParameter(name, value as Parameter);
+ }

protected void ConditionallyRegisterParameter(string name, Parameter? value)
{
if (value is null) {
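A hypothetical sketch (not from the repo) of how a derived module might use the new Tensor-typed overload: the field is declared as Tensor, and the helper registers it only when the value is in fact a Parameter. MyModule, its field, and the direct call to the protected helper are all illustrative assumptions; nn.Module<Tensor, Tensor>, nn.Parameter, and forward are standard TorchSharp APIs:

    using TorchSharp;
    using static TorchSharp.torch;

    // Hypothetical custom module for illustration only.
    class MyModule : nn.Module<Tensor, Tensor>
    {
        private Tensor weight = nn.Parameter(randn(4, 4));

        public MyModule() : base(nameof(MyModule))
        {
            // Registers "weight" because the Tensor is actually a Parameter.
            ConditionallyRegisterParameter(nameof(weight), weight);
        }

        public override Tensor forward(Tensor input) => input.matmul(weight);
    }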
8 changes: 4 additions & 4 deletions src/TorchSharp/Tensor/Tensor.cs
@@ -2812,19 +2812,19 @@ public Tensor celu_(Scalar alpha)
return this;
}

- public Tensor elu(double alpha = 1) => elu1(alpha, 1.0, 1.0);
+ public Tensor elu(double alpha = 1) => elu(alpha, 1.0, 1.0);

- public Tensor elu_(double alpha = 1) => elu2(alpha, 1.0, 1.0);
+ public Tensor elu_(double alpha = 1) => elu_(alpha, 1.0, 1.0);

- private Tensor elu1(Scalar alpha, Scalar scale, Scalar input_scale)
+ public Tensor elu(Scalar alpha, Scalar scale, Scalar input_scale)
{
var res = NativeMethods.THSTensor_elu(Handle, alpha.Handle, scale.Handle, input_scale.Handle);
if (res == IntPtr.Zero)
CheckForErrors();
return new Tensor(res);
}

- private Tensor elu2(Scalar alpha, Scalar scale, Scalar input_scale)
+ public Tensor elu_(Scalar alpha, Scalar scale, Scalar input_scale)
{
NativeMethods.THSTensor_elu_(Handle, alpha.Handle, scale.Handle, input_scale.Handle);
CheckForErrors();
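Finally, a minimal sketch (not part of the diff) of the three-argument elu overloads that this change makes public; it assumes TorchSharp's implicit double-to-Scalar conversion, with parameter names taken from the signatures above:

    using TorchSharp;
    using static TorchSharp.torch;

    var x = randn(4);

    // Out-of-place, with explicit alpha/scale/input_scale:
    var y = x.elu(alpha: 0.5, scale: 1.0, input_scale: 1.0);

    // In-place counterpart:
    x.elu_(0.5, 1.0, 1.0);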