-
Notifications
You must be signed in to change notification settings - Fork 218
Add approximate parameter to GELU activation function #1548
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,18 @@ | ||
// Copyright (c) .NET Foundation and Contributors. All Rights Reserved. See LICENSE in the project root for license information.
namespace TorchSharp
{
    /// <summary>
    /// Selects how the GELU activation is evaluated.
    /// Member names are lowercase on purpose: they mirror the mode strings
    /// ("none" / "tanh") that PyTorch's <c>gelu</c> accepts for its
    /// <c>approximate</c> argument.
    /// </summary>
    public enum GELUApproximate
    {
        /// <summary>
        /// Evaluate GELU exactly (no approximation).
        /// </summary>
        none = 0,
        /// <summary>
        /// Use the faster tanh-based approximation of GELU.
        /// </summary>
        tanh = 1
    }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -2977,15 +2977,31 @@ public Tensor elu_(Scalar alpha, Scalar scale, Scalar input_scale) | |
|
|
||
/// <summary>
/// Applies the Gaussian Error Linear Units (GELU) activation function element-wise,
/// using the exact (non-approximated) formulation.
/// </summary>
/// <returns>A new tensor containing the result.</returns>
public Tensor gelu()
{
    // Delegate to the overload so the native call and the mode-string
    // mapping live in exactly one place.
    return gelu(GELUApproximate.none);
}
|
|
||
/// <summary>
/// Applies the Gaussian Error Linear Units (GELU) activation function element-wise.
/// </summary>
/// <param name="approximate">Chooses the exact computation or the tanh-based approximation.</param>
/// <returns>A new tensor containing the result.</returns>
public Tensor gelu(GELUApproximate approximate)
{
    // The native layer expects the PyTorch-style mode string.
    var mode = approximate switch {
        GELUApproximate.tanh => "tanh",
        _ => "none",
    };
    var res = NativeMethods.THSTensor_gelu(Handle, mode);
    if (res == IntPtr.Zero)
        CheckForErrors();
    return new Tensor(res);
}
|
|
||
/// <summary>
/// Applies the Gaussian Error Linear Units (GELU) activation function in place,
/// using the exact (non-approximated) formulation.
/// </summary>
/// <returns>The modified tensor.</returns>
public Tensor gelu_()
{
    // Delegate to the overload so the native call and the mode-string
    // mapping live in exactly one place.
    return gelu_(GELUApproximate.none);
}
|
|
||
/// <summary>
/// Applies the Gaussian Error Linear Units (GELU) activation function in place.
/// </summary>
/// <param name="approximate">Chooses the exact computation or the tanh-based approximation.</param>
/// <returns>The modified tensor.</returns>
public Tensor gelu_(GELUApproximate approximate)
{
    // The native layer expects the PyTorch-style mode string.
    var mode = approximate switch {
        GELUApproximate.tanh => "tanh",
        _ => "none",
    };
    var res = NativeMethods.THSTensor_gelu_(Handle, mode);
    if (res == IntPtr.Zero)
        CheckForErrors();
    return new Tensor(res);
}
|
|
||
| Original file line number | Diff line number | Diff line change | ||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -618,6 +618,28 @@ public void EvaluateGELU() | |||||||||||||||
| } | ||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
[Fact]
public void EvaluateGELUWithTanhApproximate()
{
    var rel = GELU(GELUApproximate.tanh);

    foreach (var device in TestUtils.AvailableDevices()) {
        // Large scale (x25) pushes values well into both GELU tails.
        var input = torch.randn(new long[] { 64, 8 }, device: device) * 25.0;
        var output = rel.call(input);
        Assert.Equal(device.type, output.device_type);

        var values = output.data<float>().ToArray();
        Assert.Equal(input.shape, output.shape);
        // GELU's global minimum is ~ -0.17, so no output may fall below -0.2.
        Assert.All(values, val => Assert.True(val >= -0.2));
    }

    // Verify that the tanh approximation produces results different from the exact form.
    var x = torch.tensor(new float[] { -1.0f, 0.0f, 1.0f, 2.0f });
    var exact = torch.nn.functional.gelu(x);
    var approx = torch.nn.functional.gelu(x, GELUApproximate.tanh);
    Assert.False(exact.allclose(approx, rtol: 1e-5, atol: 1e-5));

    // Verify that the in-place tanh approximate matches the out-of-place result.
    var xInPlace = x.clone();
    xInPlace.gelu_(GELUApproximate.tanh);
    Assert.True(approx.allclose(xInPlace, rtol: 1e-5, atol: 1e-5));
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
PR description mentions adding a
GELU.Approximateenum, but the implementation introduces a top-levelGELUApproximateenum. If the intended public API is the nested name, consider renaming/moving the enum; otherwise, update the PR description to match the shipped API surface.