Gradient descent: Difference between revisions
Content added Content deleted
m (→{{header|TypeScript}}: added zkl header)
(→{{header|zkl}}: added code)
Line 789:
=={{header|zkl}}==
{{trans|Go}} with tweaked gradG
⚫ | |||
<lang zkl>fcn steepestDescent(f, x,y, alpha, h){ # minimize f(x,y) starting from (x,y); h is the tolerance
   g0:=f(x,y);			# Initial estimate of result.
   fix,fiy := gradG(f,x,y,h);	# Calculate initial gradient.
   # Calculate initial norm; b is the step scale alpha/|grad|.
   b:=alpha / (delG := (fix*fix + fiy*fiy).sqrt());
   while(delG > h){		# Iterate until gradient norm is <= tolerance.
      x,y = x - b*fix, y - b*fiy;	# Step downhill, against the gradient.
      # Calculate next gradient and next value; note h is halved each pass ("tweaked gradG").
      fix,fiy = gradG(f,x,y, h/=2);
      b=alpha / (delG = (fix*fix + fiy*fiy).sqrt()); # Calculate next norm.
      if((g1:=f(x,y)) > g0) alpha/=2 else g0 = g1;   # Overshot: shrink step; else accept new value.
   }
   return(x,y)			# location of the (approximate) minimum
}
fcn gradG(f,x,y,h){ # gives a rough (forward-difference) calculation of gradient of f at (x,y).
   g0:=f(x,y);	    # base value, reused for both partial differences
   return((f(x + h, y) - g0)/h, (f(x, y + h) - g0)/h)
}</lang>
<lang zkl>fcn f(x,y){ # Function for which minimum is to be found.
   # (x-1)^2 * e^(-y^2)  +  y(y+2) * e^(-2x^2)
   (x - 1).pow(2)*(-y.pow(2)).exp() +
   y*(y + 2)*(-2.0*x.pow(2)).exp()
}
tolerance,alpha := 0.0000006, 0.1;  # stopping tolerance and initial step scale
x,y := 0.1, -1.0;	# Initial guess of location of minimum.
x,y = steepestDescent(f,x,y,alpha,tolerance);
println("Testing steepest descent method:");
println("The minimum is at (x,y) = (%f,%f). f(x,y) = %f".fmt(x,y,f(x,y)));</lang>
|||
{{out}}
<pre>
Testing steepest descent method:
The minimum is at (x,y) = (0.107608,-1.223299). f(x,y) = -0.750063
</pre>