=={{header|Wren}}==
{{trans|zkl}}
{{libheader|Wren-math}}
{{libheader|Wren-fmt}}
The result differs slightly from the zkl entry's, but is close enough.
<lang ecmascript>import "/math" for Math
import "/fmt" for Fmt

// Function for which the minimum is to be found.
var f = Fn.new { |x, y| (x-1)*(x-1)*Math.exp(-y*y) + y*(y+2)*Math.exp(-2*x*x) }

// Provides a rough estimate of the gradient of f(x, y) using forward differences with step h.
var gradG = Fn.new { |f, x, y, h|
    var g0 = f.call(x, y)
    return [(f.call(x + h, y) - g0) / h, (f.call(x, y + h) - g0) / h]
}

var steepestDescent = Fn.new { |f, x, y, alpha, h|
    var g0 = f.call(x, y)            // Initial estimate of result.
    var gra = gradG.call(f, x, y, h) // Calculate initial gradient.
    var fix = gra[0]
    var fiy = gra[1]

    // Calculate initial norm.
    var delG = (fix*fix + fiy*fiy).sqrt
    var b = alpha/delG

    // Iterate until the gradient norm is no greater than the (halving) tolerance h.
    while (delG > h) {
        // Take a step in the direction of steepest descent.
        x = x - b*fix
        y = y - b*fiy

        // Calculate next gradient with a finer step.
        h = h / 2
        gra = gradG.call(f, x, y, h)
        fix = gra[0]
        fiy = gra[1]

        // Calculate next norm.
        delG = (fix*fix + fiy*fiy).sqrt
        b = alpha/delG

        // Adjust parameter: halve the step size if the function value got worse,
        // otherwise accept the new value.
        var g1 = f.call(x, y)
        if (g1 > g0) {
            alpha = alpha / 2
        } else {
            g0 = g1
        }
    }
    return [x, y]
}

var tolerance = 0.0000006
var alpha = 0.1

// Initial guess of location of minimum.
var x = 0.1
var y = -1
var sd = steepestDescent.call(f, x, y, alpha, tolerance)
x = sd[0]
y = sd[1]
System.print("Testing steepest descent method:")
Fmt.print("The minimum is at (x, y) = ($f, $f). f(x, y) = $f", x, y, f.call(x, y))</lang>
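For reference, the <code>gradG</code> function above approximates the gradient with one-sided (forward) finite differences; with step size <math>h</math> this is roughly

<math>\nabla f(x, y) \approx \left( \frac{f(x+h,\,y) - f(x,\,y)}{h},\ \frac{f(x,\,y+h) - f(x,\,y)}{h} \right)</math>

Because <math>h</math> is halved on every iteration, the gradient estimate becomes progressively finer as the search closes in on the minimum.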
 
{{out}}
<pre>
Testing steepest descent method:
The minimum is at (x, y) = (0.107612, -1.223291). f(x, y) = -0.750063
</pre>
 
=={{header|zkl}}==