Gradient descent: Difference between revisions

added Perl 6 programming solution
(added Perl 6 programming solution)
Line 135:
* Gradient Calls: 35
</pre>
 
=={{header|Perl 6}}==
{{trans|Go}}
<lang perl6>#!/usr/bin/env perl6
 
use v6.d;
 
# Steepest-descent minimisation of g(), translated from the Go entry.
# Mutates @x in place toward a local minimum; $alpha is taken by copy,
# so the caller's step-size variable is never modified.
sub steepestDescent(@x, $alpha is copy, $tolerance) {
    my $step = $tolerance;          # finite-difference step, halved every pass
    my $best = g(@x);               # best objective value seen so far

    my @grad = gradG(@x, $step);    # initial gradient estimate

    # Euclidean norm of the gradient, and the scaled step length.
    my $norm = sqrt [+] @grad.map: * ** 2;
    my $scale = $alpha / $norm;

    # Iterate until the gradient norm falls to the tolerance.
    while $norm > $tolerance {
        @x[$_] -= $scale * @grad[$_] for ^@x;   # move downhill
        $step /= 2;

        @grad = gradG(@x, $step);   # gradient at the new point

        # Refresh norm and step length (uses the *current* $alpha,
        # before any adjustment below — matches the Go original).
        $norm  = sqrt [+] @grad.map: * ** 2;
        $scale = $alpha / $norm;

        my $trial = g(@x);          # objective at the new point
        if $trial > $best { $alpha /= 2 }   # overshot: shrink the rate
        else              { $best = $trial }
    }
}
 
# Rough forward-difference estimate of the gradient of g at @x.
# NOTE(review): each perturbation @y[$i] += $h is deliberately *kept*
# for the following components (faithful to the Go original and to the
# published output); a textbook forward difference would restore @y[$i]
# after each partial — confirm against the task's intent before "fixing".
sub gradG(@x, $h) {
    my @y    = @x;          # working copy that accumulates perturbations
    my $base = g(@x);       # objective at the unperturbed point
    my @grad;
    for ^@x -> $i {
        @y[$i] += $h;
        @grad[$i] = (g(@y) - $base) / $h;
    }
    return @grad
}
 
# Objective function whose minimum is sought:
#   g(x) = (x0 - 1)² · e^(−x1²)  +  x1·(x1 + 2) · e^(−2·x0²)
sub g(\x) {
    my \dx = x[0] - 1;
    my \y  = x[1];
    dx ** 2 * exp(-y ** 2) + y * (y + 2) * exp(-2 * x[0] ** 2)
}
 
 
# Driver: run steepest descent from an initial guess and report the result.
my $tolerance = 0.0000006;
my $alpha     = 0.1;

my @x = 0.1, -1;            # initial guess of the minimum's location

steepestDescent(@x, $alpha, $tolerance);

say "Testing steepest descent method:";
say "The minimum is at x[0] = ", @x[0], ", x[1] = ", @x[1];
</lang>
{{out}}
<pre>Testing steepest descent method:
The minimum is at x[0] = 0.10743450794656964, x[1] = -1.2233956711774543
</pre>
 
 
 
=={{header|Phix}}==
351

edits