HTTP: Difference between revisions

From Rosetta Code
Content added Content deleted
(Undo revision 315432 by WdeCvfYlmB (talk))
(Summarily revert Revision as of 06:23, 2 May 2020 rather than trying to piecemeal revert)
Line 5: Line 5:


There is a separate task for [[HTTPS Request]]s.
There is a separate task for [[HTTPS Request]]s.

=={{header|8th}}==
=={{header|8th}}==
<lang forth>
<lang 8th>"http://www.w3.org/Home.html" net:get drop >s .</lang>
"http://www.rosettacode.org" net:get drop >s .
</lang>


=={{header|ABAP}}==
=={{header|ABAP}}==
This works for ABAP Version 7.40 and above
<lang ABAP>report z_http.
<lang ABAP>
cl_http_client => create_by_url(
report z_http.
exporting

url = `http://www.w3.org/Home.html`
cl_http_client=>create_by_url(
importing
exporting
client = data(http_client)
url = `http://rosettacode.org/robots.txt`
exceptions
importing
argument_not_found = 1
client = data(http_client)
plugin_not_active = 2
exceptions
internal_error = 3
others = 4
argument_not_found = 1
plugin_not_active = 2
).
internal_error = 3
others = 4 ).

if sy-subrc <> 0.
if sy-subrc <> 0.
data(error_message) = switch string(
data(error_message) = switch string( sy-subrc
when 1 then `argument_not_found`
sy-subrc
when 1 then `argument_not_found`
when 2 then `plugin_not_active`
when 2 then `plugin_not_active`
when 3 then `internal_error`
when 3 then `internal_error`
when 4 then `other error` ).

when 4 then `other error`
write error_message.
).
exit.
write error_message.
exit.
endif.
endif.

data(rest_http_client) = cast if_rest_client(new cl_rest_http_client(http_client)).
rest_http_client -> get().
data(rest_http_client) = cast if_rest_client( new cl_rest_http_client( http_client ) ).

data(response_string) = rest_http_client -> get_response_entity() -> get_string_data().
rest_http_client->get( ).
split response_string at cl_abap_char_utilities => newline into table data(output_table).

data(response_string) = rest_http_client->get_response_entity( )->get_string_data( ).

split response_string at cl_abap_char_utilities=>newline into table data(output_table).

loop at output_table assigning field-symbol(<output_line>).
loop at output_table assigning field-symbol(<output_line>).
write / <output_line>.
write / <output_line>.
endloop.</lang>
endloop.
</lang>

{{out}}

<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>


=={{header|ActionScript}}==
=={{header|ActionScript}}==
<lang actionscript>package {
<lang actionscript>
package
{
import flash.display.Sprite;
import flash.display.Sprite;
import flash.events.Event;
import flash.events.Event;
import flash.net.*;
import flash.net.*;

public class RequestExample extends Sprite {
public function RequestExample() {
public class RequestExample extends Sprite
{
public function RequestExample()
{
var loader:URLLoader = new URLLoader();
var loader:URLLoader = new URLLoader();
loader.addEventListener(Event.COMPLETE, loadComplete);
loader.addEventListener(Event.COMPLETE, loadComplete);
loader.load(new URLRequest("http://www.w3.org/Home.html"));
loader.load(new URLRequest("http://www.rosettacode.org"));
}
}
private function loadComplete(evt:Event):void {
private function loadComplete(evt:Event):void
{
trace(evt.target.data);
trace(evt.target.data);
}
}
}
}
}
}</lang>
</lang>


=={{header|Ada}}==
=={{header|Ada}}==
{{libheader|AWS}}
{{libheader|AWS}}
<lang ada>with Ada.Text_IO; use Ada.Text_IO;
<lang ada>
with Ada.Text_IO; use Ada.Text_IO;

with AWS.Client;
with AWS.Client;
with AWS.Response;
with AWS.Response;

procedure HTTP_Request is
procedure HTTP_Request is
begin
begin
Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.w3.org/Home.html")));
Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.rosettacode.org")));
end HTTP_Request;</lang>
end HTTP_Request;
</lang>


=={{header|ALGOL 68}}==
=={{header|ALGOL 68}}==
{{works with|ALGOL 68|Revision 1 - however ''grep in string'', ''http content'' and ''str error'' are from a non-standard library}}
{{works with|ALGOL 68|Revision 1 - however ''grep in string'', ''http content'' and ''str error'' are from a non-standard library}}

{{works with|ALGOL 68G|Any - tested with release [http://sourceforge.net/projects/algol68/files/algol68g/algol68g-1.18.0/algol68g-1.18.0-9h.tiny.el5.centos.fc11.i386.rpm/download 1.18.0-9h.tiny]}}
{{works with|ALGOL 68G|Any - tested with release [http://sourceforge.net/projects/algol68/files/algol68g/algol68g-1.18.0/algol68g-1.18.0-9h.tiny.el5.centos.fc11.i386.rpm/download 1.18.0-9h.tiny]}}
{{wont work with|ELLA ALGOL 68|Any (with appropriate job cards) - tested with release [http://sourceforge.net/projects/algol68/files/algol68toc/algol68toc-1.8.8d/algol68toc-1.8-8d.fc9.i386.rpm/download 1.8-8d] - due to extensive use of ''grep in string'' and ''http content''}}
{{wont work with|ELLA ALGOL 68|Any (with appropriate job cards) - tested with release [http://sourceforge.net/projects/algol68/files/algol68toc/algol68toc-1.8.8d/algol68toc-1.8-8d.fc9.i386.rpm/download 1.8-8d] - due to extensive use of ''grep in string'' and ''http content''}}

<lang algol68>STRING domain="www.w3.org";
<lang algol68>
STRING page="Home.html";
STRING domain="rosettacode.org";
STRING page="wiki/Main_Page";

STRING re success="^HTTP/[0-9.]* 200";
STRING re success="^HTTP/[0-9.]* 200";
STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*";
STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*";
STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+";
STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+";

PROC html page = (REF STRING page) BOOL: (
PROC html page = (REF STRING page) BOOL: (
BOOL out=grep in string(re success, page, NIL, NIL) = 0;
BOOL out=grep in string(re success, page, NIL, NIL) = 0;
IF
IF INT start, end;
INT start, end;
grep in string(re result description, page, start, end) = 0
grep in string(re result description, page, start, end) = 0
THEN
THEN
page := page[end+1:];
page:=page[end+1:];
IF grep in string(re doctype, page, start, end) = 0
IF
grep in string(re doctype, page, start, end) = 0
THEN page:=page[start+2:]
ELSE print ("unknown format retrieving page")
THEN
page:=page[start+2:]
ELSE
print ("unknown format retrieving page")
FI
FI
ELSE print ("unknown error retrieving page")
ELSE
FI;
print ("unknown error retrieving page")
FI;
out
out
);
);

IF
STRING reply;
IF STRING reply;
INT rc =
INT rc = http content (reply, domain, "http://"+domain+"/"+page, 0);
rc = 0 AND html page (reply)
http content (reply, domain, "http://"+domain+"/"+page, 0);
rc = 0 AND html page (reply)
THEN
print (reply)
THEN print (reply)
ELSE print (strerror (rc))
ELSE
print (strerror (rc))
FI
FI
</lang>
</lang>


=={{header|Arturo}}==
=={{header|Arturo}}==

<lang arturo>print [download "http://www.w3.org/Home.html"]</lang>
<lang arturo>print [download "http://google.com"]</lang>

{{out}}

<pre><!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="es"><head><meta content="Google.es permite acceder a la información mundial en castellano, catalán, gallego, euskara e inglés." name="description"><meta content="noodp" name="robots"><meta content="text/html; charset=UTF-8" http-equiv="Content-Type"><meta content="/images/branding/googleg/1x/googleg_standard_color_128dp.png" itemprop="image"><title>Google</title><script nonce="mEe5oG98axwLddedgOh1JA==">(function(){window.google={kEI:'lp2lXbjlCJGKauK8o9AB',kEXPI:'0,18167,1335579,5663,730,224,510,18,228,819,1535,1617,378,206,1017,53,173,1163,798,10,50,211,452,319,19,96,161,89,193,122,766,81,176,221,1130704,1197793,230,302939,26305,1294,12383,4855,32692,15247,867,12163,16521,363,3320,5505,2436,5948,1119,2,579,727,2431,1362,4323,4967,774,2250,4744,3118,6196,1719,1808,1976,2044,8909,5071,226,897,1119,38,920,2090,2975,2736,49,2606,315,91,2,632,3240,4191,1571,2303,2883,19,319,235,884,904,101,2024,1,370,2778,917,261,731,509,777,7,2796,887,80,601,11,14,1279,2212,202,37,286,5,1252,327,513,324,193,1466,8,48,1

[output truncated]
</pre>


=={{header|AutoHotkey}}==
=={{header|AutoHotkey}}==
<lang AutoHotkey>UrlDownloadToFile, http://www.w3.org/Home.html, url.html
<lang AutoHotkey>
Run, cmd /k type url.html</lang>
UrlDownloadToFile, http://rosettacode.org, url.html
Run, cmd /k type url.html
</lang>


=={{header|AWK}}==
=={{header|AWK}}==
{{works with|gawk}}
{{works with|gawk}}
<lang awk>BEGIN {
<lang awk>BEGIN {
site="www.w3.org"
site="en.wikipedia.org"
path="/Home.html"
path="/wiki/"
name="Rosetta_Code"
name="Rosetta_Code"

server = "/inet/tcp/0/" site "/80"
server = "/inet/tcp/0/" site "/80"
print "GET " path name " HTTP/1.0" |& server
print "Host: " site |& server
print "GET " path name " HTTP/1.0" |& server
print "\r\n\r\n" |& server
print "Host: " site |& server
print "\r\n\r\n" |& server
while ( (server |& getline fish) > 0 ) {

if ( ++scale == 1 )
ship = fish
while ( (server |& getline fish) > 0 ) {
else
if ( ++scale == 1 )
ship = ship "\n" fish
ship = fish
}
else
ship = ship "\n" fish
close(server)
}
print ship
close(server)

print ship
}</lang>
}</lang>


=={{header|BaCon}}==
=={{header|BaCon}}==
<lang qbasic>
<lang qbasic>'
' Read and display a website
website$ = "www.w3.org"
'
IF AMOUNT(ARGUMENT$) = 1 THEN
website$ = "www.basic-converter.org"
ELSE
website$ = TOKEN$(ARGUMENT$, 2)
ENDIF

OPEN website$ & ":80" FOR NETWORK AS mynet
OPEN website$ & ":80" FOR NETWORK AS mynet
SEND "GET /Home.html HTTP/1.1\r\nHost: " & website$ & "\r\n\r\n" TO mynet
SEND "GET / HTTP/1.1\r\nHost: " & website$ & "\r\n\r\n" TO mynet
REPEAT
REPEAT
RECEIVE dat$ FROM mynet
RECEIVE dat$ FROM mynet
Line 147: Line 202:


=={{header|Batch File}}==
=={{header|Batch File}}==
<lang batch>
<lang batch>curl.exe -s -L http://www.w3.org/Home.html</lang>
curl.exe -s -L http://rosettacode.org/
</lang>


=={{header|BBC BASIC}}==
=={{header|BBC BASIC}}==
{{works with|BBC BASIC for Windows}}
{{works with|BBC BASIC for Windows}}
<lang bbcbasic>SYS "LoadLibrary", "URLMON.DLL" TO urlmon%
<lang bbcbasic> SYS "LoadLibrary", "URLMON.DLL" TO urlmon%
SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile
SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile
url$ = "http://www.w3.org/Home.html"
url$ = "http://www.bbcbasic.co.uk/aboutus.html"
file$ = @tmp$ + "rosetta.tmp"
SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail%
file$ = @tmp$ + "rosetta.tmp"
SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail%
IF fail% ERROR 100, "File download failed"
IF fail% ERROR 100, "File download failed"
OSCLI "TYPE """ + file$ + """"</lang>
OSCLI "TYPE """ + file$ + """"</lang>


=={{header|Biferno}}==
=={{header|Biferno}}==
simple one-liner using httpExt and quick print $
<lang Biferno>$httpExt.ExecRemote("www.w3.org/Home.html")</lang>
<lang Biferno>$httpExt.ExecRemote("www.tabasoft.it")</lang>


=={{header|C}}==
=={{header|C}}==
{{libheader|libcurl}}
<lang C>#include <unistd.h>

#include <netdb.h>
<lang c>
#define BUF_SIZE 16
#include <stdio.h>
int sfd;
#include <stdlib.h>
char buf[BUF_SIZE];
#include <curl/curl.h>
struct addrinfo hints;

struct addrinfo * rp;
int main() {
int
main(void)
hints.ai_family = AF_INET;
{
hints.ai_socktype = SOCK_STREAM;
CURL *curl;
hints.ai_protocol = IPPROTO_TCP;
char buffer[CURL_ERROR_SIZE];
getaddrinfo("www.w3.org", "80", &hints, &rp);

sfd = socket(rp -> ai_family, rp -> ai_socktype, rp -> ai_protocol);
if ((curl = curl_easy_init()) != NULL) {
connect(sfd, rp -> ai_addr, rp -> ai_addrlen);
curl_easy_setopt(curl, CURLOPT_URL, "http://www.rosettacode.org/");
write(sfd, "GET /Home.html HTTP/1.1\r\nHost: www.w3.org\r\nConnection: close\r\n\r\n", 1024);
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
while (read(sfd, buf, BUF_SIZE) != 0) {
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, buffer);
write(STDOUT_FILENO, buf, BUF_SIZE);
if (curl_easy_perform(curl) != CURLE_OK) {
}
fprintf(stderr, "%s\n", buffer);
close(sfd);
return 0;
return EXIT_FAILURE;
}
}</lang>
curl_easy_cleanup(curl);
}
return EXIT_SUCCESS;
}
</lang>


=={{header|C sharp}}==
=={{header|C sharp}}==
<lang csharp>using System;
<lang csharp>
using System;
using System.Text;
using System.Text;
using System.Net;
using System.Net;

class Program {
class Program
static void Main(string[] args) {
{
static void Main(string[] args)
{
WebClient wc = new WebClient();
WebClient wc = new WebClient();
string content = wc.DownloadString("http://www.w3.org/Home.html");
string content = wc.DownloadString("http://www.google.com");
Console.WriteLine(content);
Console.WriteLine(content);
}
}
}
}</lang>
</lang>


=={{header|C++}}==
=={{header|C++}}==
<lang cpp>#include <winsock2.h>
<lang cpp>
#include <winsock2.h>
#include <ws2tcpip.h>
#include <ws2tcpip.h>
#include <iostream>
#include <iostream>

addrinfo * result;
int bytes;
char buffer[512];
addrinfo hints;
SOCKET s;
WSADATA wsaData;
int main() {
int main() {
WSADATA wsaData;
WSAStartup(MAKEWORD(2, 2), &wsaData);
WSAStartup( MAKEWORD( 2, 2 ), &wsaData );
ZeroMemory(&hints, sizeof(hints));

hints.ai_family = AF_UNSPEC;
addrinfo *result = NULL;
hints.ai_socktype = SOCK_STREAM;
addrinfo hints;
hints.ai_protocol = IPPROTO_TCP;

getaddrinfo("www.w3.org", "80", &hints, &result);
ZeroMemory( &hints, sizeof( hints ) );
s = socket(result->ai_family, result->ai_socktype, result->ai_protocol);
hints.ai_family = AF_UNSPEC;
connect(s, result->ai_addr, (int) result->ai_addrlen);
hints.ai_socktype = SOCK_STREAM;
freeaddrinfo(result);
hints.ai_protocol = IPPROTO_TCP;
send(s, "GET /Home.html HTTP/1.0\n\n", 16, 0);

do {
getaddrinfo( "74.125.45.100", "80", &hints, &result ); // http://www.google.com
bytes = recv(s, buffer, 512, 0);

if ( bytes > 0 )
SOCKET s = socket( result->ai_family, result->ai_socktype, result->ai_protocol );
std::cout.write(buffer, bytes);

} while (bytes > 0);
connect( s, result->ai_addr, (int)result->ai_addrlen );
return 0;

}</lang>
freeaddrinfo( result );

send( s, "GET / HTTP/1.0\n\n", 16, 0 );

char buffer[512];
int bytes;

do {
bytes = recv( s, buffer, 512, 0 );

if ( bytes > 0 )
std::cout.write(buffer, bytes);
} while ( bytes > 0 );

return 0;
}
</lang>

{{libheader|U++}}
{{libheader|U++}}

<lang cpp>#include <Web/Web.h>
<lang cpp>
#include <Web/Web.h>

using namespace Upp;
using namespace Upp;

CONSOLE_APP_MAIN {
CONSOLE_APP_MAIN
Cout() << HttpClient("www.w3.org/Home.html").ExecuteRedirect();
{
}</lang>
Cout() << HttpClient("www.rosettacode.org").ExecuteRedirect();
}
</lang>


=={{header|Caché ObjectScript}}==
=={{header|Caché ObjectScript}}==
Line 236: Line 326:
<pre>
<pre>
USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest.Server="www.w3.org"
USER>Set HttpRequest.Server="checkip.dyndns.org"
USER>Do HttpRequest.Get("/Home.html")
USER>Do HttpRequest.Get("/")
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()
</pre>
</pre>
Line 243: Line 333:
=={{header|Clojure}}==
=={{header|Clojure}}==
Using the Java API:
Using the Java API:
<lang clojure>(
<lang clojure>
defn get-http [url]
(defn get-http [url]
(let [sc (java.util.Scanner.
(
let [sc (java.util.Scanner.(.openStream (java.net.URL. url)))]
(.openStream (java.net.URL. url)))]
(
(while (.hasNext sc)
while (.hasNext sc)
(println (.nextLine sc)))))
(get-http "http://www.rosettacode.org")
(
</lang>
println (.nextLine sc)

)
)
)
)
(get-http "http://www.w3.org")</lang>
Using <code>clojure.contrib.http.agent</code>:
Using <code>clojure.contrib.http.agent</code>:
<lang clojure>(
<lang clojure>
ns example
(ns example
(:use [clojure.contrib.http.agent :only (string http-agent)]))
(

:use [clojure.contrib.http.agent :only (string http-agent)]
(println (string (http-agent "http://www.rosettacode.org/")))
)
</lang>
)

(println (string (http-agent "http://www.w3.org/")))</lang>
{{works with|Clojure|1.2}}
{{works with|Clojure|1.2}}
<lang clojure>(print (slurp "http://www.w3.org/"))</lang>
<lang clojure>
(print (slurp "http://www.rosettacode.org/"))
</lang>


=={{header|COBOL}}==
=={{header|COBOL}}==

Tested with GnuCOBOL
Tested with GnuCOBOL
<lang cobol>COBOL


<lang cobol>COBOL >>SOURCE FORMAT IS FIXED
identification division.
identification division.
program-id. curl-rosetta.


environment division.
program-id.
configuration section.
curl-write-callback.
environment division.
repository.
function read-url
function all intrinsic.


data division.
configuration section.
working-storage section.
repository.
function all intrinsic.


copy "gccurlsym.cpy".
data division.


01 web-page pic x(16777216).
working-storage section.
01 real-size usage binary-long.
01 curl-status usage binary-long.
01 memory-block based.
05 memory-address usage pointer sync.
05 memory-size usage binary-long sync.
05 running-total usage binary-long sync.
01 content-buffer pic x(65536) based.
01 web-space pic x(16777216) based.
01 left-over usage binary-long.
linkage section.
01 contents usage pointer.
01 element-size usage binary-long.
01 element-count usage binary-long.
01 memory-structure usage pointer.
procedure division using
by value contents
by value element-size
by value element-count
by value memory-structure
returning real-size.
set address of memory-block to memory-structure
compute real-size = element-size * element-count
end-compute
compute left-over = memory-size - running-total
end-compute
if left-over > 0 and < real-size then
move left-over to real-size
end-if
if (left-over > 0) and (real-size > 1) then
set address of content-buffer to contents
set address of web-space to memory-address
move content-buffer(1:real-size) to web-space(running-total:real-size)
add real-size to running-total
else
display "curl buffer sizing problem" upon syserr
end-if
goback.
end program curl-write-callback.


01 cli pic x(7) external.
identification division.
88 helping values "-h", "-help", "help", spaces.
88 displaying value "display".
88 summarizing value "summary".


*> ***************************************************************
function-id.
procedure division.
read-url.
accept cli from command-line
environment division.
if helping then
display "./curl-rosetta [help|display|summary]"
goback
end-if


*>
configuration section.
*> Read a web resource into fixed ram.
repository.
*> Caller is in charge of sizing the buffer,
function all intrinsic.
*> (or getting trickier with the write callback)
*> Pass URL and working-storage variable,
data division.
*> get back libcURL error code or 0 for success
working-storage section.
copy "gccurlsym.cpy".
replace also ==:CALL-EXCEPTION:== by == on exception perform hard-exception ==.
01 curl-handle usage pointer.
01 callback-handle usage procedure-pointer.
01 memory-block.
05 memory-address usage pointer sync.
05 memory-size usage binary-long sync.
05 running-total usage binary-long sync.
01 curl-result usage binary-long.
01 cli pic x(7) external.
88 helping values "-h", "-help", "help", spaces.
88 displaying value "display".
88 summarizing value "summary".
linkage section.
01 url pic x any length.
01 buffer pic x any length.
01 curl-status usage binary-long.
procedure division using
url buffer
returning curl-status.
if displaying or summarizing then
display "Read: " url upon syserr
end-if
call "curl_global_init" using
by value CURL_GLOBAL_ALL on exception
display "need libcurl, link with -lcurl" upon syserr
stop run returning 1
end-call
call "curl_easy_init"
returning curl-handle :CALL-EXCEPTION:
end-call
if curl-handle equal NULL then
display "no curl handle" upon syserr
stop run returning 1
end-if
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_URL
by reference concatenate(trim(url trailing), x"00") :CALL-EXCEPTION:
end-call
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_FOLLOWLOCATION
by value 1 :CALL-EXCEPTION:
end-call
set callback-handle to address of entry "curl-write-callback"
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_WRITEFUNCTION
by value callback-handle :CALL-EXCEPTION:
end-call
set memory-address to address of buffer
move length(buffer) to memory-size
move 1 to running-total
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_WRITEDATA
by value address of memory-block :CALL-EXCEPTION:
end-call
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_USERAGENT
by reference concatenate("libcurl-agent/1.0", x"00") :CALL-EXCEPTION:
end-call
call "curl_easy_perform" using
by value curl-handle
returning curl-result :CALL-EXCEPTION:
end-call
move curl-result to curl-status
call "curl_easy_cleanup" using
by value curl-handle
returning omitted :CALL-EXCEPTION:
end-call
goback.
:EXCEPTION-HANDLERS:
end function read-url.


move read-url("http://www.rosettacode.org", web-page)
identification division.
to curl-status


perform check
program-id.
perform show
curl-rosetta.

environment division.
goback.
*> ***************************************************************
configuration section.

repository.
*> Now tesing the result, relying on the gccurlsym
function read-url function all intrinsic.
*> GnuCOBOL Curl Symbol copy book
data division.
check.
if curl-status not equal zero then
display
working-storage section.
curl-status " "
copy "gccurlsym.cpy".
CURLEMSG(curl-status) upon syserr
01 web-page pic x(16777216).
01 curl-status usage binary-long.
end-if
01 cli pic x(7) external.
.

88 helping values "-h", "-help", "help", spaces.
88 displaying value "display".
*> And display the page
show.
88 summarizing value "summary".
if summarizing then
display "Length: " stored-char-length(web-page)
procedure division.
end-if
if displaying then
accept cli from command-line
display trim(web-page trailing) with no advancing
if helping then
end-if
display "./curl-rosetta [help|display|summary]" goback
end-if
.

move read-url("http://www.rosettacode.org", web-page) to curl-status
REPLACE ALSO ==:EXCEPTION-HANDLERS:== BY
perform check
perform show goback.
==
*> informational warnings and abends
check.
soft-exception.
if curl-status not equal zero then
display curl-status " " CURLEMSG(curl-status) upon syserr
display space upon syserr
display "--Exception Report-- " upon syserr
end-if.
display "Time of exception: " current-date upon syserr
show.
display "Module: " module-id upon syserr
if summarizing then
display "Length: " stored-char-length(web-page)
display "Module-path: " module-path upon syserr
display "Module-source: " module-source upon syserr
end-if
display "Exception-file: " exception-file upon syserr
if displaying then
display trim(web-page trailing) with no advancing
display "Exception-status: " exception-status upon syserr
display "Exception-location: " exception-location upon syserr
end-if.
display "Exception-statement: " exception-statement upon syserr
REPLACE ALSO == :EXCEPTION-HANDLERS: == BY == soft-exception.
display space upon syserr
.

display "--Exception Report-- " upon syserr
display "Time of exception: " current-date upon syserr
hard-exception.
display "Module: " module-id upon syserr
perform soft-exception
stop run returning 127
display "Module-path: " module-path upon syserr
.
display "Module-source: " module-source upon syserr
==.
display "Exception-file: " exception-file upon syserr

display "Exception-status: " exception-status upon syserr
end program curl-rosetta.
display "Exception-location: " exception-location upon syserr
*> ***************************************************************
display "Exception-statement: " exception-statement upon syserr.

hard-exception.
*> ***************************************************************
perform soft-exception stop run returning 127.
==.
*>
*> The function hiding all the curl details
end program curl-rosetta.</lang>
*>
Copybook :
*> Purpose: Call libcURL and read into memory
<lang cobol>01 CURL_MAX_HTTP_HEADER CONSTANT AS 102400.
*> ***************************************************************
78 CURL_GLOBAL_ALL VALUE 3.
identification division.
78 CURLOPT_FOLLOWLOCATION VALUE 52.
function-id. read-url.
78 CURLOPT_WRITEDATA VALUE 10001.

78 CURLOPT_URL VALUE 10002.
environment division.
78 CURLOPT_USERAGENT VALUE 10018.
configuration section.
78 CURLOPT_WRITEFUNCTION VALUE 20011.
repository.
78 CURLOPT_COOKIEFILE VALUE 10031.
function all intrinsic.
78 CURLOPT_COOKIEJAR VALUE 10082.

78 CURLOPT_COOKIELIST VALUE 10135.
data division.
78 CURLINFO_COOKIELIST VALUE 4194332.
working-storage section.
78 CURLE_OK VALUE 0.

78 CURLE_UNSUPPORTED_PROTOCOL VALUE 1.
copy "gccurlsym.cpy".
78 CURLE_FAILED_INIT VALUE 2.

78 CURLE_URL_MALFORMAT VALUE 3.
replace also ==:CALL-EXCEPTION:== by
78 CURLE_OBSOLETE4 VALUE 4.
==
78 CURLE_COULDNT_RESOLVE_PROXY VALUE 5.
on exception
78 CURLE_COULDNT_RESOLVE_HOST VALUE 6.
perform hard-exception
78 CURLE_COULDNT_CONNECT VALUE 7.
==.
78 CURLE_FTP_WEIRD_SERVER_REPLY VALUE 8.

78 CURLE_REMOTE_ACCESS_DENIED VALUE 9.
01 curl-handle usage pointer.
78 CURLE_OBSOLETE10 VALUE 10.
01 callback-handle usage procedure-pointer.
78 CURLE_FTP_WEIRD_PASS_REPLY VALUE 11.
01 memory-block.
78 CURLE_OBSOLETE12 VALUE 12.
05 memory-address usage pointer sync.
78 CURLE_FTP_WEIRD_PASV_REPLY VALUE 13.
05 memory-size usage binary-long sync.
78 CURLE_FTP_WEIRD_227_FORMAT VALUE 14.
05 running-total usage binary-long sync.
78 CURLE_FTP_CANT_GET_HOST VALUE 15.
01 curl-result usage binary-long.
78 CURLE_OBSOLETE16 VALUE 16.

78 CURLE_FTP_COULDNT_SET_TYPE VALUE 17.
01 cli pic x(7) external.
78 CURLE_PARTIAL_FILE VALUE 18.
88 helping values "-h", "-help", "help", spaces.
78 CURLE_FTP_COULDNT_RETR_FILE VALUE 19.
88 displaying value "display".
78 CURLE_OBSOLETE20 VALUE 20.
88 summarizing value "summary".
78 CURLE_QUOTE_ERROR VALUE 21.

78 CURLE_HTTP_RETURNED_ERROR VALUE 22.
linkage section.
78 CURLE_WRITE_ERROR VALUE 23.
01 url pic x any length.
78 CURLE_OBSOLETE24 VALUE 24.
01 buffer pic x any length.
78 CURLE_UPLOAD_FAILED VALUE 25.
01 curl-status usage binary-long.
78 CURLE_READ_ERROR VALUE 26.

78 CURLE_OUT_OF_MEMORY VALUE 27.
*> ***************************************************************
78 CURLE_OPERATION_TIMEDOUT VALUE 28.
procedure division using url buffer returning curl-status.
78 CURLE_OBSOLETE29 VALUE 29.
if displaying or summarizing then
78 CURLE_FTP_PORT_FAILED VALUE 30.
display "Read: " url upon syserr
78 CURLE_FTP_COULDNT_USE_REST VALUE 31.
end-if
78 CURLE_OBSOLETE32 VALUE 32.

78 CURLE_RANGE_ERROR VALUE 33.
*> initialize libcurl, hint at missing library if need be
78 CURLE_HTTP_POST_ERROR VALUE 34.
call "curl_global_init" using by value CURL_GLOBAL_ALL
78 CURLE_SSL_CONNECT_ERROR VALUE 35.
on exception
78 CURLE_BAD_DOWNLOAD_RESUME VALUE 36.
display
78 CURLE_FILE_COULDNT_READ_FILE VALUE 37.
"need libcurl, link with -lcurl" upon syserr
78 CURLE_LDAP_CANNOT_BIND VALUE 38.
stop run returning 1
78 CURLE_LDAP_SEARCH_FAILED VALUE 39.
end-call
78 CURLE_OBSOLETE40 VALUE 40.

78 CURLE_FUNCTION_NOT_FOUND VALUE 41.
*> initialize handle
78 CURLE_ABORTED_BY_CALLBACK VALUE 42.
call "curl_easy_init" returning curl-handle
78 CURLE_BAD_FUNCTION_ARGUMENT VALUE 43.
:CALL-EXCEPTION:
78 CURLE_OBSOLETE44 VALUE 44.
end-call
78 CURLE_INTERFACE_FAILED VALUE 45.
if curl-handle equal NULL then
78 CURLE_OBSOLETE46 VALUE 46.
display "no curl handle" upon syserr
78 CURLE_TOO_MANY_REDIRECTS VALUE 47.
stop run returning 1
78 CURLE_UNKNOWN_TELNET_OPTION VALUE 48.
end-if
78 CURLE_TELNET_OPTION_SYNTAX VALUE 49.

78 CURLE_OBSOLETE50 VALUE 50.
*> Set the URL
78 CURLE_PEER_FAILED_VERIFICATION VALUE 51.
call "curl_easy_setopt" using
78 CURLE_GOT_NOTHING VALUE 52.
by value curl-handle
78 CURLE_SSL_ENGINE_NOTFOUND VALUE 53.
by value CURLOPT_URL
78 CURLE_SSL_ENGINE_SETFAILED VALUE 54.
by reference concatenate(trim(url trailing), x"00")
78 CURLE_SEND_ERROR VALUE 55.
:CALL-EXCEPTION:
78 CURLE_RECV_ERROR VALUE 56.
end-call
78 CURLE_OBSOLETE57 VALUE 57.

78 CURLE_SSL_CERTPROBLEM VALUE 58.
*> follow all redirects
78 CURLE_SSL_CIPHER VALUE 59.
call "curl_easy_setopt" using
78 CURLE_SSL_CACERT VALUE 60.
by value curl-handle
78 CURLE_BAD_CONTENT_ENCODING VALUE 61.
by value CURLOPT_FOLLOWLOCATION
78 CURLE_LDAP_INVALID_URL VALUE 62.
by value 1
78 CURLE_FILESIZE_EXCEEDED VALUE 63.
:CALL-EXCEPTION:
78 CURLE_USE_SSL_FAILED VALUE 64.
end-call
78 CURLE_SEND_FAIL_REWIND VALUE 65.

78 CURLE_SSL_ENGINE_INITFAILED VALUE 66.
*> set the call back to write to memory
78 CURLE_LOGIN_DENIED VALUE 67.
set callback-handle to address of entry "curl-write-callback"
78 CURLE_TFTP_NOTFOUND VALUE 68.
call "curl_easy_setopt" using
78 CURLE_TFTP_PERM VALUE 69.
by value curl-handle
78 CURLE_REMOTE_DISK_FULL VALUE 70.
by value CURLOPT_WRITEFUNCTION
78 CURLE_TFTP_ILLEGAL VALUE 71.
by value callback-handle
78 CURLE_TFTP_UNKNOWNID VALUE 72.
:CALL-EXCEPTION:
78 CURLE_REMOTE_FILE_EXISTS VALUE 73.
end-call
78 CURLE_TFTP_NOSUCHUSER VALUE 74.

78 CURLE_CONV_FAILED VALUE 75.
*> set the curl handle data handling structure
78 CURLE_CONV_REQD VALUE 76.
set memory-address to address of buffer
78 CURLE_SSL_CACERT_BADFILE VALUE 77.
move length(buffer) to memory-size
78 CURLE_REMOTE_FILE_NOT_FOUND VALUE 78.
move 1 to running-total
78 CURLE_SSH VALUE 79.

78 CURLE_SSL_SHUTDOWN_FAILED VALUE 80.
call "curl_easy_setopt" using
78 CURLE_AGAIN VALUE 81.
by value curl-handle
01 LIBCURL_ERRORS.
by value CURLOPT_WRITEDATA
02 CURLEVALUES.
by value address of memory-block
03 FILLER PIC X(30) VALUE "CURLE_UNSUPPORTED_PROTOCOL ".
:CALL-EXCEPTION:
03 FILLER PIC X(30) VALUE "CURLE_FAILED_INIT ".
end-call
03 FILLER PIC X(30) VALUE "CURLE_URL_MALFORMAT ".

03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE4 ".
*> some servers demand an agent
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_PROXY ".
call "curl_easy_setopt" using
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_HOST ".
by value curl-handle
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_CONNECT ".
by value CURLOPT_USERAGENT
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_SERVER_REPLY ".
by reference concatenate("libcurl-agent/1.0", x"00")
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_ACCESS_DENIED ".
:CALL-EXCEPTION:
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE10 ".
end-call
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASS_REPLY ".

03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE12 ".
*> let curl do all the hard work
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASV_REPLY ".
call "curl_easy_perform" using
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_227_FORMAT ".
by value curl-handle
03 FILLER PIC X(30) VALUE "CURLE_FTP_CANT_GET_HOST ".
returning curl-result
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE16 ".
:CALL-EXCEPTION:
       end-call

       *> the call back will handle filling ram, return the result code
       move curl-result to curl-status

       *> curl clean up, more important if testing cookies
       call "curl_easy_cleanup" using
           by value curl-handle
           returning omitted
           :CALL-EXCEPTION:
       end-call

       goback.

       :EXCEPTION-HANDLERS:

       end function read-url.
       *> ***************************************************************

       *> ***************************************************************
       *> Supporting libcurl callback
       identification division.
       program-id. curl-write-callback.

       environment division.
       configuration section.
       repository.
           function all intrinsic.

       data division.
       working-storage section.
       01 real-size usage binary-long.

       *> libcURL will pass a pointer to this structure in the callback
       01 memory-block based.
          05 memory-address usage pointer sync.
          05 memory-size usage binary-long sync.
          05 running-total usage binary-long sync.

       01 content-buffer pic x(65536) based.
       01 web-space pic x(16777216) based.
       01 left-over usage binary-long.

       linkage section.
       01 contents usage pointer.
       01 element-size usage binary-long.
       01 element-count usage binary-long.
       01 memory-structure usage pointer.

       *> ***************************************************************
       procedure division
           using
               by value contents
               by value element-size
               by value element-count
               by value memory-structure
           returning real-size.

       set address of memory-block to memory-structure
       compute real-size = element-size * element-count end-compute

       *> Fence off the end of buffer
       compute
           left-over = memory-size - running-total
       end-compute
       if left-over > 0 and < real-size then
           move left-over to real-size
       end-if

       *> if there is more buffer, and data not zero length
       if (left-over > 0) and (real-size > 1) then
           set address of content-buffer to contents
           set address of web-space to memory-address

           move content-buffer(1:real-size)
             to web-space(running-total:real-size)

           add real-size to running-total
       else
           display "curl buffer sizing problem" upon syserr
       end-if

       goback.
       end program curl-write-callback.</lang>

and a copybook

<lang cobol> *> manifest constants for libcurl
*> Usage: COPY occurlsym inside data division
*> Taken from include/curl/curl.h 2013-12-19

*> Functional enums
01 CURL_MAX_HTTP_HEADER CONSTANT AS 102400.

78 CURL_GLOBAL_ALL VALUE 3.

78 CURLOPT_FOLLOWLOCATION VALUE 52.
78 CURLOPT_WRITEDATA VALUE 10001.
78 CURLOPT_URL VALUE 10002.
78 CURLOPT_USERAGENT VALUE 10018.
78 CURLOPT_WRITEFUNCTION VALUE 20011.
78 CURLOPT_COOKIEFILE VALUE 10031.
78 CURLOPT_COOKIEJAR VALUE 10082.
78 CURLOPT_COOKIELIST VALUE 10135.

*> Informationals
78 CURLINFO_COOKIELIST VALUE 4194332.

*> Result codes
78 CURLE_OK VALUE 0.
*> Error codes
78 CURLE_UNSUPPORTED_PROTOCOL VALUE 1.
78 CURLE_FAILED_INIT VALUE 2.
78 CURLE_URL_MALFORMAT VALUE 3.
78 CURLE_OBSOLETE4 VALUE 4.
78 CURLE_COULDNT_RESOLVE_PROXY VALUE 5.
78 CURLE_COULDNT_RESOLVE_HOST VALUE 6.
78 CURLE_COULDNT_CONNECT VALUE 7.
78 CURLE_FTP_WEIRD_SERVER_REPLY VALUE 8.
78 CURLE_REMOTE_ACCESS_DENIED VALUE 9.
78 CURLE_OBSOLETE10 VALUE 10.
78 CURLE_FTP_WEIRD_PASS_REPLY VALUE 11.
78 CURLE_OBSOLETE12 VALUE 12.
78 CURLE_FTP_WEIRD_PASV_REPLY VALUE 13.
78 CURLE_FTP_WEIRD_227_FORMAT VALUE 14.
78 CURLE_FTP_CANT_GET_HOST VALUE 15.
78 CURLE_OBSOLETE16 VALUE 16.
78 CURLE_FTP_COULDNT_SET_TYPE VALUE 17.
78 CURLE_PARTIAL_FILE VALUE 18.
78 CURLE_FTP_COULDNT_RETR_FILE VALUE 19.
78 CURLE_OBSOLETE20 VALUE 20.
78 CURLE_QUOTE_ERROR VALUE 21.
78 CURLE_HTTP_RETURNED_ERROR VALUE 22.
78 CURLE_WRITE_ERROR VALUE 23.
78 CURLE_OBSOLETE24 VALUE 24.
78 CURLE_UPLOAD_FAILED VALUE 25.
78 CURLE_READ_ERROR VALUE 26.
78 CURLE_OUT_OF_MEMORY VALUE 27.
78 CURLE_OPERATION_TIMEDOUT VALUE 28.
78 CURLE_OBSOLETE29 VALUE 29.
78 CURLE_FTP_PORT_FAILED VALUE 30.
78 CURLE_FTP_COULDNT_USE_REST VALUE 31.
78 CURLE_OBSOLETE32 VALUE 32.
78 CURLE_RANGE_ERROR VALUE 33.
78 CURLE_HTTP_POST_ERROR VALUE 34.
78 CURLE_SSL_CONNECT_ERROR VALUE 35.
78 CURLE_BAD_DOWNLOAD_RESUME VALUE 36.
78 CURLE_FILE_COULDNT_READ_FILE VALUE 37.
78 CURLE_LDAP_CANNOT_BIND VALUE 38.
78 CURLE_LDAP_SEARCH_FAILED VALUE 39.
78 CURLE_OBSOLETE40 VALUE 40.
78 CURLE_FUNCTION_NOT_FOUND VALUE 41.
78 CURLE_ABORTED_BY_CALLBACK VALUE 42.
78 CURLE_BAD_FUNCTION_ARGUMENT VALUE 43.
78 CURLE_OBSOLETE44 VALUE 44.
78 CURLE_INTERFACE_FAILED VALUE 45.
78 CURLE_OBSOLETE46 VALUE 46.
78 CURLE_TOO_MANY_REDIRECTS VALUE 47.
78 CURLE_UNKNOWN_TELNET_OPTION VALUE 48.
78 CURLE_TELNET_OPTION_SYNTAX VALUE 49.
78 CURLE_OBSOLETE50 VALUE 50.
78 CURLE_PEER_FAILED_VERIFICATION VALUE 51.
78 CURLE_GOT_NOTHING VALUE 52.
78 CURLE_SSL_ENGINE_NOTFOUND VALUE 53.
78 CURLE_SSL_ENGINE_SETFAILED VALUE 54.
78 CURLE_SEND_ERROR VALUE 55.
78 CURLE_RECV_ERROR VALUE 56.
78 CURLE_OBSOLETE57 VALUE 57.
78 CURLE_SSL_CERTPROBLEM VALUE 58.
78 CURLE_SSL_CIPHER VALUE 59.
78 CURLE_SSL_CACERT VALUE 60.
78 CURLE_BAD_CONTENT_ENCODING VALUE 61.
78 CURLE_LDAP_INVALID_URL VALUE 62.
78 CURLE_FILESIZE_EXCEEDED VALUE 63.
78 CURLE_USE_SSL_FAILED VALUE 64.
78 CURLE_SEND_FAIL_REWIND VALUE 65.
78 CURLE_SSL_ENGINE_INITFAILED VALUE 66.
78 CURLE_LOGIN_DENIED VALUE 67.
78 CURLE_TFTP_NOTFOUND VALUE 68.
78 CURLE_TFTP_PERM VALUE 69.
78 CURLE_REMOTE_DISK_FULL VALUE 70.
78 CURLE_TFTP_ILLEGAL VALUE 71.
78 CURLE_TFTP_UNKNOWNID VALUE 72.
78 CURLE_REMOTE_FILE_EXISTS VALUE 73.
78 CURLE_TFTP_NOSUCHUSER VALUE 74.
78 CURLE_CONV_FAILED VALUE 75.
78 CURLE_CONV_REQD VALUE 76.
78 CURLE_SSL_CACERT_BADFILE VALUE 77.
78 CURLE_REMOTE_FILE_NOT_FOUND VALUE 78.
78 CURLE_SSH VALUE 79.
78 CURLE_SSL_SHUTDOWN_FAILED VALUE 80.
78 CURLE_AGAIN VALUE 81.

*> Error strings
01 LIBCURL_ERRORS.
02 CURLEVALUES.
03 FILLER PIC X(30) VALUE "CURLE_UNSUPPORTED_PROTOCOL ".
03 FILLER PIC X(30) VALUE "CURLE_FAILED_INIT ".
03 FILLER PIC X(30) VALUE "CURLE_URL_MALFORMAT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE4 ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_PROXY ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_HOST ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_CONNECT ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_SERVER_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_ACCESS_DENIED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE10 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASS_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE12 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASV_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_227_FORMAT ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_CANT_GET_HOST ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE16 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_SET_TYPE ".
03 FILLER PIC X(30) VALUE "CURLE_PARTIAL_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_RETR_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE20 ".
03 FILLER PIC X(30) VALUE "CURLE_QUOTE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_HTTP_RETURNED_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_WRITE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE24 ".
03 FILLER PIC X(30) VALUE "CURLE_UPLOAD_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_READ_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OUT_OF_MEMORY ".
03 FILLER PIC X(30) VALUE "CURLE_OPERATION_TIMEDOUT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE29 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_PORT_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_USE_REST ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE32 ".
03 FILLER PIC X(30) VALUE "CURLE_RANGE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_HTTP_POST_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CONNECT_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_DOWNLOAD_RESUME ".
03 FILLER PIC X(30) VALUE "CURLE_FILE_COULDNT_READ_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_CANNOT_BIND ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_SEARCH_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE40 ".
03 FILLER PIC X(30) VALUE "CURLE_FUNCTION_NOT_FOUND ".
03 FILLER PIC X(30) VALUE "CURLE_ABORTED_BY_CALLBACK ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_FUNCTION_ARGUMENT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE44 ".
03 FILLER PIC X(30) VALUE "CURLE_INTERFACE_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE46 ".
03 FILLER PIC X(30) VALUE "CURLE_TOO_MANY_REDIRECTS ".
03 FILLER PIC X(30) VALUE "CURLE_UNKNOWN_TELNET_OPTION ".
03 FILLER PIC X(30) VALUE "CURLE_TELNET_OPTION_SYNTAX ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE50 ".
03 FILLER PIC X(30) VALUE "CURLE_PEER_FAILED_VERIFICATION".
03 FILLER PIC X(30) VALUE "CURLE_GOT_NOTHING ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_NOTFOUND ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_SETFAILED ".
03 FILLER PIC X(30) VALUE "CURLE_SEND_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_RECV_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE57 ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CERTPROBLEM ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CIPHER ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_CONTENT_ENCODING ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_INVALID_URL ".
03 FILLER PIC X(30) VALUE "CURLE_FILESIZE_EXCEEDED ".
03 FILLER PIC X(30) VALUE "CURLE_USE_SSL_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_SEND_FAIL_REWIND ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_INITFAILED ".
03 FILLER PIC X(30) VALUE "CURLE_LOGIN_DENIED ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOTFOUND ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_PERM ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_DISK_FULL ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_ILLEGAL ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_UNKNOWNID ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_EXISTS ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOSUCHUSER ".
03 FILLER PIC X(30) VALUE "CURLE_CONV_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_CONV_REQD ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT_BADFILE ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_NOT_FOUND ".
03 FILLER PIC X(30) VALUE "CURLE_SSH ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_SHUTDOWN_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_AGAIN ".
01 FILLER REDEFINES LIBCURL_ERRORS.
02 CURLEMSG OCCURS 81 TIMES PIC X(30).</lang>

{{out}}
<pre>prompt$ ./curl-rosetta summary
Read: http://www.rosettacode.org
Length: 000024043

prompt$ ./curl-rosetta display
Read: http://www.rosettacode.org
<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
...</pre>


=={{header|ColdFusion}}==
<lang coldfusion>
<cfhttp url="http://www.rosettacode.org" result="result">
<cfoutput>#result.FileContent#</cfoutput>
</lang>


=={{header|Common Lisp}}==
CLISP provides an extension function to read http sources. Other implementations may do this differently.
{{works with|CLISP}}
<lang lisp>
(defun wget-clisp (url)
  (ext:with-http-input (stream url)
    (loop for line = (read-line stream nil nil)
          while line
          do (format t "~a~%" line))))
</lang>
{{libheader|DRAKMA}}

First grabbing the entire body as a string, and then by pulling from a stream (as in the CLISP example).

<lang lisp>
(defun wget-drakma-string (url &optional (out *standard-output*))
  "Grab the body as a string, and write it to out."
  (write-string (drakma:http-request url) out))

(defun wget-drakma-stream (url &optional (out *standard-output*))
  "Grab the body as a stream, and write it to out."
  (loop with body = (drakma:http-request url :want-stream t)
        for line = (read-line body nil nil)
        while line do (write-line line out)
        finally (close body)))
</lang>


=={{header|Crystal}}==
<lang crystal>
require "http/client"

HTTP::Client.get("http://google.com")
</lang>


=={{header|D}}==
{{libheader|phobos}}
<lang D>
void main() {
    import std.stdio, std.net.curl;
    writeln(get("http://google.com"));
}
</lang>

{{libheader|tango}}

<lang D>
import tango.io.Console;
import tango.net.http.HttpGet;

void main() {
    Cout.stream.copy( (new HttpGet("http://google.com")).open );
}
</lang>

Or more operating directly on the socket:

<lang D>
import tango.io.Console;
import tango.net.InternetAddress;
import tango.net.device.Socket;

void main() {
    auto site = new Socket;
    site.connect (new InternetAddress("google.com",80)).write ("GET / HTTP/1.0\n\n");

    Cout.stream.copy (site);
}
</lang>


=={{header|Dart}}==
<lang d>import 'dart:io';
void main(){
  var url = 'http://rosettacode.org';
  var client = new HttpClient();
  client.getUrl(Uri.parse(url))
    .then((HttpClientRequest request) => request.close())
    .then((HttpClientResponse response) => response.pipe(stdout));
}</lang>


=={{header|Delphi}}==
Simple example using the free Synapse TCP/IP library [http://www.ararat.cz/synapse/doku.php/download]

<lang Delphi>
program HTTP;

{$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses
  Classes,
  httpsend; // Synapse httpsend class

var
  Response: TStrings;
  HTTPObj: THTTPSend;

begin
  HTTPObj := THTTPSend.Create;
  try
    { Stringlist object to capture HTML returned
      from URL }
    Response := TStringList.Create;
    try
      if HTTPObj.HTTPMethod('GET','http://www.mgis.uk.com') then
        begin
          { Load HTTP Document into Stringlist }
          Response.LoadFromStream(HTTPObj.Document);
          { Write the response to the console window }
          Writeln(Response.Text);
        end
      else
        Writeln('Error retrieving data');

    finally
      Response.Free;
    end;

  finally
    HTTPObj.Free;
  end;

  // Keep console window open
  Readln;

end.
</lang>

Using Indy:

<lang Delphi>
program ShowHTTP;

{$APPTYPE CONSOLE}

uses IdHttp;

var
  s: string;
  lHTTP: TIdHTTP;
begin
  lHTTP := TIdHTTP.Create(nil);
  try
    lHTTP.HandleRedirects := True;
    s := lHTTP.Get('http://www.rosettacode.org');
    Writeln(s);
  finally
    lHTTP.Free;
  end;
end.
</lang>


=={{header|Dragon}}==
<lang dragon>select "http"
select "std"

http("http://www.rosettacode.org", ::echo)
</lang>


=={{header|E}}==

<lang e>
when (def t := <http://www.rosettacode.org> <- getText()) -> {
    println(t)
}
</lang>


=={{header|EchoLisp}}==
'''file->string''' usage: the server must allow cross-domain access, or a browser add-on like cors-everywhere must be installed to bypass cross-domain checking.
<lang scheme>
;; asynchronous call back definition
(define (success name text) (writeln 'Loaded name) (writeln text))
;;
(file->string success "http://www.google.com")
</lang>


=={{header|Emacs Lisp}}==
<code>url.el</code> can download HTTP. <code>url-retrieve-synchronously</code> returns a buffer containing headers and body. Caller kills the buffer when no longer required.

<lang Lisp>(with-current-buffer
    (url-retrieve-synchronously "http://www.rosettacode.org")
  (goto-char (point-min))
  (search-forward "\n\n" nil t) ;; skip headers
  (prin1 (buffer-substring (point) (point-max)))
  (kill-buffer (current-buffer)))</lang>


=={{header|Erlang}}==
=={{header|Erlang}}==
Line 841: Line 1,091:
=={{header|F_Sharp|F#}}==
In F# we can just use the .NET library to do this so its the same as the [[C_sharp|C#]] example.

<lang fsharp>
let wget (url : string) =
    use c = new System.Net.WebClient()
    c.DownloadString(url)

printfn "%s" (wget "http://www.rosettacode.org/")
</lang>

However unlike C#, F# can use an asynchronous workflow to avoid blocking any threads while waiting for a response from the server. To asynchronously download three url's at once...

<lang fsharp>
open System.Net
open System.IO

let wgetAsync url =
    async { let request = WebRequest.Create (url:string)
            use! response = request.AsyncGetResponse()
            use responseStream = response.GetResponseStream()
            use reader = new StreamReader(responseStream)
            return reader.ReadToEnd() }

let urls = ["http://www.rosettacode.org/"; "http://www.yahoo.com/"; "http://www.google.com/"]
let content = urls
              |> List.map wgetAsync
Line 863: Line 1,121:
=={{header|Factor}}==
<lang factor>USE: http.client
"http://www.rosettacode.org" http-get nip print
</lang>


=={{header|Forth}}==
{{works with|GNU Forth|0.7.0}}
This works at the socket level, returning both the HTTP headers and page contents.
<lang forth>
include unix/socket.fs

s" localhost" 80 open-socket
dup s\" GET / HTTP/1.0\n\n" rot write-socket
dup pad 8092 read-socket type
close-socket
</lang>


=={{header|friendly interactive shell}}==
{{trans|UNIX Shell}}
<lang fishshell>curl -s -L http://rosettacode.org/</lang>

<lang fishshell>lynx -source http://rosettacode.org/</lang>

<lang fishshell>wget -O - -q http://rosettacode.org/</lang>

<lang fishshell>lftp -c "cat http://rosettacode.org/"</lang>

{{works with|BSD}}
<lang fishshell>ftp -o - http://rosettacode.org ^ /dev/null</lang>
Line 885: Line 1,151:
=={{header|Frink}}==
Frink's <CODE>read[<I>URL</I>]</CODE> function works with any URL type supported by your Java Virtual Machine, and returns the results as a single string.
<lang frink>
print[read["http://frinklang.org/"]]
</lang>


=={{header|Gastona}}==
<lang gastona>#listix#

   <main>
      LOOP, TEXT FILE, http://www.rosettacode.org
          , BODY, @<value>
</lang>


=={{header|GML}}==
{{works with|Game Maker Studio}}

'''Any Event'''
<lang gml>get = http_get("http://www.rosettacode.org/");</lang>

'''HTTP Event'''
<lang gml>if (ds_map_find_value(async_load,"id") == get)
{
    show_message_async(ds_map_find_value(async_load,"result"));
}</lang>


=={{header|Go}}==
<lang go>
package main

import (
    "io"
    "log"
    "net/http"
    "os"
)

func main() {
    r, err := http.Get("http://rosettacode.org/robots.txt")
    if err != nil {
        log.Fatalln(err)
    }
    io.Copy(os.Stdout, r.Body)
}
</lang>
</lang>

Output:
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>


=={{header|Groovy}}==
<lang groovy>
new URL("http://www.rosettacode.org").eachLine { println it }
</lang>


=={{header|GUISS}}==

It would be more appropriate to paste to notepad:

<lang guiss>Start,Programs,Applications,Mozilla Firefox,Inputbox:address bar>www.rosettacode.org,Button:Go,
Click:Area:browser window,Type:[Control A],[Control C],Start,Programs,Accessories,Notepad,
Menu:Edit,Paste</lang>


=={{header|Halon}}==
<lang halon>echo http("http://www.rosettacode.org");</lang>


=={{header|Haskell}}==
Using {{libheader|HTTP}} from [http://hackage.haskell.org/packages/hackage.html HackageDB]

<lang haskell>
import Network.Browser
import Network.HTTP
import Network.URI

main = do
    rsp <- Network.Browser.browse $ do
        setAllowRedirects True
        setOutHandler $ const (return ())
        request $ getRequest "http://www.rosettacode.org/"
    putStrLn $ rspBody $ snd rsp
</lang>


== Icon and Unicon ==
==={{header|Icon}}===
<lang icon>
link cfunc
procedure main(arglist)
   get(arglist[1])
end

procedure get(url)
   local f, host, port, path
   url ? {
      ="http://" | ="HTTP://"
      host := tab(upto(':/') | 0)
      if not (=":" & (port := integer(tab(upto('/'))))) then port := 80
      if pos(0) then path := "/" else path := tab(0)
      }
   write(host)
   write(path)
   f := tconnect(host, port) | stop("Unable to connect")
   writes(f, "GET ", path | "/" ," HTTP/1.0\r\n\r\n")
   while write(read(f))
end
</lang>

Using it
<lang icon>
|icon req.icn http://www.rosettacode.org
</lang>

==={{header|Unicon}}===
Unicon provides improved socket and messaging support without the need for the external function ''cfunc'':
<lang unicon>
procedure main(arglist)
  m := open(arglist[1],"m")
  while write(read(m))
end
</lang>


=={{header|J}}==
Using <tt>gethttp</tt> from [[Web Scraping#J|Web Scraping]]

<lang j>require'web/gethttp'
gethttp 'http://www.rosettacode.org'
</lang>


=={{header|Java}}==
<lang java5>import java.util.Scanner;
import java.net.URL;

public class Main {
    public static void main(String[] args) throws Exception {
        Scanner sc = new Scanner(new URL("http://www.rosettacode.org").openStream());
        while (sc.hasNext())
            System.out.println(sc.nextLine());
    }
}</lang>

{{libheader|Apache Commons IO}}

<lang java5>
import org.apache.commons.io.IOUtils;
import java.net.URL;

public class Main {
    public static void main(String[] args) throws Exception {
        IOUtils.copy(new URL("http://rosettacode.org").openStream(),System.out);
    }
}</lang>


=={{header|JavaScript}}==

===Browser===
<lang JavaScript>var req = new XMLHttpRequest();
req.onload = function() {
  console.log(this.responseText);
};

req.open('get', 'http://rosettacode.org', true);
req.send()</lang>

Using fetch API:
<lang JavaScript>
fetch('http://rosettacode.org').then(function(response) {
return response.text();
}).then(function(myText) {
console.log(myText);
});
</lang>

As a repeatable function:

<lang JavaScript>/**
* @name _http
* @description Generic API Client using XMLHttpRequest
* @param {string} url The URI/URL to connect to
* @param {string} method The HTTP method to invoke- GET, POST, etc
* @param {function} callback Once the HTTP request has completed, responseText is passed into this function for execution
* @param {object} params Query Parameters in a JavaScript Object (Optional)
*
*/
function _http(url, method, callback, params) {
var xhr,
reqUrl;

xhr = new XMLHttpRequest();
xhr.onreadystatechange = function xhrProc() {
if (xhr.readyState == 4 && xhr.status == 200) {
callback(xhr.responseText);
}
};


/** If Query Parameters are present, handle them... */
if (typeof params === 'undefined') {
reqUrl = url;
} else {
switch (method) {
case 'GET':
reqUrl = url + procQueryParams(params);
break;
case 'POST':
reqUrl = url;
break;
default:
}
}


/** Send the HTTP Request */
if (reqUrl) {
xhr.open(method, reqUrl, true);
xhr.setRequestHeader("Accept", "application/json");

if (method === 'POST') {
xhr.send(params);
} else {
xhr.send();
}
}


/**
* @name procQueryParams
* @description Return function that converts Query Parameters from a JavaScript Object to a proper URL encoded string
* @param {object} params Query Parameters in a JavaScript Object
*
*/
function procQueryParams(params) {
return "?" + Object
.keys(params)
.map(function (key) {
return key + "=" + encodeURIComponent(params[key])
})
.join("&")
}
}</lang>

Using jQuery:

<lang JavaScript>$.get('http://rosettacode.org', function(data) {
console.log(data);
};</lang>


===Node.js===

With Node.js, using only the included http module.

<lang javascript>const http = require('http');

http.get('http://rosettacode.org', (resp) => {

  let data = '';
  // A chunk of data has been received.
  resp.on('data', (chunk) => {
    data += chunk;
  });
  // The whole response has been received. Print out the result.
  resp.on('end', () => {
    console.log("Data:", data);
  });
}).on("error", (err) => {
  console.log("Error: " + err.message);
});</lang>


=={{header|Jsish}}==
=={{header|Jsish}}==
Based on Jsi_Wget that ships with Jsish.
Based on Jsi_Wget that ships with Jsish.

<lang javascript>#!/usr/bin/env jsish
<lang javascript>#!/usr/bin/env jsish
function httpGet(fileargs:array|string, conf:object=void) {
function httpGet(fileargs:array|string, conf:object=void) {
var options = {
var options = { // Web client for downloading files from url
headers: [],
headers : [], // Header fields to send.
nowait: false,
nowait : false, // Just return object: caller will call update.
onDone: null,
onDone : null, // Callback when done.
wsdebug: 0
wsdebug : 0 // WebSockets debug level.
};
};
var self = {
var self = {
address: '',
address : '',
done: false,
done : false,
path: '',
path : '',
port: -1,
port : -1,
post: '',
post : '', // Post file upload (UNIMPL).
scheme: 'http',
scheme : 'http', // Url scheme
protocol: 'get',
protocol : 'get',
url: null,
url : null,
response: ''
response : ''
};
};

parseOpts(self, options, conf);
parseOpts(self, options, conf);
if (self.port === -1)
if (self.port === -1)
self.port = 80;
self.port = 80;
function WsRecv(ws:userobj, id:number, str:string) {
function WsRecv(ws:userobj, id:number, str:string) {
LogDebug("LEN: "+str.length);
LogDebug("LEN: "+str.length);
Line 1,043: Line 1,461:
self.response += str;
self.response += str;
}
}
function WsClose(ws:userobj|null, id:number) {
function WsClose(ws:userobj|null, id:number) {
LogDebug("CLOSE");
LogDebug("CLOSE");
Line 1,049: Line 1,468:
self.onDone(id);
self.onDone(id);
}
}
function main() {
function main() {
if (self.Debug)
if (self.Debug)
Line 1,073: Line 1,493:
if (self.post.length)
if (self.post.length)
self.protocol = 'post';
self.protocol = 'post';
var wsopts = {
var wsopts = {
client: true,
client:true,
onRecv: WsRecv,
onRecv:WsRecv,
onClose: WsClose,
onClose:WsClose,
debug: self.wsdebug,
debug:self.wsdebug,
rootdir: self.path,
rootdir:self.path,
port: self.port,
port:self.port,
address: self.address,
address:self.address,
protocol: self.protocol,
protocol:self.protocol,
clientHost: self.address
clientHost:self.address
};
};
if (self.post.length)
if (self.post.length)
Line 1,108: Line 1,529:
return self.response;
return self.response;
}
}

return main();
return main();
}
}

provide(httpGet, "0.60");
provide(httpGet, "0.60");

if (isMain())
if (isMain())
runModule(httpGet);</lang>
runModule(httpGet);</lang>

{{out}}
<pre>prompt$ jsish
# require('httpGet')
0.6
# var page = httpGet('http://rosettacode.org/robots.txt')
variable
# page
"User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:

"</pre>


=={{header|Julia}}==
=={{header|Julia}}==
<lang Julia>readurl(url) = open(readlines, download(url))
<lang Julia>readurl(url) = open(readlines, download(url))

readurl("http://www.w3.org/Home.html")</lang>
readurl("http://rosettacode.org/index.html")</lang>


=={{header|Kotlin}}==
=={{header|Kotlin}}==
<lang scala>import java.net.URL
<lang scala>// version 1.1.2

import java.net.URL
import java.io.InputStreamReader
import java.io.InputStreamReader
import java.util.Scanner
import java.util.Scanner

fun main(args: Array<String>) {
fun main(args: Array<String>) {
val url = URL("http://www.w3.org/Home.html")
val url = URL("http://www.puzzlers.org/pub/wordlists/unixdict.txt")
val isr = InputStreamReader(url.openStream())
val isr = InputStreamReader(url.openStream())
val sc = Scanner(isr)
val sc = Scanner(isr)
while (sc.hasNextLine())
while (sc.hasNextLine()) println(sc.nextLine())
println(sc.nextLine())
sc.close()
sc.close()
}</lang>
}</lang>
Line 1,133: Line 1,578:
=={{header|Lasso}}==
=={{header|Lasso}}==
include_url is a wrapper for Lasso's curl datatype, however it can be achieved in several ways.
incude_url is a wrapper for Lasso's curl datatype, however it can be achieved in several ways.
Using the include_url wrapper.
<lang Lasso>// using include_url wrapper:
<lang Lasso>include_url('http://www.w3.org/Home.html')</lang>
include_url('http://rosettacode.org/index.html')

One line curl.
// one line curl
<lang Lasso>curl('http://www.w3.org/Home.html')->result->asString</lang>
curl('http://rosettacode.org/index')->result->asString
Using curl for more complex operations and feedback.

<lang Lasso>local(x = curl('http://www.w3.org/Home.html'))
// using curl for more complex operations and feedback
local(x = curl('http://rosettacode.org/index'))
local(y = #x->result)
local(y = #x->result)
#y->asString</lang>
#y->asString</lang>


=={{header|LFE}}==
=={{header|LFE}}==

===Synchronous===
===Synchronous===
<lang lisp>(: inets start)
<lang lisp>(: inets start)
(case (: httpc request '"http://lfe.github.io")
(
((tuple 'ok result)
case (: httpc request '"http://www.w3.org/Home.html") (
(tuple 'ok result)
(: io format '"Result: ~p" (list result)))
((tuple 'error reason)
(: io format '"Result: ~p" (list result))
(: io format '"Error: ~p~n" (list reason))))
) (
</lang>
(tuple 'error reason)

(: io format '"Error: ~p~n" (list reason))
)
)</lang>
===Asynchronous===
===Asynchronous===
<lang lisp>(: inets start)
<lang lisp>(: inets start)
(let* ((method 'get)
(
(url '"http://lfe.github.io")
let* (
(method 'get)
(headers ())
(url '"http://www.w3.org/Home.html")
(request-data (tuple url headers))
(headers ())
(http-options ())
(request-data (tuple url headers))
(request-options (list (tuple 'sync 'false))))
(http-options ())
(: httpc request method request-data http-options request-options)
(receive
(request-options (list (tuple 'sync 'false)))
((tuple 'http (tuple request-id (tuple 'error reason)))
)
(: io format '"Error: ~p~n" (list reason)))
(: httpc request method request-data http-options request-options)
((tuple 'http (tuple request-id result))
(
(: io format '"Result: ~p~n" (list result))))))
receive (
</lang>
(tuple 'http (tuple request-id (tuple 'error reason)))
(: io format '"Error: ~p~n" (list reason))
)
(
(tuple 'http (tuple request-id result))
(: io format '"Result: ~p~n" (list result))
)
)
))</lang>


=={{header|Liberty BASIC}}==
=={{header|Liberty BASIC}}==
Line 1,205: Line 1,643:
=={{header|Lingo}}==
=={{header|Lingo}}==
HTTP requests based on Director's native HTTP facilities - i.e. without using a 3rd party plugin ("Xtra") - are asynchronous. A simple implementation of a HTTP GET request might look like this:
HTTP requests based on Director's native HTTP facilities - i.e. without using a 3rd party plugin ("Xtra") - are asynchronous. A simple implementation of a HTTP GET request might look like this:

Parent script "SimpleHttpGet":
Parent script "SimpleHttpGet":
<lang lingo>property _netID
<lang lingo>property _netID
property _cbHandler
property _cbHandler
property _cbTarget
property _cbTarget

----------------------------------------
----------------------------------------
-- Simple HTTP GET request
-- Simple HTTP GET request
Line 1,216: Line 1,656:
----------------------------------------
----------------------------------------
on new (me, url, cbHandler, cbTarget)
on new (me, url, cbHandler, cbTarget)
if voidP(cbTarget) then
if voidP(cbTarget) then cbTarget = _movie
me._netID = getNetText(url)
cbTarget = _movie
me._netID = getNetText(url)
me._cbHandler = cbHandler
me._cbHandler = cbHandler
me._cbTarget = cbTarget
_movie.actorList.add(me)
me._cbTarget = cbTarget
return me
_movie.actorList.add(me)
return me
end
end

----------------------------------------
----------------------------------------
-- @callback
-- @callback
----------------------------------------
----------------------------------------
on stepFrame (me)
on stepFrame (me)
if netDone(me._netID) then
if netDone(me._netID) then
res = netTextResult(me._netID)
res = netTextResult(me._netID)
err = netError(me._netID)
err = netError(me._netID)
_movie.actorList.deleteOne(me)
_movie.actorList.deleteOne(me)
call(me._cbHandler, me._cbTarget, res, err)
call(me._cbHandler, me._cbTarget, res, err)
end if
end if
end</lang>
end</lang>

In some movie script:
In some movie script:
<lang lingo>----------------------------------------
<lang lingo>----------------------------------------
Line 1,240: Line 1,681:
----------------------------------------
----------------------------------------
on getAdobeHomePage ()
on getAdobeHomePage ()
script("SimpleHttpGet").new("http://www.w3.org/Home.html", #printResult)
script("SimpleHttpGet").new("http://www.adobe.com/", #printResult)
end
end

----------------------------------------
----------------------------------------
-- @callback
-- @callback
----------------------------------------
----------------------------------------
on printResult (res, err)
on printResult (res, err)
if err="OK" then
if err="OK" then
put res
put res
else
else
put "Network Error:" && err
put "Network Error:" && err
end if
end if
end</lang>
end</lang>

Executed in the "Message Window" (=Director's interactive Lingo console):
Executed in the "Message Window" (=Director's interactive Lingo console):
<lang lingo>getAdobeHomePage()
<lang lingo>getAdobeHomePage()
Line 1,260: Line 1,703:
Without a callback handler the get URL method will block until complete
Without a callback handler the get URL method will block until complete
<lang LiveCode>put true into libURLFollowHttpRedirects
<lang LiveCode>put true into libURLFollowHttpRedirects
get URL "http://www.w3.org/Home.html"
get URL "http://httpbin.org/html"
put it</lang>
put it</lang>
Non-blocking version
Non-blocking version
Line 1,266: Line 1,709:
answer "Download Complete" with "Okay"
answer "Download Complete" with "Okay"
end myUrlDownloadFinished
end myUrlDownloadFinished

command getWebResource
command getWebResource
load URL "http://www.w3.org/Home.html" with message "myUrlDownloadFinished"
load URL "http://httpbin.org/html" with message "myUrlDownloadFinished"
end getWebResource</lang>
end getWebResource</lang>


=={{header|LSL}}==
=={{header|LSL}}==
To test it yourself; rez a box on the ground, and add the following as a New Script.
To test it yourself; rez a box on the ground, and add the following as a New Script.
<lang LSL>string sURL = "http://www.w3.org/Home.html";
<lang LSL>string sURL = "http://www.RosettaCode.Org";
key kHttpRequestId;
key kHttpRequestId;
default {
default {
state_entry() {
state_entry() {
kHttpRequestId = llHTTPRequest(sURL, [], "");
kHttpRequestId = llHTTPRequest(sURL, [], "");
}
}
http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) {
http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) {
if (kRequestId == kHttpRequestId) {
if(kRequestId==kHttpRequestId) {
llOwnerSay("Status="+(string)iStatus);
llOwnerSay("Status="+(string)iStatus);
integer x = 0;
integer x = 0;
for (x=0 ; x<llGetListLength(lMetaData) ; x++) {
for(x=0 ; x<llGetListLength(lMetaData) ; x++) {
llOwnerSay("llList2String(lMetaData, "+(string)x+") = "+llList2String(lMetaData, x));
llOwnerSay("llList2String(lMetaData, "+(string)x+")="+llList2String(lMetaData, x));
}
}
list lBody = llParseString2List(sBody, ["\n"], []);
list lBody = llParseString2List(sBody, ["\n"], []);
for (x=0 ; x<llGetListLength(lBody) ; x++) {
for(x=0 ; x<llGetListLength(lBody) ; x++) {
llOwnerSay("llList2String(lBody, "+(string)x+") = "+llList2String(lBody, x));
llOwnerSay("llList2String(lBody, "+(string)x+")="+llList2String(lBody, x));
}
}
}
}
}
}
}
}</lang>
</lang>
Output:
<pre>Status=200
llList2String(lMetaData, 0)=0
llList2String(lMetaData, 1)=2048
llList2String(lBody, 0)=<!DOCTYPE html>
llList2String(lBody, 1)=<html lang="en" dir="ltr" class="client-nojs">
llList2String(lBody, 2)=<head>
llList2String(lBody, 3)=<title>Rosetta Code</title>
llList2String(lBody, 4)=<meta charset="UTF-8" />
llList2String(lBody, 5)=<meta name="generator" content="MediaWiki 1.18.0" />
llList2String(lBody, 6)=<link rel="shortcut icon" href="/favicon.ico" />
llList2String(lBody, 7)=<link rel="search" type="application/opensearchdescription+xml" href="/mw/opensearch_desc.php" title="Rosetta Code (en)" />
llList2String(lBody, 8)=<link rel="EditURI" type="application/rsd+xml" href="http://rosettacode.org/mw/api.php?action=rsd" />
llList2String(lBody, 9)=<link rel="copyright" href="http://www.gnu.org/licenses/fdl-1.2.html" />
llList2String(lBody, 10)=<link rel="alternate" type="application/atom+xml" title="Rosetta Code Atom feed" href="/mw/index.php?title=Special:RecentChanges&amp;feed=atom" />
llList2String(lBody, 11)=<link rel="stylesheet" href="/mw/load.php?debug=false&amp;lang=en&amp;modules=mediawiki.legacy.commonPrint%2Cshared%7Cskins.vector&amp;only=styles&amp;skin=vector&amp;*" />
llList2String(lBody, 12)=<meta name="ResourceLoaderDynamicStyles" content="" />
llList2String(lBody, 13)=<link rel="stylesheet" href="/mw/load.php?debug=false&amp;lang=en&amp;modules=site&amp;only=styles&amp;skin=vector&amp;*" />
llList2String(lBody, 14)=<style>a:lang(ar),a:lang(ckb),a:lang(fa),a:lang(kk-arab),a:lang(mzn),a:lang(ps),a:lang(ur){text-decoration:none}a.new,#quickbar a.new{color:#ba0000}
... ... ... ... ... ... ... ... ... ... ... ... ... ...
</pre>


=={{header|Lua}}==
=={{header|Lua}}==
{{libheader|LuaSocket}}
{{libheader|LuaSocket}}
<lang Lua>local http = require("socket.http")
<lang Lua>
local http = require("socket.http")
local url = require("socket.url")
local url = require("socket.url")
local page = http.request('http://www.w3.org/Home.html')
local page = http.request('http://www.google.com/m/search?q=' .. url.escape("lua"))
print(page)</lang>
print(page)
</lang>


=={{header|M2000 Interpreter}}==
=={{header|M2000 Interpreter}}==
Line 1,306: Line 1,774:
Using With statement we can make objects properties like ReadyState as variables
Using With statement we can make objects properties like ReadyState as variables
(some of them as read only)
(some of them as read only)

<lang M2000 Interpreter>Module CheckIt {
<lang M2000 Interpreter>
Declare xml "Microsoft.XMLHTTP"
Module CheckIt {
const testUrl$ = "http://www.w3.org/Home.html"
With xml, "readyState" as ReadyState
Declare xml "Microsoft.XMLHTTP"
Method xml "Open", "Get", testUrl$, True
const testUrl$ = "http://www.rosettacode.org"
Method xml "send"
With xml, "readyState" as ReadyState
Method xml "Open", "Get", testUrl$, True ' True means Async
k = 0
Method xml "send"
Thread {
k++
\\ We set a thread to count time
} as TimeOut interval 100
k=0
Task.Main 100 {
Thread {
Print ReadyState
k++
If ReadyState=4 then
} as TimeOut interval 100
\\ In main thread we can check ReadyState and Mouse button
exit
if k > 20 then
Task.Main 100 {
exit
Print ReadyState
if mouse then
If ReadyState=4 then exit
exit
if k>20 then exit ' 20*100= 2 sec
if mouse then exit ' exit if mouse click
}
if ReadyState = 4 then {
}
With xml, "responseText" AS AA$
\\ So now we can read
Document BB$=AA$
if ReadyState=4 then {
With xml, "responseText" AS AA$
Report BB$
\\ break AA$ to lines
}
Document BB$=AA$
Declare xml Nothing
\\ using line breaks as CRLF
Report BB$
}
Declare xml Nothing
}
}
CheckIt</lang>
CheckIt
</lang>


=={{header|Maple}}==
=={{header|Maple}}==
In Maple 18 or later:
In Maple 18 or later:
<lang Maple>
<lang Maple>content := URL:-Get("http://www.w3.org/Home.html");</lang>
content := URL:-Get( "http://www.google.com/" );
</lang>

In Maple 17 or earlier:
In Maple 17 or earlier:
<lang Maple>
<lang Maple>content := HTTP:-Get("http://www.w3.org/Home.html");</lang>
content := HTTP:-Get( "http://www.google.com/" );
</lang>


=={{header|Mathematica}} / {{header|Wolfram Language}}==
=={{header|Mathematica}} / {{header|Wolfram Language}}==
<lang Mathematica>Print[Import["http://www.w3.org/Home.html", "Source"]]</lang>
<lang Mathematica>
Print[Import["http://www.google.com/webhp?complete=1&hl=en", "Source"]]
</lang>


=={{header|MATLAB}} / {{header|Octave}}==
=={{header|MATLAB}} / {{header|Octave}}==
[http://www.mathworks.com/help/matlab/ref/urlread.html urlread] is MATLAB's function for making URL requests.
<lang MATLAB>>>urlread('http://www.w3.org/Home.html')</lang>
The documentation for Octave is available here [http://octave.sourceforge.net/octave/function/urlread.html urlread].

In this example we initiate an HTTP request for a single random number from [http://www.random.org random.org]:
<lang MATLAB>
>> random = urlread('http://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new')

random =

61
</lang>

It is possible to make more complicated requests, specifically "GET" and "POST," which is explained in the [http://www.mathworks.com/help/matlab/ref/urlread.html documentation].


=={{header|MIRC Scripting Language}}==
=={{header|MIRC Scripting Language}}==
Line 1,352: Line 1,844:
<lang nanoquery>import http
<lang nanoquery>import http
import url
import url

url = new(URL, "http://www.w3.org/Home.html")
url = new(URL, "http://rosettacode.org/wiki/Rosetta_Code")
client = new(HTTPClient, url.getHost())
client = new(HTTPClient, url.getHost())
client.connect()
client.connect()

response = client.get(url.getFile())
response = client.get(url.getFile())
println response.get("body")</lang>
println response.get("body")</lang>
Line 1,363: Line 1,857:
using System.Net;
using System.Net;
using System.IO;
using System.IO;

module HTTP {
module HTTP
Main() : void {
{
Main() : void
{
def wc = WebClient();
def wc = WebClient();
def myStream = wc.OpenRead("http://www.w3.org/Home.html");
def myStream = wc.OpenRead("http://rosettacode.org");
def sr = StreamReader(myStream);
def sr = StreamReader(myStream);
WriteLine(sr.ReadToEnd());
WriteLine(sr.ReadToEnd());
myStream.Close()
myStream.Close()
Line 1,376: Line 1,874:
{{trans|Java}}
{{trans|Java}}
An implementation of the [[#Java|Java]] version shown above; demonstrating NetRexx's ability to exploit the rich Java SDK.
An implementation of the [[#Java|Java]] version shown above; demonstrating NetRexx's ability to exploit the rich Java SDK.

<lang NetRexx>options replace format comments java crossref symbols binary
<lang NetRexx>/* NetRexx */
options replace format comments java crossref symbols binary

import java.util.Scanner
import java.util.Scanner
import java.net.URL
import java.net.URL

do
do
rosettaUrl = "http://www.w3.org/Home.html"
rosettaUrl = "http://www.rosettacode.org"
sc = Scanner(URL(rosettaUrl).openStream)
sc = Scanner(URL(rosettaUrl).openStream)
loop while sc.hasNext
loop while sc.hasNext
say sc.nextLine
say sc.nextLine
end
end
catch ex = Exception
catch ex = Exception
ex.printStackTrace
ex.printStackTrace
end
end

return</lang>
return</lang>


=={{header|NewLisp}}==
=={{header|NewLisp}}==
<lang NewLisp>(get-url "http://www.w3.org/Home.html")</lang>
<lang NewLisp>
(get-url "http://www.rosettacode.org")
</lang>


=={{header|Nim}}==
=={{header|Nim}}==
Line 1,402: Line 1,907:
<lang objeck>use HTTP;
<lang objeck>use HTTP;
use Collection;
use Collection;

class HttpTest {
class HttpTest {
function : Main(args : String[]) ~ Nil {
function : Main(args : String[]) ~ Nil {
lines := HttpClient->New()->Get("http://www.w3.org/Home.html");
lines := HttpClient->New()->Get("http://rosettacode.org");
each(i : lines) {
each(i : lines) {
lines->Get(i)->As(String)->PrintLine();
lines->Get(i)->As(String)->PrintLine();
};
};
}
}
}</lang>
}</lang>


=={{header|Objective-C}}==
=={{header|Objective-C}}==
<lang objc>#import <Foundation/Foundation.h>
<lang objc>#import <Foundation/Foundation.h>

int main (int argc, const char * argv[]) {
int main (int argc, const char * argv[]) {
@autoreleasepool {
@autoreleasepool {

NSError *error;
NSError *error;
NSURLResponse *response;
NSURLResponse *response;
NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://www.w3.org/Home.html"]]
NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://rosettacode.org"]]
returningResponse:&response error:&error];
returningResponse:&response
error:&error];
NSLog(@"%@", [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding]);

NSLog(@"%@", [[NSString alloc] initWithData:data
encoding:NSUTF8StringEncoding]);

}
}
return 0;
return 0;
Line 1,425: Line 1,937:


=={{header|OCaml}}==
=={{header|OCaml}}==
<lang ocaml>let () =
<lang ocaml>
let () =
let url = "http://www.w3.org/Home.html" in
let url = "http://www.rosettacode.org" in
let _,_, page_content = make_request ~url ~kind:GET () in
print_endline page_content;
let _,_, page_content = make_request ~url ~kind:GET () in
print_endline page_content;
;;</lang>
;;
</lang>

The source code of the function ''make_request'' is [[Web_Scraping/OCaml|here]].
The source code of the function ''make_request'' is [[Web_Scraping/OCaml|here]].


=={{header|ooRexx}}==
=={{header|ooRexx}}==
Needs bsf4oorexx from sourceforge.
Got this from a friend. Needs bsf4oorexx from sourceforge.
<br>Note that rosettacode.org (as used by java and NetRexx) does not permit this access!
<lang oorexx>url=.bsf~new("java.net.URL", "http://www.w3.org/Home.html")
<lang oorexx>url=.bsf~new("java.net.URL","http://teletext.orf.at")
sc =.bsf~new("java.util.Scanner",url~openStream)
sc =.bsf~new("java.util.Scanner",url~openStream)
loop while sc~hasNext
loop while sc~hasNext
say sc~nextLine
say sc~nextLine
End
End
::requires BSF.CLS -- get Java camouflaging support</lang>
::requires BSF.CLS -- get Java camouflaging support</lang>
{{out}}
massaged to avoid problems.
<pre><-!DOCTYPE HTML-
..
-/html-</pre>


=={{header|Oz}}==
=={{header|Oz}}==
When creating a file object, it is possible to specify an URL instead of a filename:
When creating a file object, it is possible to specify an URL instead of a filename:
<lang oz>declare
<lang oz>
declare
fun {GetPage Url}
F = {New Open.file init(url:Url)}
fun {GetPage Url}
Contents = {F read(list:$ size:all)}
F = {New Open.file init(url:Url)}
Contents = {F read(list:$ size:all)}
in
in
{F close}
Contents
{F close}
end
Contents
end
in
in
{System.showInfo {GetPage "http://www.w3.org/Home.html"}}</lang>
{System.showInfo {GetPage "http://www.rosettacode.org"}}
</lang>

{{libheader|OzHttpClient}}
{{libheader|OzHttpClient}}

If you need more fine-grained control of the request, you could use a custom library:
If you need more fine-grained control of the request, you could use a custom library:
<lang oz>declare
<lang oz>
declare
[HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}
[HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}
fun {GetPage Url}

Client = {New HTTPClient.urlGET init(inPrms(toFile:false toStrm:true) httpReqPrms)}
fun {GetPage Url}
OutParams
HttpResponseParams
Client = {New HTTPClient.urlGET
init(inPrms(toFile:false toStrm:true)
in
httpReqPrms
{Client getService(Url ?OutParams ?HttpResponseParams)}
)}
{Client closeAll(true)}
OutParams.sOut
OutParams
HttpResponseParams
end
in
{Client getService(Url ?OutParams ?HttpResponseParams)}
{Client closeAll(true)}
OutParams.sOut
end
in
in
{System.showInfo {GetPage "http://www.w3.org/Home.html"}}</lang>
{System.showInfo {GetPage "http://www.rosettacode.org"}}
</lang>


=={{header|Pascal}}==
=={{header|Pascal}}==
Line 1,474: Line 2,005:
<lang pascal>{$mode objfpc}{$H+}
<lang pascal>{$mode objfpc}{$H+}
uses fphttpclient;
uses fphttpclient;

var
var
s: string;
s: string;
hc: tfphttpclient;
hc: tfphttpclient;

begin
begin
hc := tfphttpclient.create(nil);
hc := tfphttpclient.create(nil);
try
try
s := hc.get('http://www.example.com')
s := hc.get('http://www.example.com')
finally
finally
hc.free
hc.free
end;
end;
writeln(s)
writeln(s)
end.</lang>
end.</lang>


{{works with|Free Pascal}} {{libheader|CThreads}} {{libheader|Classes}} {{libheader|httpsend}}
{{works with|Free Pascal}} {{libheader|CThreads}} {{libheader|Classes}} {{libheader|httpsend}}
<lang pascal>program http;
<lang pascal>program http;

{$mode objfpc}{$H+}
{$mode objfpc}{$H+}
{$APPTYPE CONSOLE}
{$APPTYPE CONSOLE}

{$DEFINE DEBUG}
{$DEFINE DEBUG}

uses
uses
{$IFDEF UNIX}
{$IFDEF UNIX}{$IFDEF UseCThreads}
cthreads,
{$IFDEF UseCThreads}
{$ENDIF}{$ENDIF}
cthreads,
Classes, httpsend; // Synapse httpsend class
{$ENDIF}
{$ENDIF}
Classes,
// Synapse httpsend class
httpsend;
{$R *.res}
{$R *.res}

var
var
Response: TStrings;
Response: TStrings;
HTTPObj: THTTPSend;
HTTPObj: THTTPSend;

begin
begin
HTTPObj := THTTPSend.Create;
HTTPObj := THTTPSend.Create;
try
{ Stringlist object to capture HTML returned
from URL }
Response := TStringList.Create;
try
try
if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
Response := TStringList.Create;
try
begin
{ Load HTTP Document into Stringlist }
if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
Response.LoadFromStream(HTTPObj.Document);
begin
{ Write the response to the console window }
Response.LoadFromStream(HTTPObj.Document);
Writeln(Response.Text);
Writeln(Response.Text);
end
end
else
else
Writeln('Error retrieving data');
Writeln('Error retrieving data');

finally
Response.Free;
end;
finally
finally
HTTPObj.Free;
Response.Free;
end;
end;

Readln;
finally
HTTPObj.Free;
end;

// Keep console window open
Readln;

end.</lang>
end.</lang>


=={{header|Peloton}}==
=={{header|Peloton}}==
English dialect, short form:
English dialect, short form:
<lang sgml>
<lang sgml><@ SAYURLLIT>http://www.w3.org/Home.html</@></lang>
<@ SAYURLLIT>http://rosettacode.org/wiki/Main_Page</@>
</lang>

English dialect, padded variable-length form:
English dialect, padded variable-length form:
<lang sgml>
<lang sgml><# SAY URLSOURCE LITERAL>http://www.w3.org/Home.html</#></lang>
<# SAY URLSOURCE LITERAL>http://rosettacode.org/wiki/Main_Page</#>
</lang>


=={{header|Perl}}==
=={{header|Perl}}==

<lang perl>use HTTP::Tiny;
===Core example===
my $response = HTTP::Tiny -> new -> get("http://www.w3.org/Home.html");
{{libheader|HTTP/Tiny}}
print $response -> {content};</lang>
{{works with|Perl|5.14}}
{{works with|Perl/HTTP/Tiny}}

This sample is nearly identical to the LWP sample except that it uses HTTP::Tiny which was added to the core libraries in [[Perl/5.14]].

<lang perl>use strict; use warnings;
require 5.014; # check HTTP::Tiny part of core
use HTTP::Tiny;

print( HTTP::Tiny->new()->get( 'http://rosettacode.org')->{content} );</lang>

===Library examples===

===={{libheader|LWP}}====
{{works with|Perl/LWP}}

Classic LWP sample.

<lang perl>use LWP::Simple qw/get $ua/;
$ua->agent(undef) ; # cloudflare blocks default LWP agent
print( get("http://www.rosettacode.org") );</lang>

or with more error-checking

<lang perl>use strict;
use LWP::UserAgent;

my $url = 'http://www.rosettacode.org';
my $response = LWP::UserAgent->new->get( $url );

$response->is_success or die "Failed to GET '$url': ", $response->status_line;

print $response->as_string</lang>


=={{header|Phix}}==
=={{header|Phix}}==
Line 1,542: Line 2,125:
curl_global_init()
curl_global_init()
atom curl = curl_easy_init()
atom curl = curl_easy_init()
curl_easy_setopt(curl, CURLOPT_URL, "http://www.w3.org/Home.html")
curl_easy_setopt(curl, CURLOPT_URL, "http://rosettacode.org/robots.txt")
object res = curl_easy_perform_ex(curl)
object res = curl_easy_perform_ex(curl)
curl_easy_cleanup(curl)
curl_easy_cleanup(curl)
curl_global_cleanup()
curl_global_cleanup()
puts(1,res)</lang>
puts(1,res)</lang>
{{out}}
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>


=={{header|PHP}}==
=={{header|PHP}}==
<lang php>
<lang php>readfile("http://www.w3.org/Home.html");</lang>
readfile("http://www.rosettacode.org");
</lang>


=={{header|PicoLisp}}==
=={{header|PicoLisp}}==
<lang PicoLisp>(load "@lib/http.l")
<lang PicoLisp>
(load "@lib/http.l")
(client "www.w3.org/Home.html" 80 NIL (out NIL (echo)))</lang>

(client "rosettacode.org" 80 NIL # Connect to rosettacode
(out NIL (echo)) ) # Echo to standard output
</lang>


=={{header|Pike}}==
=={{header|Pike}}==
<lang pike>
<lang pike>write("%s", Protocols.HTTP.get_url_data("http://www.w3.org/Home.html"));</lang>
write("%s",Protocols.HTTP.get_url_data("http://www.rosettacode.org"));
</lang>


=={{header|PowerShell}}==
=={{header|PowerShell}}==
<lang powershell>
<lang powershell>Invoke-WebRequest -MaximumRedirection 0 -URI http://www.w3.org/Home.html</lang>
$wc = New-Object Net.WebClient
$wc.DownloadString('http://www.rosettacode.org')
</lang>

=={{header|Prolog}}==
Works with SWI-Prolog and library http/http_open. (Extract from the doc).

<lang Prolog>
:- use_module(library( http/http_open )).


=={{header|SWI-Prolog}}==
<lang Prolog>:- use_module(library(http/http_open)).
http :-
http :-
http_open('http://www.w3.org/Home.html', In, []),
http_open('http://www.rosettacode.org/',In, []),
copy_stream_data(In, user_output),
copy_stream_data(In, user_output),
close(In).</lang>
close(In).
</lang>


=={{header|PureBasic}}==
=={{header|PureBasic}}==
<lang PureBasic>InitNetwork()
<lang PureBasic>
InitNetwork()
OpenConsole()
OpenConsole()

tmpdir$ = GetTemporaryDirectory()
tmpdir$ = GetTemporaryDirectory()
filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html"
filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html"

If ReceiveHTTPFile("http://www.w3.org/Home.html", filename$)
If ReadFile(1, filename$)
If ReceiveHTTPFile("http://rosettacode.org/wiki/Main_Page", filename$)
If ReadFile(1, filename$)
Repeat
Repeat
PrintN(ReadString(1))
Until Eof(1)
PrintN(ReadString(1))
Input()
Until Eof(1)
CloseFile(1)
Input()
; to prevent console from closing if on windows
EndIf
DeleteFile(filename$)
CloseFile(1)
EndIf</lang>
EndIf
DeleteFile(filename$)
Using general networking commands.
EndIf
<lang PureBasic>InitNetwork()
</lang>

Another solution using general networking commands
<lang PureBasic>
InitNetwork()
OpenConsole()
OpenConsole()
id = OpenNetworkConnection("www.w3.org", 80)
id = OpenNetworkConnection("rosettacode.org", 80)
SendNetworkString(id, "GET /Home.html HTTP/1.1" + Chr(10) + "Host: www.w3.org" + Chr(10) + Chr(10))
SendNetworkString(id, "GET /wiki/Main_Page HTTP/1.1" + Chr(10) + "Host: rosettacode.org" + Chr(10) + Chr(10))
Repeat
Repeat
If NetworkClientEvent(id) = 2
If NetworkClientEvent(id) = 2
a$ = Space(1000)
a$ = Space(1000)
ReceiveNetworkData(id, @a$, 1000)
ReceiveNetworkData(id, @a$, 1000)
out$ + a$
out$ + a$
EndIf
EndIf
Until FindString(out$, "</html>", 0)
Until FindString(out$, "</html>", 0)
PrintN(out$)
PrintN(out$)
; next line only to prevent console from closing on Windows
Input()</lang>
Input()
</lang>

Of course you could use wget too.


=={{header|Python}}==
=={{header|Python}}==

<lang python>import http
;Python 3:
connection = http.client.HTTPConnection("www.w3.org")
Using the [http://docs.python.org/py3k/library/urllib.request.html urllib.request] module.
connection.request("GET", "/")
<lang python>
print(connection.getresponse().read())</lang>
import urllib.request
print(urllib.request.urlopen("http://rosettacode.org").read())
</lang>

Using a more low-level [https://docs.python.org/3/library/http.client.html http.client] library.
<lang python>
from http.client import HTTPConnection
conn = HTTPConnection("example.com")
# If you need to use set_tunnel, do so here.
conn.request("GET", "/")
# Alternatively, you can use connect(), followed by the putrequest, putheader and endheaders functions.
result = conn.getresponse()
r1 = result.read() # This retrieves the entire contents.
</lang>

;Python 2:
Using the [http://docs.python.org/library/urllib.html urllib] library.
<lang python>
import urllib
print urllib.urlopen("http://rosettacode.org").read()
</lang>

Using the [http://docs.python.org/library/urllib2.html urllib2] library.
<lang python>
import urllib2
print urllib2.urlopen("http://rosettacode.org").read()
</lang>


{{libheader|Requests}}
{{works with|Python|2.7, 3.4–3.7}}
<lang Python>
import requests
print(requests.get("http://rosettacode.org").text)
</lang>


=={{header|R}}==
=={{header|R}}==
{{libheader|RCurl}}
{{libheader|RCurl}}
{{libheader|XML}}
{{libheader|XML}}

First, retrieve the webpage.
First, retrieve the webpage.

<lang R>library(RCurl)
<lang R>
webpage <- getURL("http://www.w3.org/Home.html")
library(RCurl)
webpage <- getURL("http://www.w3.org/Home.html", .opts=list(followlocation=TRUE))
webpage <- getURL("http://rosettacode.org")
webpage <- getURL("http://www.w3.org/Home.html", .opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))</lang>

#If you are linking to a page that no longer exists and need to follow the redirect, use followlocation=TRUE
webpage <- getURL("http://www.rosettacode.org", .opts=list(followlocation=TRUE))

#If you are behind a proxy server, you will need to use something like:
webpage <- getURL("http://rosettacode.org",
.opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))
#Don't forget that backslashes in your username or password need to be escaped!
</lang>

Now parse the html code into a tree and print the html.
Now parse the html code into a tree and print the html.

<lang R>library(XML)
<lang R>
pagetree <- htmlTreeParse(webpage)
library(XML)
pagetree$children$html</lang>
pagetree <- htmlTreeParse(webpage )
pagetree$children$html
</lang>


=={{header|Racket}}==
=={{header|Racket}}==
<lang Racket>#lang racket
<lang Racket>
#lang racket
(require net/url)
(require net/url)
(copy-port (get-pure-port (string->url "http://www.rosettacode.org")
(
#:redirections 100)
copy-port (
get-pure-port (
(current-output-port))
</lang>
string->url "http://www.w3.org/Home.html"
)
#:redirections 100
)
(current-output-port)
)</lang>


=={{header|Raku}}==
=={{header|Raku}}==
Line 1,634: Line 2,301:
{{libheader|LWP}}
{{libheader|LWP}}
Using LWP::Simple from [https://modules.raku.org/search/?q=LWP%3A%3ASimple the Raku ecosystem].
Using LWP::Simple from [https://modules.raku.org/search/?q=LWP%3A%3ASimple the Raku ecosystem].

<lang perl6>use v6;
<lang perl6>use v6;

use LWP::Simple;
use LWP::Simple;

print LWP::Simple.get("http://www.w3.org/Home.html");</lang>
print LWP::Simple.get("http://www.rosettacode.org");
</lang>

or, without LWP::Simple:
or, without LWP::Simple:

<lang perl6>use v6;
<lang perl6>use v6;

my $socket = IO::Socket::INET.new(host => "www.w3.org", port => 80,);
my $socket = IO::Socket::INET.new(host => "www.rosettacode.org",
$socket.print("GET /Home.html HTTP/1.0\r\n\r\n");
port => 80,);
$socket.print("GET / HTTP/1.0\r\n\r\n");
print $socket.recv();
print $socket.recv();
$socket.close;</lang>
$socket.close;
</lang>


=={{header|REALbasic}}==
=={{header|REALbasic}}==
REALBasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string; in both cases a timeout argument can be passed.
REALBasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string; in both cases a timeout argument can be passed.
<lang REALbasic>Dim sock As New HTTPSocket
<lang REALbasic>
Dim sock As New HTTPSocket
Print(sock.Get("http://www.w3.org/Home.html", 10))</lang>
Print(sock.Get("http://www.rosettacode.org", 10)) //set the timeout period to 10 seconds.
</lang>


=={{header|REBOL}}==
=={{header|REBOL}}==
<lang REBOL>print read http://www.w3.org/Home.html</lang>
<lang REBOL>
print read http://rosettacode.org
</lang>


=={{header|REXX}}==
=={{header|REXX}}==
Line 1,658: Line 2,338:
<lang Rexx>/* ft=rexx */
<lang Rexx>/* ft=rexx */
/* GET2.RX - Display contents of an URL on the terminal. */
/* GET2.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get.rx http://www.w3.org/Home.html */
/* Usage: rexx get.rx http://rosettacode.org */
parse arg url .
parse arg url .
'curl' url</lang>
'curl' url</lang>
Line 1,666: Line 2,346:
<lang Rexx>/* ft=rexx */
<lang Rexx>/* ft=rexx */
/* GET2.RX - Display contents of an URL on the terminal. */
/* GET2.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get2.rx http://www.w3.org/Home.html */
/* Usage: rexx get2.rx http://rosettacode.org */
parse arg url .
parse arg url .
address system 'curl' url with output stem stuff.
address system 'curl' url with output stem stuff.
Line 1,677: Line 2,357:
<lang Rexx>/* ft=rexx */
<lang Rexx>/* ft=rexx */
/* GET3.RX - Display contents of an URL on the terminal. */
/* GET3.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get3.rx http://www.w3.org/Home.html */
/* Usage: rexx get3.rx http://rosettacode.org */
parse arg url .
parse arg url .
address system 'curl' url with output fifo ''
address system 'curl' url with output fifo ''
Line 1,683: Line 2,363:


=={{header|Ring}}==
=={{header|Ring}}==
<lang ring>
<lang ring>See download("http://www.w3.org/Home.html")</lang>
See download("http://rosettacode.org")
</lang>


=={{header|RLaB}}==
=={{header|RLaB}}==
Line 1,725: Line 2,407:
=={{header|Ruby}}==
=={{header|Ruby}}==
The simple way loads the entire content into memory, then prints it.
The simple way loads the entire content into memory, then prints it.

<lang ruby>require 'open-uri'
<lang ruby>
print open("http://www.w3.org/Home.html") {
require 'open-uri'
|f| f.read

}</lang>
print open("http://rosettacode.org") {|f| f.read}
</lang>

If the content might be large, the better way uses FileUtils.copy_stream.
If the content might be large, the better way uses FileUtils.copy_stream.

<lang ruby>require 'fileutils'
<lang ruby>
require 'fileutils'
require 'open-uri'
require 'open-uri'

open("http://www.w3.org/Home.html") {
|f| FileUtils.copy_stream(f, $stdout)
open("http://rosettacode.org/") {|f| FileUtils.copy_stream(f, $stdout)}
}</lang>
</lang>


=={{header|Run BASIC}}==
=={{header|Run BASIC}}==
<lang runbasic>print httpget$("http://www.w3.org/Home.html")</lang>
<lang runbasic>print httpget$("http://rosettacode.org/wiki/Main_Page")</lang>


=={{header|Rust}}==
=={{header|Rust}}==
Cargo.toml
Cargo.toml
<lang toml>[dependencies]
<lang toml>
[dependencies]
hyper = "0.6"</lang>
hyper = "0.6"
</lang>
src/main.rs
src/main.rs
<lang rust>//cargo-deps: hyper="0.6"
<lang rust>
//cargo-deps: hyper="0.6"
// The above line can be used with cargo-script which makes cargo's dependency handling more convenient for small programs
// The above line can be used with cargo-script which makes cargo's dependency handling more convenient for small programs
extern crate hyper;
extern crate hyper;

use std::io::Read;
use std::io::Read;
use hyper::client::Client;
use hyper::client::Client;

fn main() {
fn main() {
let client = Client::new();
let client = Client::new();
let mut resp = client.get("http://www.w3.org/Home.html").send().unwrap();
let mut resp = client.get("http://rosettacode.org").send().unwrap();
let mut body = String::new();
let mut body = String::new();
resp.read_to_string(&mut body).unwrap();
resp.read_to_string(&mut body).unwrap();
println!("{}", body);
println!("{}", body);
}
}</lang>
</lang>


=={{header|Scala}}==
=={{header|Scala}}==
{{libheader|Scala}}
{{libheader|Scala}}
<lang scala>import scala.io.Source
<lang scala>import scala.io.Source

object HttpTest extends App {
object HttpTest extends App {
System.setProperty("http.agent", "*")
System.setProperty("http.agent", "*")

Source.fromURL("http://www.w3.org/Home.html").getLines.foreach(println)
Source.fromURL("http://www.rosettacode.org").getLines.foreach(println)
}</lang>
}</lang>


=={{header|Scheme}}==
=={{header|Scheme}}==
{{works with|Guile}}
{{works with|Guile}}

<lang scheme>(use-modules (ice-9 regex))
<lang scheme>
(define url "http://www.w3.org/Home.html")
; Use the regular expression module to parse the url (included with Guile)
(use-modules (ice-9 regex))

; Set the url and parse the hostname, port, and path into variables
(define url "http://www.rosettacode.org/wiki/HTTP")
(define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase))
(define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase))
(define host (match:substring (regexp-exec r url) 2))
(define host (match:substring (regexp-exec r url) 2))
(define port (match:substring (regexp-exec r url) 4))
(define port (match:substring (regexp-exec r url) 4))
(define path (match:substring (regexp-exec r url) 6))
(define path (match:substring (regexp-exec r url) 6))

; Set port to 80 if it wasn't set above and convert from a string to a number
(if (eq? port #f) (define port "80"))
(if (eq? port #f) (define port "80"))
(define port (string->number port))
(define port (string->number port))

(
; Connect to remote host on specified port
let ((s (socket PF_INET SOCK_STREAM 0)))
(let ((s (socket PF_INET SOCK_STREAM 0)))
(connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)
(connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)
(display "GET " s)

(display path s)
; Send a HTTP request for the specified path
(display " HTTP/1.0\r\n\r\n" s)
(display "GET " s)
(
do ((c (read-char s) (read-char s)))
(display path s)
((eof-object? c))
(display " HTTP/1.0\r\n\r\n" s)

(display c)
; Display the received HTML
)
(do ((c (read-char s) (read-char s))) ((eof-object? c))
)</lang>
(display c)))
</lang>
{{works with|Chicken Scheme}}
{{works with|Chicken Scheme}}
Using the [http://api.call-cc.org/doc/http-client http-client] library, this is trivial.
Using the [http://api.call-cc.org/doc/http-client http-client] library, this is trivial.
<lang scheme>(use http-client)
<lang scheme>
(use http-client)
(print (with-input-from-request "http://www.w3.org/Home.html" #f read-string))</lang>
(print
(with-input-from-request "http://google.com/"
#f read-string))
</lang>


=={{header|Seed7}}==
=={{header|Seed7}}==
Line 1,796: Line 2,504:
contains the function [http://seed7.sourceforge.net/libraries/gethttp.htm#getHttp%28in_string%29 getHttp],
contains the function [http://seed7.sourceforge.net/libraries/gethttp.htm#getHttp%28in_string%29 getHttp],
which gets data specified by an URL using the HTTP protocol.
which gets data specified by an URL using the HTTP protocol.

<lang seed7>$ include "seed7_05.s7i";
<lang seed7>
include "gethttp.s7i";
$ include "seed7_05.s7i";
const proc: main is func begin
include "gethttp.s7i";
writeln(getHttp("www.w3.org/Home.html"));

end func;</lang>
const proc: main is func
begin
writeln(getHttp("www.rosettacode.org"));
end func;</lang>


=={{header|SenseTalk}}==
=={{header|SenseTalk}}==
<lang sensetalk>put url "http://www.w3.org/Home.html"</lang>
<lang sensetalk>put url "http://www.rosettacode.org"</lang>


=={{header|Sidef}}==
=={{header|Sidef}}==
Line 1,809: Line 2,521:
<lang ruby>func get(url) {
<lang ruby>func get(url) {
var lwp = (
var lwp = (
try {
try { require('LWP::UserAgent') }
require('LWP::UserAgent')
catch { warn "'LWP::UserAgent' is not installed!"; return nil }
} catch {
warn "'LWP::UserAgent' is not installed!"; return nil
}
)
)
var ua = lwp.new(agent => 'Mozilla/5.0')
var ua = lwp.new(agent => 'Mozilla/5.0')
Line 1,821: Line 2,530:
return nil
return nil
}
}

print get("http://www.w3.org/Home.html")</lang>
print get("http://rosettacode.org")</lang>


=={{header|Smalltalk}}==
=={{header|Smalltalk}}==
{{works with|Pharo}}
{{works with|Pharo}}
<lang smalltalk>
<lang smalltalk>Transcript show: 'http://www.w3.org/Home.html' asUrl retrieveContents contentStream.</lang>
Transcript show: 'http://rosettacode.org' asUrl retrieveContents contentStream.
</lang>


=={{header|SNOBOL4}}==
=={{header|SNOBOL4}}==
{{works with|Macro SNOBOL4 in C}}
{{works with|Macro SNOBOL4 in C}}
<lang snobol>-include "tcp.sno"
<lang snobol>-include "tcp.sno"
tcp.open(.conn, 'www.w3.org', 'http') :s(cont1)
tcp.open(.conn,'www.rosettacode.org','http') :s(cont1)
terminal = "cannot open" :(end)
terminal = "cannot open" :(end)
cont1 conn = "GET /Home.html HTTP/1.0" char(10) char(10)
cont1 conn = "GET http://rosettacode.org/wiki/Main_Page HTTP/1.0" char(10) char(10)
while output = conn :s(while)
while output = conn :s(while)
tcp.close(.conn)
tcp.close(.conn)
end</lang>
end
</lang>


=={{header|Swift}}==
=={{header|Swift}}==
<lang Swift>import Foundation
<lang Swift>import Foundation

let request = NSURLRequest(URL: NSURL(string: "http://www.w3.org/Home.html")!)
let request = NSURLRequest(URL: NSURL(string: "http://rosettacode.org/")!)
NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {

res, data, err in
// Using trailing closure
NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {res, data, err in
// data is binary
if (data != nil) {
if (data != nil) {
let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
Line 1,847: Line 2,564:
}
}
}
}

CFRunLoopRun()</lang>
CFRunLoopRun() // dispatch</lang>


=={{header|Tcl}}==
=={{header|Tcl}}==
Note that the <code>http</code> package is distributed as part of Tcl.
Note that the <code>http</code> package is distributed as part of Tcl.

<lang tcl>package require http
<lang tcl>
set request [http::geturl "http://www.w3.org/Home.html"]
package require http
set request [http::geturl "http://www.rosettacode.org"]
puts [http::data $request]
puts [http::data $request]
http::cleanup $request</lang>
http::cleanup $request</lang>


=={{header|TSE SAL}}==
=={{header|TSE SAL}}==
<lang TSE SAL>DLL "<urlmon.dll>"
<lang TSE SAL>

INTEGER PROC FNUrlGetSourceApiI(
DLL "<urlmon.dll>"
INTEGER lpunknown,
INTEGER PROC FNUrlGetSourceApiI(
STRING urlS : CSTRVAL,
INTEGER lpunknown,
STRING filenameS : CSTRVAL,
STRING urlS : CSTRVAL,
INTEGER dword,
STRING filenameS : CSTRVAL,
INTEGER tlpbindstatuscallback
INTEGER dword,
) : "URLDownloadToFileA"
INTEGER tlpbindstatuscallback
) : "URLDownloadToFileA"
END
END

// library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53]
// library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53]
PROC PROCUrlGetSource( STRING urlS, STRING filenameS )
PROC PROCUrlGetSource( STRING urlS, STRING filenameS )
FNUrlGetSourceApiI( 0, urlS, filenameS, 0, 0 )
FNUrlGetSourceApiI( 0, urlS, filenameS, 0, 0 )
END
END

PROC Main()
PROC Main()
STRING s1[255] = "http://www.google.com/index.html"
STRING s1[255] = "http://www.google.com/index.html"
STRING s2[255] = "c:\temp\ddd.txt"
STRING s2[255] = "c:\temp\ddd.txt"
IF ( NOT ( Ask( "url: get: source: urlS = ", s1, _EDIT_HISTORY_ ) ) AND ( Length( s1 ) > 0 ) )
IF ( NOT ( Ask( "url: get: source: urlS = ", s1, _EDIT_HISTORY_ ) ) AND ( Length( s1 ) > 0 ) ) RETURN() ENDIF
IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) ) RETURN() ENDIF
RETURN()
ENDIF
IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) )
RETURN()
ENDIF
PROCUrlGetSource( s1, s2 )
PROCUrlGetSource( s1, s2 )
EditFile( s2 )
EditFile( s2 )
END</lang>
END

</lang>


=={{header|TUSCRIPT}}==
=={{header|TUSCRIPT}}==
<lang tuscript>$$ MODE TUSCRIPT
<lang tuscript>
$$ MODE TUSCRIPT
SET DATEN = REQUEST ("http://www.w3.org/Home.html")
SET DATEN = REQUEST ("http://www.rosettacode.org")
*{daten}</lang>
*{daten}
</lang>


=={{header|UNIX Shell}}==
=={{header|UNIX Shell}}==
<lang bash>curl -s -L http://www.w3.org/Home.html</lang>
<lang bash>curl -s -L http://rosettacode.org/</lang>


<lang bash>lynx -source http://www.w3.org/Home.html</lang>
<lang bash>lynx -source http://rosettacode.org/</lang>


<lang bash>wget -O - -q http://www.w3.org/Home.html</lang>
<lang bash>wget -O - -q http://rosettacode.org/</lang>


<lang bash>lftp -c "cat http://www.w3.org/Home.html"</lang>
<lang bash>lftp -c "cat http://rosettacode.org/"</lang>


{{works with|BSD}}
{{works with|BSD}}
<lang bash>ftp -o - http://www.w3.org/Home.html 2>/dev/null</lang>
<lang bash>ftp -o - http://rosettacode.org 2>/dev/null</lang>


=={{header|VBScript}}==
=={{header|VBScript}}==
Line 1,904: Line 2,628:


Based on code at [http://itknowledgeexchange.techtarget.com/vbscript-systems-administrator/how-to-retrieve-html-web-pages-with-vbscript-via-the-microsoftxmlhttp-object/ How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object]
Based on code at [http://itknowledgeexchange.techtarget.com/vbscript-systems-administrator/how-to-retrieve-html-web-pages-with-vbscript-via-the-microsoftxmlhttp-object/ How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object]
<lang vb>Option Explicit
<lang vb>
Option Explicit
Const sURL="http://www.w3.org/Home.html"

Const sURL="http://rosettacode.org/"

Dim oHTTP
Dim oHTTP
Set oHTTP = CreateObject("Microsoft.XmlHTTP")
Set oHTTP = CreateObject("Microsoft.XmlHTTP")

On Error Resume Next
On Error Resume Next
oHTTP.Open "GET", sURL, False
oHTTP.Open "GET", sURL, False
Line 1,916: Line 2,644:
Wscript.Echo "error " & Err.Number & ": " & Err.Description
Wscript.Echo "error " & Err.Number & ": " & Err.Description
End If
End If

Set oHTTP = Nothing</lang>
Set oHTTP = Nothing
</lang>


=={{header|Visual Basic}}==
=={{header|Visual Basic}}==
Line 1,942: Line 2,672:


=={{header|Visual Basic .NET}}==
=={{header|Visual Basic .NET}}==
<lang vbnet>Imports System.Net
<lang vbnet>
Imports System.Net

Dim client As WebClient = New WebClient()
Dim client As WebClient = New WebClient()
Dim content As String = client.DownloadString("http://www.w3.org/Home.html")
Dim content As String = client.DownloadString("http://www.google.com")
Console.WriteLine(content)</lang>
Console.WriteLine(content)
</lang>


=={{header|zkl}}==
=={{header|zkl}}==
File htmlGet.zkl. This uses the HTTP/1.0 protocol to avoid chunked data. Or use cURL (see the HTTPS example).
File htmlGet.zkl. This uses the HTTP/1.0 protocol to avoid chunked data. Or use cURL (see the HTTPS example).
<lang zkl>url := ask(0,"URL: ");
<lang zkl>url := ask(0,"URL: ");

host := url;
host := url;
dir := "/";
dir := "/";
port := 80;
port := 80;
if (n := url.find("/")) {
if (n := url.find("/")) { dir = url[n,*]; host = url[0,n]; }
if (n := host.find(":")) { port = host[n+1,*]; host = host[0,n]; }
dir = url[n,*];

host = url[0,n];
}
if (n := host.find(":")) {
port = host[n+1,*];
host = host[0,n];
}
get := "GET %s HTTP/1.0\r\nHost: %s:%s\r\n\r\n".fmt(dir,host,port.toInt());
get := "GET %s HTTP/1.0\r\nHost: %s:%s\r\n\r\n".fmt(dir,host,port.toInt());
println("-->",get);
println("-->",get);
Line 1,967: Line 2,696:
data := server.read(True);
data := server.read(True);
println(data.text);</lang>
println(data.text);</lang>
zkl htmlGet.zkl rosettacode.org/wiki/HTTP
{{out}}
<pre>
-->GET /wiki/HTTP HTTP/1.0
Host: rosettacode.org:80


HTTP/1.1 200 OK
Server: cloudflare-nginx
Date: Tue, 11 Mar 2014 08:31:43 GMT
Content-Type: text/html; charset=UTF-8
Connection: close
Set-Cookie:XXX
23:50:00 GMT; path=/; domain=.rosettacode.org; HttpOnly
X-Powered-By: PHP/5.3.3-7+squeeze18
X-Content-Type-Options: nosniff
Content-Language: en
ETag: W/"rosettacode:pcache:idhash:3055-0!1!0!!en!2--20140227082903"
Vary: Accept-Encoding,Cookie
Cache-Control: s-maxage=86400, must-revalidate, max-age=0
Last-Modified: Thu, 27 Feb 2014 08:29:03 GMT
Age: 86011
X-Cache: HIT from prgmr2.rosettacode.org
X-Cache-Lookup: HIT from prgmr2.rosettacode.org:80
Via: 1.0 prgmr2.rosettacode.org (squid/3.1.6)
CF-RAY: 109665b7e92a012c-SJC

<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
<title>HTTP - Rosetta Code</title>
...
</pre>


=={{header|Zoea}}==
=={{header|Zoea}}==
<lang Zoea>program: http
<lang Zoea>
program: http
input: 'http://www.w3.org/Home.html'
input: 'https://zoea.co.uk/examples/test.txt'
output: 'hello from zoea'</lang>
output: 'hello from zoea'
</lang>


=={{header|Zsh}}==
=={{header|Zsh}}==
<lang zsh>zmodload zsh/net/tcp
<lang zsh>
zmodload zsh/net/tcp
ztcp www.w3.org 80
ztcp example.com 80
fd=$REPLY
fd=$REPLY
print -l -u $fd -- 'GET /Home.html HTTP/1.1' 'Host: www.w3.org' ''
print -l -u $fd -- 'GET / HTTP/1.1' 'Host: example.com' ''
while read -u $fd -r -e -t 1; do; :; done
while read -u $fd -r -e -t 1; do; :; done
ztcp -c $fd</lang>
ztcp -c $fd
</lang>

{{omit from|Applesoft BASIC|No TCP/IP network support on Apple II}}
{{omit from|Brainf***}}
{{omit from|Commodore BASIC|Does not have network access}}
{{omit from|Inform 7|Does not have network access.}}
{{omit from|Integer BASIC|No TCP/IP network support on Apple II}}
{{omit from|Locomotive Basic|Does not have network access.}}
{{omit from|Lotus 123 Macro Scripting}}
{{omit from|M4}}
{{omit from|Maxima}}
{{omit from|ML/I}}
{{omit from|Openscad}}
{{omit from|PARI/GP}}
{{omit from|PostScript}}
{{omit from|Retro|Does not have network access.}}
{{omit from|SQL PL|Does not have network access}}
{{omit from|TI-83 BASIC|Does not have network access.}}
{{omit from|TI-89 BASIC|Does not have network access.}}
{{omit from|Unlambda|Does not have network access.}}
{{omit from|Yorick|Does not have network access.}}
{{omit from|ZX Spectrum Basic|Does not have network access.}}

Revision as of 12:38, 7 November 2020

Task
HTTP
You are encouraged to solve this task according to the task description, using any language you may know.
Task

Access and print a URL's content (the located resource) to the console.

There is a separate task for HTTPS Requests.

8th

<lang forth> "http://www.rosettacode.org" net:get drop >s . </lang>

ABAP

This works for ABAP Version 7.40 and above <lang ABAP> report z_http.

cl_http_client=>create_by_url(

 exporting
   url                = `http://rosettacode.org/robots.txt`
 importing
   client             = data(http_client)
 exceptions
   argument_not_found = 1
   plugin_not_active  = 2
   internal_error     = 3
   others             = 4 ).

if sy-subrc <> 0.

 data(error_message) = switch string( sy-subrc
   when 1 then `argument_not_found`
   when 2 then `plugin_not_active`
   when 3 then `internal_error`
   when 4 then `other error` ).
 write error_message.
 exit.

endif.

data(rest_http_client) = cast if_rest_client( new cl_rest_http_client( http_client ) ).

rest_http_client->get( ).

data(response_string) = rest_http_client->get_response_entity( )->get_string_data( ).

split response_string at cl_abap_char_utilities=>newline into table data(output_table).

loop at output_table assigning field-symbol(<output_line>).

 write / <output_line>.

endloop. </lang>

Output:
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:

ActionScript

<lang actionscript> package {

   import flash.display.Sprite;
   import flash.events.Event;
   import flash.net.*;
   public class RequestExample extends Sprite
   {
       public function RequestExample()
       {
           var loader:URLLoader = new URLLoader();
           loader.addEventListener(Event.COMPLETE, loadComplete);
           loader.load(new URLRequest("http://www.rosettacode.org"));
       }
       private function loadComplete(evt:Event):void
       {
           trace(evt.target.data);
       }
   }

} </lang>

Ada

Library: AWS

<lang ada> with Ada.Text_IO; use Ada.Text_IO;

with AWS.Client; with AWS.Response;

procedure HTTP_Request is begin

  Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.rosettacode.org")));

end HTTP_Request; </lang>

ALGOL 68

Works with: ALGOL 68 version Revision 1 - however grep in string, http content and str error are from a non-standard library
Works with: ALGOL 68G version Any - tested with release 1.18.0-9h.tiny


<lang algol68> STRING domain="rosettacode.org"; STRING page="wiki/Main_Page";

STRING re success="^HTTP/[0-9.]* 200"; STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*"; STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+";

PROC html page = (REF STRING page) BOOL: (

    BOOL out=grep in string(re success, page, NIL, NIL) = 0;
    IF INT start, end;
       grep in string(re result description, page, start, end) = 0
    THEN
       page:=page[end+1:];
       IF grep in string(re doctype, page, start, end) = 0
       THEN page:=page[start+2:]
       ELSE print ("unknown format retrieving page")
       FI
    ELSE print ("unknown error retrieving page")
    FI;
    out

);

IF STRING reply;

  INT rc =
     http content (reply, domain, "http://"+domain+"/"+page, 0);
  rc = 0 AND html page (reply)

THEN print (reply) ELSE print (strerror (rc)) FI </lang>

Arturo

<lang arturo>print [download "http://google.com"]</lang>

Output:
<!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="es"><head><meta content="Google.es permite acceder a la información mundial en castellano, catalán, gallego, euskara e inglés." name="description"><meta content="noodp" name="robots"><meta content="text/html; charset=UTF-8" http-equiv="Content-Type"><meta content="/images/branding/googleg/1x/googleg_standard_color_128dp.png" itemprop="image"><title>Google</title><script nonce="mEe5oG98axwLddedgOh1JA==">(function(){window.google={kEI:'lp2lXbjlCJGKauK8o9AB',kEXPI:'0,18167,1335579,5663,730,224,510,18,228,819,1535,1617,378,206,1017,53,173,1163,798,10,50,211,452,319,19,96,161,89,193,122,766,81,176,221,1130704,1197793,230,302939,26305,1294,12383,4855,32692,15247,867,12163,16521,363,3320,5505,2436,5948,1119,2,579,727,2431,1362,4323,4967,774,2250,4744,3118,6196,1719,1808,1976,2044,8909,5071,226,897,1119,38,920,2090,2975,2736,49,2606,315,91,2,632,3240,4191,1571,2303,2883,19,319,235,884,904,101,2024,1,370,2778,917,261,731,509,777,7,2796,887,80,601,11,14,1279,2212,202,37,286,5,1252,327,513,324,193,1466,8,48,1

[output truncated]

AutoHotkey

<lang AutoHotkey> UrlDownloadToFile, http://rosettacode.org, url.html Run, cmd /k type url.html </lang>

AWK

Works with: gawk

<lang awk>BEGIN {

 site="en.wikipedia.org"
 path="/wiki/"
 name="Rosetta_Code"
 server = "/inet/tcp/0/" site "/80"
 print "GET " path name " HTTP/1.0" |& server
 print "Host: " site |& server
 print "\r\n\r\n" |& server
 while ( (server |& getline fish) > 0 ) {
   if ( ++scale == 1 )
     ship = fish
   else
     ship = ship "\n" fish
 }
 close(server)
 print ship

}</lang>

BaCon

<lang qbasic>' ' Read and display a website ' IF AMOUNT(ARGUMENT$) = 1 THEN

   website$ = "www.basic-converter.org"

ELSE

   website$ = TOKEN$(ARGUMENT$, 2)

ENDIF

OPEN website$ & ":80" FOR NETWORK AS mynet SEND "GET / HTTP/1.1\r\nHost: " & website$ & "\r\n\r\n" TO mynet REPEAT

   RECEIVE dat$ FROM mynet
   total$ = total$ & dat$

UNTIL ISFALSE(WAIT(mynet, 500)) CLOSE NETWORK mynet PRINT total$ </lang>

Batch File

<lang batch> curl.exe -s -L http://rosettacode.org/ </lang>

BBC BASIC

<lang bbcbasic> SYS "LoadLibrary", "URLMON.DLL" TO urlmon%

     SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile
     
     url$ = "http://www.bbcbasic.co.uk/aboutus.html"
     file$ = @tmp$ + "rosetta.tmp"
     SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail%
     IF fail% ERROR 100, "File download failed"
     
     OSCLI "TYPE """ + file$ + """"</lang>

Biferno

simple one-liner using httpExt and quick print $ <lang Biferno>$httpExt.ExecRemote("www.tabasoft.it")</lang>

C

Library: libcurl

<lang c>

  1. include <stdio.h>
  2. include <stdlib.h>
  3. include <curl/curl.h>

int main(void) {

       CURL *curl;
       char buffer[CURL_ERROR_SIZE];
       if ((curl = curl_easy_init()) != NULL) {
               curl_easy_setopt(curl, CURLOPT_URL, "http://www.rosettacode.org/");
               curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
               curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, buffer);
               if (curl_easy_perform(curl) != CURLE_OK) {
                       fprintf(stderr, "%s\n", buffer);
                       return EXIT_FAILURE;
               }
               curl_easy_cleanup(curl);
       }
       return EXIT_SUCCESS;

} </lang>

C#

<lang csharp> using System; using System.Text; using System.Net;

class Program {

   static void Main(string[] args)
   {
       WebClient wc = new WebClient();
       string content = wc.DownloadString("http://www.google.com");
       Console.WriteLine(content);
   }

} </lang>

C++

<lang cpp>

  1. include <winsock2.h>
  2. include <ws2tcpip.h>
  3. include <iostream>

int main() { WSADATA wsaData; WSAStartup( MAKEWORD( 2, 2 ), &wsaData );

addrinfo *result = NULL; addrinfo hints;

ZeroMemory( &hints, sizeof( hints ) ); hints.ai_family = AF_UNSPEC; hints.ai_socktype = SOCK_STREAM; hints.ai_protocol = IPPROTO_TCP;

getaddrinfo( "74.125.45.100", "80", &hints, &result ); // http://www.google.com

SOCKET s = socket( result->ai_family, result->ai_socktype, result->ai_protocol );

connect( s, result->ai_addr, (int)result->ai_addrlen );

freeaddrinfo( result );

send( s, "GET / HTTP/1.0\n\n", 16, 0 );

char buffer[512]; int bytes;

do { bytes = recv( s, buffer, 512, 0 );

if ( bytes > 0 ) std::cout.write(buffer, bytes); } while ( bytes > 0 );

return 0; } </lang>

Library: U++

<lang cpp>

  1. include <Web/Web.h>

using namespace Upp;

CONSOLE_APP_MAIN { Cout() << HttpClient("www.rosettacode.org").ExecuteRedirect(); } </lang>

Caché ObjectScript

USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest.Server="checkip.dyndns.org"
USER>Do HttpRequest.Get("/")
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()

Clojure

Using the Java API: <lang clojure> (defn get-http [url]

 (let [sc (java.util.Scanner. 

(.openStream (java.net.URL. url)))]

   (while (.hasNext sc)
     (println (.nextLine sc)))))

(get-http "http://www.rosettacode.org") </lang>

Using clojure.contrib.http.agent: <lang clojure> (ns example

 (:use [clojure.contrib.http.agent :only (string http-agent)]))

(println (string (http-agent "http://www.rosettacode.org/"))) </lang>

Works with: Clojure version 1.2

<lang clojure> (print (slurp "http://www.rosettacode.org/")) </lang>

COBOL

Tested with GnuCOBOL

<lang cobol>COBOL >>SOURCE FORMAT IS FIXED

      identification division.
      program-id. curl-rosetta.
      environment division.
      configuration section.
      repository.
          function read-url
          function all intrinsic.
      data division.
      working-storage section.
      copy "gccurlsym.cpy".
      01 web-page             pic x(16777216).
      01 curl-status          usage binary-long.
      01 cli                  pic x(7) external.
         88 helping           values "-h", "-help", "help", spaces.
         88 displaying        value "display".            
         88 summarizing       value "summary". 
     *> ***************************************************************
      procedure division.
      accept cli from command-line
      if helping then
          display "./curl-rosetta [help|display|summary]"
          goback
      end-if
     *>
     *> Read a web resource into fixed ram.
     *>   Caller is in charge of sizing the buffer,
     *>     (or getting trickier with the write callback)
     *> Pass URL and working-storage variable,
     *>   get back libcURL error code or 0 for success
      move read-url("http://www.rosettacode.org", web-page)
        to curl-status
      perform check
      perform show
      goback.
     *> ***************************************************************
     *> Now tesing the result, relying on the gccurlsym
     *>   GnuCOBOL Curl Symbol copy book
      check.
      if curl-status not equal zero then
          display
              curl-status " "
              CURLEMSG(curl-status) upon syserr
      end-if
      .
     *> And display the page
      show.
      if summarizing then
          display "Length: " stored-char-length(web-page)
      end-if
      if displaying then
          display trim(web-page trailing) with no advancing
      end-if
      .
      REPLACE ALSO ==:EXCEPTION-HANDLERS:== BY
      ==
     *> informational warnings and abends
      soft-exception.
        display space upon syserr
        display "--Exception Report-- " upon syserr
        display "Time of exception:   " current-date upon syserr
        display "Module:              " module-id upon syserr
        display "Module-path:         " module-path upon syserr
        display "Module-source:       " module-source upon syserr
        display "Exception-file:      " exception-file upon syserr
        display "Exception-status:    " exception-status upon syserr
        display "Exception-location:  " exception-location upon syserr
        display "Exception-statement: " exception-statement upon syserr
      .
      hard-exception.
          perform soft-exception
          stop run returning 127 
      .
      ==.
      end program curl-rosetta.
     *> ***************************************************************
     *> ***************************************************************
     *>
     *> The function hiding all the curl details
     *>
     *> Purpose:   Call libcURL and read into memory
     *> ***************************************************************
      identification division.
      function-id. read-url.
      environment division.
      configuration section.
      repository.
          function all intrinsic.
      data division.
      working-storage section.
      copy "gccurlsym.cpy".
      replace also ==:CALL-EXCEPTION:== by
      ==
          on exception
              perform hard-exception
      ==.
      01 curl-handle          usage pointer.
      01 callback-handle      usage procedure-pointer.
      01 memory-block.
         05 memory-address    usage pointer sync.
         05 memory-size       usage binary-long sync.
         05 running-total     usage binary-long sync.
      01 curl-result          usage binary-long.
      01 cli                  pic x(7) external.
         88 helping           values "-h", "-help", "help", spaces.
         88 displaying        value "display".            
         88 summarizing       value "summary". 
      linkage section.
      01 url                  pic x any length.
      01 buffer               pic x any length.
      01 curl-status          usage binary-long.
     *> ***************************************************************
      procedure division using url buffer returning curl-status.
      if displaying or summarizing then 
          display "Read: " url upon syserr
      end-if
     *> initialize libcurl, hint at missing library if need be
      call "curl_global_init" using by value CURL_GLOBAL_ALL
          on exception
              display
                  "need libcurl, link with -lcurl" upon syserr
              stop run returning 1
      end-call
     *> initialize handle
      call "curl_easy_init" returning curl-handle
          :CALL-EXCEPTION:
      end-call
      if curl-handle equal NULL then
          display "no curl handle" upon syserr
          stop run returning 1
      end-if
     *> Set the URL
      call "curl_easy_setopt" using
          by value curl-handle
          by value CURLOPT_URL
          by reference concatenate(trim(url trailing), x"00")
          :CALL-EXCEPTION:
      end-call
     *> follow all redirects
      call "curl_easy_setopt" using
          by value curl-handle
          by value CURLOPT_FOLLOWLOCATION
          by value 1
          :CALL-EXCEPTION:
      end-call
     *> set the call back to write to memory
      set callback-handle to address of entry "curl-write-callback"
      call "curl_easy_setopt" using
          by value curl-handle
          by value CURLOPT_WRITEFUNCTION
          by value callback-handle
          :CALL-EXCEPTION:
      end-call
     *> set the curl handle data handling structure
      set memory-address to address of buffer
      move length(buffer) to memory-size
      move 1 to running-total
      call "curl_easy_setopt" using
          by value curl-handle
          by value CURLOPT_WRITEDATA
          by value address of memory-block
          :CALL-EXCEPTION:
      end-call
     *> some servers demand an agent
      call "curl_easy_setopt" using
          by value curl-handle
          by value CURLOPT_USERAGENT
          by reference concatenate("libcurl-agent/1.0", x"00")
          :CALL-EXCEPTION:
      end-call
     *> let curl do all the hard work
      call "curl_easy_perform" using
          by value curl-handle
          returning curl-result
          :CALL-EXCEPTION:
      end-call
     *> the call back will handle filling ram, return the result code
      move curl-result to curl-status
     *> curl clean up, more important if testing cookies
      call "curl_easy_cleanup" using
          by value curl-handle
          returning omitted
          :CALL-EXCEPTION:
      end-call
      goback.
      :EXCEPTION-HANDLERS:
      end function read-url.
     *> ***************************************************************
     *> ***************************************************************
     *> Supporting libcurl callback
      identification division.
      program-id. curl-write-callback.
      environment division.
      configuration section.
      repository.
          function all intrinsic.
      data division.
      working-storage section.
      01 real-size            usage binary-long.
     *> libcURL will pass a pointer to this structure in the callback
      01 memory-block         based.
         05 memory-address    usage pointer sync.
         05 memory-size       usage binary-long sync.
         05 running-total     usage binary-long sync.
      01 content-buffer       pic x(65536) based.
      01 web-space            pic x(16777216) based.
      01 left-over            usage binary-long.
      linkage section.
      01 contents             usage pointer.
      01 element-size         usage binary-long.
      01 element-count        usage binary-long.
      01 memory-structure     usage pointer.
     *> ***************************************************************
      procedure division
          using
             by value contents
             by value element-size
             by value element-count
             by value memory-structure
         returning real-size.
      set address of memory-block to memory-structure
      compute real-size = element-size * element-count end-compute
     *> Fence off the end of buffer
      compute
          left-over = memory-size - running-total
      end-compute
      if left-over > 0 and < real-size then
          move left-over to real-size
      end-if
     *> if there is more buffer, and data not zero length
      if (left-over > 0) and (real-size > 1) then
          set address of content-buffer to contents
          set address of web-space to memory-address
          move content-buffer(1:real-size)
            to web-space(running-total:real-size)
          add real-size to running-total
      else
          display "curl buffer sizing problem" upon syserr
      end-if
      goback.
      end program curl-write-callback.</lang>

and a copybook

<lang cobol> *> manifest constants for libcurl

     *> Usage: COPY occurlsym  inside data division
     *>  Taken from include/curl/curl.h 2013-12-19
     *> Functional enums
      01 CURL_MAX_HTTP_HEADER CONSTANT AS     102400.
      78 CURL_GLOBAL_ALL                      VALUE 3.
      78 CURLOPT_FOLLOWLOCATION               VALUE 52.
      78 CURLOPT_WRITEDATA                    VALUE 10001.
      78 CURLOPT_URL                          VALUE 10002.
      78 CURLOPT_USERAGENT                    VALUE 10018.
      78 CURLOPT_WRITEFUNCTION                VALUE 20011.
      78 CURLOPT_COOKIEFILE                   VALUE 10031.
      78 CURLOPT_COOKIEJAR                    VALUE 10082.
      78 CURLOPT_COOKIELIST                   VALUE 10135.
     *> Informationals
      78 CURLINFO_COOKIELIST                  VALUE 4194332.
     *> Result codes
      78 CURLE_OK                             VALUE 0.
     *> Error codes
      78 CURLE_UNSUPPORTED_PROTOCOL           VALUE 1.
      78 CURLE_FAILED_INIT                    VALUE 2.
      78 CURLE_URL_MALFORMAT                  VALUE 3.
      78 CURLE_OBSOLETE4                      VALUE 4.
      78 CURLE_COULDNT_RESOLVE_PROXY          VALUE 5.
      78 CURLE_COULDNT_RESOLVE_HOST           VALUE 6.
      78 CURLE_COULDNT_CONNECT                VALUE 7.
      78 CURLE_FTP_WEIRD_SERVER_REPLY         VALUE 8.
      78 CURLE_REMOTE_ACCESS_DENIED           VALUE 9.
      78 CURLE_OBSOLETE10                     VALUE 10.
      78 CURLE_FTP_WEIRD_PASS_REPLY           VALUE 11.
      78 CURLE_OBSOLETE12                     VALUE 12.
      78 CURLE_FTP_WEIRD_PASV_REPLY           VALUE 13.
      78 CURLE_FTP_WEIRD_227_FORMAT           VALUE 14.
      78 CURLE_FTP_CANT_GET_HOST              VALUE 15.
      78 CURLE_OBSOLETE16                     VALUE 16.
      78 CURLE_FTP_COULDNT_SET_TYPE           VALUE 17.
      78 CURLE_PARTIAL_FILE                   VALUE 18.
      78 CURLE_FTP_COULDNT_RETR_FILE          VALUE 19.
      78 CURLE_OBSOLETE20                     VALUE 20.
      78 CURLE_QUOTE_ERROR                    VALUE 21.
      78 CURLE_HTTP_RETURNED_ERROR            VALUE 22.
      78 CURLE_WRITE_ERROR                    VALUE 23.
      78 CURLE_OBSOLETE24                     VALUE 24.
      78 CURLE_UPLOAD_FAILED                  VALUE 25.
      78 CURLE_READ_ERROR                     VALUE 26.
      78 CURLE_OUT_OF_MEMORY                  VALUE 27.
      78 CURLE_OPERATION_TIMEDOUT             VALUE 28.
      78 CURLE_OBSOLETE29                     VALUE 29.
      78 CURLE_FTP_PORT_FAILED                VALUE 30.
      78 CURLE_FTP_COULDNT_USE_REST           VALUE 31.
      78 CURLE_OBSOLETE32                     VALUE 32.
      78 CURLE_RANGE_ERROR                    VALUE 33.
      78 CURLE_HTTP_POST_ERROR                VALUE 34.
      78 CURLE_SSL_CONNECT_ERROR              VALUE 35.
      78 CURLE_BAD_DOWNLOAD_RESUME            VALUE 36.
      78 CURLE_FILE_COULDNT_READ_FILE         VALUE 37.
      78 CURLE_LDAP_CANNOT_BIND               VALUE 38.
      78 CURLE_LDAP_SEARCH_FAILED             VALUE 39.
      78 CURLE_OBSOLETE40                     VALUE 40.
      78 CURLE_FUNCTION_NOT_FOUND             VALUE 41.
      78 CURLE_ABORTED_BY_CALLBACK            VALUE 42.
      78 CURLE_BAD_FUNCTION_ARGUMENT          VALUE 43.
      78 CURLE_OBSOLETE44                     VALUE 44.
      78 CURLE_INTERFACE_FAILED               VALUE 45.
      78 CURLE_OBSOLETE46                     VALUE 46.
      78 CURLE_TOO_MANY_REDIRECTS             VALUE 47.
      78 CURLE_UNKNOWN_TELNET_OPTION          VALUE 48.
      78 CURLE_TELNET_OPTION_SYNTAX           VALUE 49.
      78 CURLE_OBSOLETE50                     VALUE 50.
      78 CURLE_PEER_FAILED_VERIFICATION       VALUE 51.
      78 CURLE_GOT_NOTHING                    VALUE 52.
      78 CURLE_SSL_ENGINE_NOTFOUND            VALUE 53.
      78 CURLE_SSL_ENGINE_SETFAILED           VALUE 54.
      78 CURLE_SEND_ERROR                     VALUE 55.
      78 CURLE_RECV_ERROR                     VALUE 56.
      78 CURLE_OBSOLETE57                     VALUE 57.
      78 CURLE_SSL_CERTPROBLEM                VALUE 58.
      78 CURLE_SSL_CIPHER                     VALUE 59.
      78 CURLE_SSL_CACERT                     VALUE 60.
      78 CURLE_BAD_CONTENT_ENCODING           VALUE 61.
      78 CURLE_LDAP_INVALID_URL               VALUE 62.
      78 CURLE_FILESIZE_EXCEEDED              VALUE 63.
      78 CURLE_USE_SSL_FAILED                 VALUE 64.
      78 CURLE_SEND_FAIL_REWIND               VALUE 65.
      78 CURLE_SSL_ENGINE_INITFAILED          VALUE 66.
      78 CURLE_LOGIN_DENIED                   VALUE 67.
      78 CURLE_TFTP_NOTFOUND                  VALUE 68.
      78 CURLE_TFTP_PERM                      VALUE 69.
      78 CURLE_REMOTE_DISK_FULL               VALUE 70.
      78 CURLE_TFTP_ILLEGAL                   VALUE 71.
      78 CURLE_TFTP_UNKNOWNID                 VALUE 72.
      78 CURLE_REMOTE_FILE_EXISTS             VALUE 73.
      78 CURLE_TFTP_NOSUCHUSER                VALUE 74.
      78 CURLE_CONV_FAILED                    VALUE 75.
      78 CURLE_CONV_REQD                      VALUE 76.
      78 CURLE_SSL_CACERT_BADFILE             VALUE 77.
      78 CURLE_REMOTE_FILE_NOT_FOUND          VALUE 78.
      78 CURLE_SSH                            VALUE 79.
      78 CURLE_SSL_SHUTDOWN_FAILED            VALUE 80.
      78 CURLE_AGAIN                          VALUE 81.
     *> Error strings
      01 LIBCURL_ERRORS.
         02 CURLEVALUES.
            03 FILLER PIC X(30) VALUE "CURLE_UNSUPPORTED_PROTOCOL    ".
            03 FILLER PIC X(30) VALUE "CURLE_FAILED_INIT             ".
            03 FILLER PIC X(30) VALUE "CURLE_URL_MALFORMAT           ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE4               ".
            03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_PROXY   ".
            03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_HOST    ".
            03 FILLER PIC X(30) VALUE "CURLE_COULDNT_CONNECT         ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_SERVER_REPLY  ".
            03 FILLER PIC X(30) VALUE "CURLE_REMOTE_ACCESS_DENIED    ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE10              ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASS_REPLY    ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE12              ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASV_REPLY    ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_227_FORMAT    ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_CANT_GET_HOST       ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE16              ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_SET_TYPE    ".
            03 FILLER PIC X(30) VALUE "CURLE_PARTIAL_FILE            ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_RETR_FILE   ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE20              ".
            03 FILLER PIC X(30) VALUE "CURLE_QUOTE_ERROR             ".
            03 FILLER PIC X(30) VALUE "CURLE_HTTP_RETURNED_ERROR     ".
            03 FILLER PIC X(30) VALUE "CURLE_WRITE_ERROR             ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE24              ".
            03 FILLER PIC X(30) VALUE "CURLE_UPLOAD_FAILED           ".
            03 FILLER PIC X(30) VALUE "CURLE_READ_ERROR              ".
            03 FILLER PIC X(30) VALUE "CURLE_OUT_OF_MEMORY           ".
            03 FILLER PIC X(30) VALUE "CURLE_OPERATION_TIMEDOUT      ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE29              ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_PORT_FAILED         ".
            03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_USE_REST    ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE32              ".
            03 FILLER PIC X(30) VALUE "CURLE_RANGE_ERROR             ".
            03 FILLER PIC X(30) VALUE "CURLE_HTTP_POST_ERROR         ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_CONNECT_ERROR       ".
            03 FILLER PIC X(30) VALUE "CURLE_BAD_DOWNLOAD_RESUME     ".
            03 FILLER PIC X(30) VALUE "CURLE_FILE_COULDNT_READ_FILE  ".
            03 FILLER PIC X(30) VALUE "CURLE_LDAP_CANNOT_BIND        ".
            03 FILLER PIC X(30) VALUE "CURLE_LDAP_SEARCH_FAILED      ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE40              ".
            03 FILLER PIC X(30) VALUE "CURLE_FUNCTION_NOT_FOUND      ".
            03 FILLER PIC X(30) VALUE "CURLE_ABORTED_BY_CALLBACK     ".
            03 FILLER PIC X(30) VALUE "CURLE_BAD_FUNCTION_ARGUMENT   ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE44              ".
            03 FILLER PIC X(30) VALUE "CURLE_INTERFACE_FAILED        ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE46              ".
            03 FILLER PIC X(30) VALUE "CURLE_TOO_MANY_REDIRECTS      ".
            03 FILLER PIC X(30) VALUE "CURLE_UNKNOWN_TELNET_OPTION   ".
            03 FILLER PIC X(30) VALUE "CURLE_TELNET_OPTION_SYNTAX    ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE50              ".
            03 FILLER PIC X(30) VALUE "CURLE_PEER_FAILED_VERIFICATION".
            03 FILLER PIC X(30) VALUE "CURLE_GOT_NOTHING             ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_NOTFOUND     ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_SETFAILED    ".
            03 FILLER PIC X(30) VALUE "CURLE_SEND_ERROR              ".
            03 FILLER PIC X(30) VALUE "CURLE_RECV_ERROR              ".
            03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE57              ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_CERTPROBLEM         ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_CIPHER              ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT              ".
            03 FILLER PIC X(30) VALUE "CURLE_BAD_CONTENT_ENCODING    ".
            03 FILLER PIC X(30) VALUE "CURLE_LDAP_INVALID_URL        ".
            03 FILLER PIC X(30) VALUE "CURLE_FILESIZE_EXCEEDED       ".
            03 FILLER PIC X(30) VALUE "CURLE_USE_SSL_FAILED          ".
            03 FILLER PIC X(30) VALUE "CURLE_SEND_FAIL_REWIND        ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_INITFAILED   ".
            03 FILLER PIC X(30) VALUE "CURLE_LOGIN_DENIED            ".
            03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOTFOUND           ".
            03 FILLER PIC X(30) VALUE "CURLE_TFTP_PERM               ".
            03 FILLER PIC X(30) VALUE "CURLE_REMOTE_DISK_FULL        ".
            03 FILLER PIC X(30) VALUE "CURLE_TFTP_ILLEGAL            ".
            03 FILLER PIC X(30) VALUE "CURLE_TFTP_UNKNOWNID          ".
            03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_EXISTS      ".
            03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOSUCHUSER         ".
            03 FILLER PIC X(30) VALUE "CURLE_CONV_FAILED             ".
            03 FILLER PIC X(30) VALUE "CURLE_CONV_REQD               ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT_BADFILE      ".
            03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_NOT_FOUND   ".
            03 FILLER PIC X(30) VALUE "CURLE_SSH                     ".
            03 FILLER PIC X(30) VALUE "CURLE_SSL_SHUTDOWN_FAILED     ".
            03 FILLER PIC X(30) VALUE "CURLE_AGAIN                   ".
      01 FILLER REDEFINES LIBCURL_ERRORS.
         02 CURLEMSG OCCURS 81 TIMES PIC X(30).</lang>
Output:
prompt$ ./curl-rosetta summary
Read: http://www.rosettacode.org
Length: 000024043

prompt$ ./curl-rosetta display
Read: http://www.rosettacode.org
<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
...

ColdFusion

<lang coldfusion>

 <cfhttp url="http://www.rosettacode.org" result="result">
 <cfoutput>#result.FileContent#</cfoutput>

</lang>

Common Lisp

CLISP provides an extension function to read http sources. Other implementations may do this differently.

Works with: CLISP

<lang lisp> (defun wget-clisp (url)

   (ext:with-http-input (stream url)
       (loop for line = (read-line stream nil nil)
           while line
           do (format t "~a~%" line))))

</lang>

Library: DRAKMA

First grabbing the entire body as a string, and then by pulling from a stream (as in the CLISP example).

<lang lisp> (defun wget-drakma-string (url &optional (out *standard-output*))

 "Grab the body as a string, and write it to out."
 (write-string (drakma:http-request url) out))

(defun wget-drakma-stream (url &optional (out *standard-output*))

 "Grab the body as a stream, and write it to out."
 (loop with body = (drakma:http-request url :want-stream t)
       for line = (read-line body nil nil)
       while line do (write-line line out)
       finally (close body)))

</lang>

Crystal

<lang crystal> require "http/client"

HTTP::Client.get("http://google.com") </lang>

D

Library: phobos

<lang D> void main() {

 import std.stdio, std.net.curl;
 writeln(get("http://google.com"));

} </lang>

Library: tango

<lang D> import tango.io.Console; import tango.net.http.HttpGet;

void main() {

 Cout.stream.copy( (new HttpGet("http://google.com")).open );

} </lang>

Or more operating directly on the socket:

<lang D> import tango.io.Console; import tango.net.InternetAddress; import tango.net.device.Socket;

void main() {

 auto site = new Socket;
 site.connect (new InternetAddress("google.com",80)).write ("GET / HTTP/1.0\n\n");
 Cout.stream.copy (site);

} </lang>

Dart

Using the stand-alone VM: <lang d>import 'dart:io'; void main(){

 var url = 'http://rosettacode.org';
 var client = new HttpClient();
 client.getUrl(Uri.parse(url))
       .then((HttpClientRequest request)   => request.close())
       .then((HttpClientResponse response) => response.pipe(stdout));

}</lang>

Delphi

Simple example using the free Synapse TCP/IP library [1]

<lang Delphi> program HTTP;

{$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses

 Classes,
 httpsend; // Synapse httpsend class

var

 Response: TStrings;
 HTTPObj: THTTPSend;

begin

 HTTPObj := THTTPSend.Create;
 try
   { Stringlist object to capture HTML returned
     from URL }
   Response := TStringList.Create;
   try
     if HTTPObj.HTTPMethod('GET','http://www.mgis.uk.com') then
       begin
         { Load HTTP Document into Stringlist }
         Response.LoadFromStream(HTTPObj.Document);
         { Write the response to the console window }
         Writeln(Response.Text);
       end
       else
       Writeln('Error retrieving data');
   finally
     Response.Free;
   end;
 finally
   HTTPObj.Free;
 end;
 // Keep console window open
 Readln;

end. </lang>


Using Indy:

<lang Delphi> program ShowHTTP;

{$APPTYPE CONSOLE}

uses IdHttp;

var

 s: string;
 lHTTP: TIdHTTP;

begin

 lHTTP := TIdHTTP.Create(nil);
 try
   lHTTP.HandleRedirects := True;
   s := lHTTP.Get('http://www.rosettacode.org');
   Writeln(s);
 finally
   lHTTP.Free;
 end;

end. </lang>

Dragon

<lang dragon>select "http" select "std"

http("http://www.rosettacode.org", ::echo)

</lang>

E

<lang e> when (def t := <http://www.rosettacode.org> <- getText()) -> {

   println(t)

} </lang>

EchoLisp

file->string usage: the server must allow cross-domain access, or a browser add-on like cors-everywhere must be installed to bypass cross-domain checking. <lang scheme>

asynchronous call back definition

(define (success name text) (writeln 'Loaded name) (writeln text))

(file->string success "http://www.google.com") </lang>

Emacs Lisp

url.el can download HTTP. url-retrieve-synchronously returns a buffer containing headers and body. Caller kills the buffer when no longer required.

<lang Lisp>(with-current-buffer

   (url-retrieve-synchronously "http://www.rosettacode.org")
 (goto-char (point-min))
 (search-forward "\n\n" nil t)  ;; skip headers
 (prin1 (buffer-substring (point) (point-max)))
 (kill-buffer (current-buffer)))</lang>

Erlang

Synchronous

<lang erlang> -module(main). -export([main/1]).

main([Url|[]]) ->

  inets:start(),
  case http:request(Url) of
      {ok, {_V, _H, Body}} -> io:fwrite("~p~n",[Body]);
      {error, Res} -> io:fwrite("~p~n", [Res])
  end.

</lang>

Asynchronous

<lang erlang> -module(main). -export([main/1]).

main([Url|[]]) ->

  inets:start(),
  http:request(get, {Url, [] }, [], [{sync, false}]),
  receive
      {http, {_ReqId, Res}} -> io:fwrite("~p~n",[Res]);
      _Any -> io:fwrite("Error: ~p~n",[_Any])
      after 10000 -> io:fwrite("Timed out.~n",[])
  end.

</lang>

Using it <lang erlang> escript ./req.erl http://www.rosettacode.org </lang>

F#

In F# we can just use the .NET library to do this so its the same as the C# example.

<lang fsharp> let wget (url : string) =

   use c = new System.Net.WebClient()
   c.DownloadString(url)

printfn "%s" (wget "http://www.rosettacode.org/") </lang>

However unlike C#, F# can use an asynchronous workflow to avoid blocking any threads while waiting for a response from the server. To asynchronously download three url's at once...

<lang fsharp> open System.Net open System.IO

let wgetAsync url =

   async { let request = WebRequest.Create (url:string)
           use! response = request.AsyncGetResponse()
           use responseStream = response.GetResponseStream()
           use reader = new StreamReader(responseStream)
           return reader.ReadToEnd() }

let urls = ["http://www.rosettacode.org/"; "http://www.yahoo.com/"; "http://www.google.com/"] let content = urls

             |> List.map wgetAsync
             |> Async.Parallel
             |> Async.RunSynchronously</lang>

Factor

<lang factor>USE: http.client "http://www.rosettacode.org" http-get nip print </lang>

Forth

Works with: GNU Forth version 0.7.0

This works at the socket level, returning both the HTTP headers and page contents. <lang forth> include unix/socket.fs

s" localhost" 80 open-socket dup s\" GET / HTTP/1.0\n\n" rot write-socket dup pad 8092 read-socket type close-socket </lang>

friendly interactive shell

Translation of: UNIX Shell

<lang fishshell>curl -s -L http://rosettacode.org/</lang>

<lang fishshell>lynx -source http://rosettacode.org/</lang>

<lang fishshell>wget -O - -q http://rosettacode.org/</lang>

<lang fishshell>lftp -c "cat http://rosettacode.org/"</lang>

Works with: BSD

<lang fishshell>ftp -o - http://rosettacode.org ^ /dev/null</lang>

Frink

Frink's read[URL] function works with any URL type supported by your Java Virtual Machine, and returns the results as a single string. <lang frink> print[read["http://frinklang.org/"]] </lang>

Gastona

<lang gastona>#listix#

  <main>
     LOOP, TEXT FILE, http://www.rosettacode.org
         , BODY, @<value>

</lang>

GML

Works with: Game Maker Studio

Any Event <lang gml>get = http_get("http://www.rosettacode.org/");</lang>

HTTP Event <lang gml>if (ds_map_find_value(async_load,"id") == get)

   {
   show_message_async(ds_map_find_value(async_load,"result"));
   }</lang>

Go

<lang go> package main

import (

   "io"
   "log"
   "net/http"
   "os"

)

func main() {

   r, err := http.Get("http://rosettacode.org/robots.txt")
   if err != nil {
       log.Fatalln(err)
   }
   io.Copy(os.Stdout, r.Body)

} </lang>

Output:

User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:

Groovy

<lang groovy> new URL("http://www.rosettacode.org").eachLine { println it } </lang>

GUISS

It would be more appropriate to paste to notepad:

<lang guiss>Start,Programs,Applications,Mozilla Firefox,Inputbox:address bar>www.rosettacode.org,Button:Go, Click:Area:browser window,Type:[Control A],[Control C],Start,Programs,Accessories,Notepad, Menu:Edit,Paste</lang>

Halon

<lang halon>echo http("http://www.rosettacode.org");</lang>

Haskell

Using

Library: HTTP

from HackageDB

<lang haskell> import Network.Browser import Network.HTTP import Network.URI

main = do

   rsp <- Network.Browser.browse $ do
       setAllowRedirects True
       setOutHandler $ const (return ())
       request $ getRequest "http://www.rosettacode.org/"
   putStrLn $ rspBody $ snd rsp

</lang>

Icon and Unicon

Icon

<lang icon> link cfunc procedure main(arglist)

  get(arglist[1])

end

procedure get(url)

  local f, host, port, path
  url ? {
        ="http://" | ="HTTP://"
        host := tab(upto(':/') | 0)
        if not (=":" & (port := integer(tab(upto('/'))))) then port := 80
        if pos(0) then path := "/" else path := tab(0)
  }
  write(host)
  write(path)
  f := tconnect(host, port) | stop("Unable to connect")
  writes(f, "GET ", path | "/" ," HTTP/1.0\r\n\r\n")
  while write(read(f))

end </lang>

Using it <lang icon> icon req.icn http://www.rosettacode.org </lang>

Unicon

Unicon provides improved socket and messaging support without the need for the external function cfunc: <lang unicon> procedure main(arglist) m := open(arglist[1],"m") while write(read(m)) end </lang>

J

Using gethttp from Web Scraping

<lang j>require'web/gethttp' gethttp 'http://www.rosettacode.org' </lang>

Java

<lang java5>import java.util.Scanner; import java.net.URL;

public class Main {

   // Fetches http://www.rosettacode.org and prints the body line by line.
   public static void main(String[] args) throws Exception {
       Scanner sc = new Scanner(new URL("http://www.rosettacode.org").openStream());
       while (sc.hasNext())
           System.out.println(sc.nextLine());
       // close the Scanner so the underlying URL stream is released
       sc.close();
   }

} </lang>

<lang java5> import org.apache.commons.io.IOUtils; import java.net.URL;

public class Main {

   public static void main(String[] args) throws Exception {
       IOUtils.copy(new URL("http://rosettacode.org").openStream(),System.out);    	    	    		    
   }

}</lang>

JavaScript

Browser

<lang JavaScript>var req = new XMLHttpRequest(); req.onload = function() {

 console.log(this.responseText);

};

req.open('get', 'http://rosettacode.org', true); req.send()</lang>

Using fetch API: <lang JavaScript> fetch('http://rosettacode.org').then(function(response) {

 return response.text();

}).then(function(myText) {

 console.log(myText);

}); </lang>

As a repeatable function:

<lang JavaScript>/**

* @name _http
* @description Generic API Client using XMLHttpRequest
* @param {string} url The URI/URL to connect to
* @param {string} method The HTTP method to invoke- GET, POST, etc
* @param {function} callback Once the HTTP request has completed, responseText is passed into this function for execution
* @param {object} params Query Parameters in a JavaScript Object (Optional)
* 
*/

function _http(url, method, callback, params) {

   var xhr,
       reqUrl;
   xhr = new XMLHttpRequest();
   xhr.onreadystatechange = function xhrProc() {
       if (xhr.readyState == 4 && xhr.status == 200) {
           callback(xhr.responseText);
       }
   };


   /** If Query Parameters are present, handle them... */
   if (typeof params === 'undefined') {
       reqUrl = url;
   } else {
       switch (method) {
           case 'GET':
               reqUrl = url + procQueryParams(params);
               break;
           case 'POST':
               reqUrl = url;
               break;
           default:
       }
   }


   /** Send the HTTP Request */
   if (reqUrl) {
       xhr.open(method, reqUrl, true);
       xhr.setRequestHeader("Accept", "application/json");
       if (method === 'POST') {
           xhr.send(params);
       } else {
           xhr.send();
       }
   }


   /**
    * @name procQueryParams
    * @description Return function that converts Query Parameters from a JavaScript Object to a proper URL encoded string
    * @param {object} params Query Parameters in a JavaScript Object
    * 
    */
   function procQueryParams(params) {
       return "?" + Object
           .keys(params)
           .map(function (key) {
               return key + "=" + encodeURIComponent(params[key])
           })
           .join("&")
   }

}</lang>

Using jQuery:

<lang JavaScript>$.get('http://rosettacode.org', function(data) {

 console.log(data);

});</lang>

Node.js

With Node.js, using only the included http module.

<lang javascript>const http = require('http');

http.get('http://rosettacode.org', (resp) => {

 let data = '';

 // A chunk of data has been received.
 resp.on('data', (chunk) => {
   data += chunk;
 });

 // The whole response has been received. Print out the result.
 resp.on('end', () => {
   console.log("Data:", data);
 });

}).on("error", (err) => {

 console.log("Error: " + err.message);

});</lang>

Jsish

Based on Jsi_Wget that ships with Jsish.

<lang javascript>#!/usr/bin/env jsish function httpGet(fileargs:array|string, conf:object=void) {

   var options = { // Web client for downloading files from url
       headers     : [],           // Header fields to send.
       nowait      : false,        // Just return object: caller will call update.
       onDone      : null,         // Callback when done.
       wsdebug     : 0             // WebSockets debug level.
   };
   
    var self = {
        address     : '',
        done        : false,
        path        : '',
        port        : -1,
        post        : '',           // Post file upload (UNIMPL).
        scheme      : 'http',       // Url scheme
        protocol    : 'get',
        url         : null,
        response    : ''
    };
   parseOpts(self, options, conf);
   
   if (self.port === -1)
       self.port = 80;
       
   function WsRecv(ws:userobj, id:number, str:string) {
       LogDebug("LEN: "+str.length);
       LogTrace("DATA", str);
       self.response += str;
   }
   
   function WsClose(ws:userobj|null, id:number) {
       LogDebug("CLOSE");
       self.done = true;
       if (self.onDone)
           self.onDone(id);
   }
   
   function main() {
       if (self.Debug)
           debugger;
       if (typeof(fileargs) === 'string')
           fileargs = [fileargs];
       if (!fileargs || fileargs.length !== 1)
           throw("expected a url arg");
       self.url = fileargs[0];
       var m = self.url.match(/^([a-zA-Z]+):\/\/([^\/]*+)(.*)$/);
       if (!m)
           throw('invalid url: '+self.url);
       self.scheme = m[1];
       self.address = m[2];
       self.path = m[3];
       var as = self.address.split(':');
       if (as.length==2) {
           self.port = parseInt(as[1]);
           self.address = as[0];
       } else  if (as.length != 1)
           throw('bad port in address: '+self.address);
        if (self.path=='')
           self.path = '/index.html';
       if (self.post.length)
           self.protocol = 'post';
       
       var wsopts = {
           client:true,
           onRecv:WsRecv,
           onClose:WsClose,
           debug:self.wsdebug,
           rootdir:self.path,
           port:self.port,
           address:self.address,
           protocol:self.protocol,
           clientHost:self.address
       };
       if (self.post.length)
           wsopts.post = self.post;
       if (self.headers.length)
           wsopts.headers = self.headers;
       if (self.scheme === 'https') {
           if (!Interp.conf('hasOpenSSL'))
               puts('SSL is not compiled in: falling back to http:');
           else {
               if (self.port === 80)
                    wsopts.port = 443;
               wsopts.use_ssl = true;
           }
       }
       LogDebug("Starting:", conf, wsopts);
       self.ws = new WebSocket( wsopts );
       if (self.nowait)
           return self;
       while (!self.done) {
           update(200);
           LogTrace("UPDATE");
       }
       delete self.ws;
       return self.response;
   }
   return main();

}

provide(httpGet, "0.60");

if (isMain())

   runModule(httpGet);</lang>
Output:
prompt$ jsish
# require('httpGet')
0.6
# var page = httpGet('http://rosettacode.org/robots.txt')
variable
# page
"User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:

"

Julia

<lang Julia>readurl(url) = open(readlines, download(url))

readurl("http://rosettacode.org/index.html")</lang>

Kotlin

<lang scala>// version 1.1.2

import java.net.URL import java.io.InputStreamReader import java.util.Scanner

fun main(args: Array<String>) {

   val url = URL("http://www.puzzlers.org/pub/wordlists/unixdict.txt")
   val isr = InputStreamReader(url.openStream())
   val sc = Scanner(isr)
   while (sc.hasNextLine()) println(sc.nextLine())
   sc.close()

}</lang>

Lasso

include_url is a wrapper for Lasso's curl datatype, however it can be achieved in several ways. <lang Lasso>// using include_url wrapper: include_url('http://rosettacode.org/index.html')

// one line curl curl('http://rosettacode.org/index')->result->asString

// using curl for more complex operations and feedback local(x = curl('http://rosettacode.org/index')) local(y = #x->result)

#y->asString</lang>

LFE

Synchronous

<lang lisp>(: inets start) (case (: httpc request '"http://lfe.github.io")

 ((tuple 'ok result)
   (: io format '"Result: ~p" (list result))) 
 ((tuple 'error reason) 
   (: io format '"Error: ~p~n" (list reason))))

</lang>

Asynchronous

<lang lisp>(: inets start) (let* ((method 'get)

      (url '"http://lfe.github.io")
      (headers ())
      (request-data (tuple url headers))
      (http-options ())
      (request-options (list (tuple 'sync 'false))))
 (: httpc request method request-data http-options request-options)
 (receive
   ((tuple 'http (tuple request-id (tuple 'error reason)))
    (: io format '"Error: ~p~n" (list reason)))
   ((tuple 'http (tuple request-id result))
    (: io format '"Result: ~p~n" (list result))))))

</lang>

Liberty BASIC

Uses a dll call and a timer to allow time to receive the file. <lang lb> result = DownloadToFile( "http://rosettacode.org/wiki/Main_Page", "in.html") timer 2000, [on] wait [on] timer 0 if result <> 0 then print "Error downloading."

end

Function DownloadToFile( urlfile$, localfile$)

   open "URLmon" for dll as #url
   calldll #url, "URLDownloadToFileA",_
   0 as long,_         'null
   urlfile$ as ptr,_   'url to download
   localfile$ as ptr,_ 'save file name
   0 as long,_         'reserved, must be 0
   0 as long,_         'callback address, can be 0
   DownloadToFile as ulong  '0=success
   close #url

end function </lang>

Lingo

HTTP requests based on Director's native HTTP facilities - i.e. without using a 3rd party plugin ("Xtra") - are asynchronous. A simple implementation of a HTTP GET request might look like this:

Parent script "SimpleHttpGet": <lang lingo>property _netID property _cbHandler property _cbTarget


-- Simple HTTP GET request -- @param {string} url -- @param {symbol} cbHandler -- @param {object} [cbTarget=_movie]


on new (me, url, cbHandler, cbTarget)

 if voidP(cbTarget) then cbTarget = _movie
 me._netID = getNetText(url)
 me._cbHandler = cbHandler
 me._cbTarget = cbTarget
 _movie.actorList.add(me)
 return me

end


-- @callback


on stepFrame (me)

 if netDone(me._netID) then
   res = netTextResult(me._netID)
   err = netError(me._netID)
   _movie.actorList.deleteOne(me)
   call(me._cbHandler, me._cbTarget, res, err)
 end if

end</lang>

In some movie script: <lang lingo>---------------------------------------- --


on getAdobeHomePage ()

 script("SimpleHttpGet").new("http://www.adobe.com/", #printResult)

end


-- @callback


on printResult (res, err)

 if err="OK" then
   put res
 else
   put "Network Error:" && err
 end if

end</lang>

Executed in the "Message Window" (=Director's interactive Lingo console): <lang lingo>getAdobeHomePage() -- "<!doctype html> ...</lang>

LiveCode

Without a callback handler the get URL method will block until complete <lang LiveCode>put true into libURLFollowHttpRedirects get URL "http://httpbin.org/html" put it</lang> Non-blocking version <lang LiveCode>on myUrlDownloadFinished

  answer "Download Complete" with "Okay"

end myUrlDownloadFinished

command getWebResource

   load URL "http://httpbin.org/html" with message "myUrlDownloadFinished"

end getWebResource</lang>

LSL

To test it yourself; rez a box on the ground, and add the following as a New Script. <lang LSL>string sURL = "http://www.RosettaCode.Org"; key kHttpRequestId; default { state_entry() { kHttpRequestId = llHTTPRequest(sURL, [], ""); } http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) { if(kRequestId==kHttpRequestId) { llOwnerSay("Status="+(string)iStatus); integer x = 0; for(x=0 ; x<llGetListLength(lMetaData) ; x++) { llOwnerSay("llList2String(lMetaData, "+(string)x+")="+llList2String(lMetaData, x)); } list lBody = llParseString2List(sBody, ["\n"], []); for(x=0 ; x<llGetListLength(lBody) ; x++) { llOwnerSay("llList2String(lBody, "+(string)x+")="+llList2String(lBody, x)); } } } } </lang> Output:

Status=200
llList2String(lMetaData, 0)=0
llList2String(lMetaData, 1)=2048
llList2String(lBody, 0)=<!DOCTYPE html>
llList2String(lBody, 1)=<html lang="en" dir="ltr" class="client-nojs">
llList2String(lBody, 2)=<head>
llList2String(lBody, 3)=<title>Rosetta Code</title>
llList2String(lBody, 4)=<meta charset="UTF-8" />
llList2String(lBody, 5)=<meta name="generator" content="MediaWiki 1.18.0" />
llList2String(lBody, 6)=<link rel="shortcut icon" href="/favicon.ico" />
llList2String(lBody, 7)=<link rel="search" type="application/opensearchdescription+xml" href="/mw/opensearch_desc.php" title="Rosetta Code (en)" />
llList2String(lBody, 8)=<link rel="EditURI" type="application/rsd+xml" href="http://rosettacode.org/mw/api.php?action=rsd" />
llList2String(lBody, 9)=<link rel="copyright" href="http://www.gnu.org/licenses/fdl-1.2.html" />
llList2String(lBody, 10)=<link rel="alternate" type="application/atom+xml" title="Rosetta Code Atom feed" href="/mw/index.php?title=Special:RecentChanges&feed=atom" />
llList2String(lBody, 11)=<link rel="stylesheet" href="/mw/load.php?debug=false&lang=en&modules=mediawiki.legacy.commonPrint%2Cshared%7Cskins.vector&only=styles&skin=vector&*" />
llList2String(lBody, 12)=<meta name="ResourceLoaderDynamicStyles" content="" />
llList2String(lBody, 13)=<link rel="stylesheet" href="/mw/load.php?debug=false&lang=en&modules=site&only=styles&skin=vector&*" />
llList2String(lBody, 14)=<style>a:lang(ar),a:lang(ckb),a:lang(fa),a:lang(kk-arab),a:lang(mzn),a:lang(ps),a:lang(ur){text-decoration:none}a.new,#quickbar a.new{color:#ba0000}
...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...

Lua

Library: LuaSocket

<lang Lua> local http = require("socket.http") local url = require("socket.url") local page = http.request('http://www.google.com/m/search?q=' .. url.escape("lua")) print(page) </lang>

M2000 Interpreter

We use Async read from Microsoft.XMLHTTP So we use Threads (duration is in millisecond) M2000 can use COM objects, using Declare, Method and With statements. Using With statement we can make objects properties like ReadyState as variables (some of them as read only)

<lang M2000 Interpreter> Module CheckIt {

     Declare  xml "Microsoft.XMLHTTP"
     const testUrl$ = "http://www.rosettacode.org"
     With  xml, "readyState" as ReadyState
     Method xml "Open", "Get", testUrl$, True  ' True means Async
     Method xml "send"
     \\ We set a thread to count time
     k=0
     Thread {
           k++
     }  as TimeOut interval 100
     \\ In main thread we can check ReadyState and Mouse button
     Task.Main 100 {
           Print ReadyState
           If ReadyState=4 then exit
           if k>20 then exit   ' 20*100= 2 sec
           if mouse then exit ' exit if mouse click
     }
     \\ So now we can read
     if ReadyState=4 then {
           With  xml, "responseText" AS AA$
           \\ break AA$ to lines
           Document BB$=AA$
           \\ using line breaks as CRLF
           Report BB$
     }
     Declare xml Nothing

} CheckIt </lang>

Maple

In Maple 18 or later: <lang Maple> content := URL:-Get( "http://www.google.com/" ); </lang>

In Maple 17 or earlier: <lang Maple> content := HTTP:-Get( "http://www.google.com/" ); </lang>

Mathematica / Wolfram Language

<lang Mathematica> Print[Import["http://www.google.com/webhp?complete=1&hl=en", "Source"]] </lang>

MATLAB / Octave

urlread is MATLAB's function for making URL requests. The documentation for Octave is available here urlread.

In this example we initiate an HTTP request for a single random number from random.org: <lang MATLAB> >> random = urlread('http://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new')

random =

61 </lang>

It is possible to make more complicated requests, specifically "GET" and "POST," which is explained in the documentation.

MIRC Scripting Language

See HTTP/MIRC Scripting Language

Nanoquery

<lang nanoquery>import http import url

url = new(URL, "http://rosettacode.org/wiki/Rosetta_Code") client = new(HTTPClient, url.getHost()) client.connect()

response = client.get(url.getFile()) println response.get("body")</lang>

Nemerle

<lang Nemerle>using System; using System.Console; using System.Net; using System.IO;

module HTTP {

   Main() : void
   {
       def wc = WebClient();
       def myStream = wc.OpenRead("http://rosettacode.org");
       def sr = StreamReader(myStream);
       
       WriteLine(sr.ReadToEnd());
       myStream.Close()
   }

}</lang>

NetRexx

Translation of: Java

An implementation of the Java version shown above; demonstrating NetRexx's ability to exploit the rich Java SDK.

<lang NetRexx>/* NetRexx */ options replace format comments java crossref symbols binary

import java.util.Scanner import java.net.URL

do

 rosettaUrl = "http://www.rosettacode.org"
 sc = Scanner(URL(rosettaUrl).openStream)
 loop while sc.hasNext
   say sc.nextLine
 end

catch ex = Exception

 ex.printStackTrace

end

return</lang>

NewLisp

<lang NewLisp> (get-url "http://www.rosettacode.org") </lang>

Nim

<lang nim>import httpclient

var client = newHttpClient() echo client.getContent "http://rosettacode.org"</lang>

Objeck

<lang objeck>use HTTP; use Collection;

class HttpTest {

 function : Main(args : String[]) ~ Nil {
   lines := HttpClient->New()->Get("http://rosettacode.org");
   each(i : lines) {
     lines->Get(i)->As(String)->PrintLine();
   };
 }

}</lang>

Objective-C

<lang objc>#import <Foundation/Foundation.h>

int main (int argc, const char * argv[]) {

   @autoreleasepool {
       NSError        *error;
       NSURLResponse *response;
       NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://rosettacode.org"]]
                                               returningResponse:&response
                                                           error:&error];
       NSLog(@"%@", [[NSString alloc] initWithData:data
                                             encoding:NSUTF8StringEncoding]);
   }
   return 0;

}</lang>

OCaml

<lang ocaml> let () =

 let url = "http://www.rosettacode.org" in
 let _,_, page_content = make_request ~url ~kind:GET () in
 print_endline page_content;

</lang>

The source code of the function make_request is here.

ooRexx

Got this from a friend. Needs bsf4oorexx from sourceforge.
Note that rosettacode.org (as used by java and NetRexx) does not permit this access! <lang oorexx>url=.bsf~new("java.net.URL","http://teletext.orf.at") sc =.bsf~new("java.util.Scanner",url~openStream) loop while sc~hasNext

 say sc~nextLine
 End
requires BSF.CLS -- get Java camouflaging support</lang>
Output:

massaged to avoid problems.

<-!DOCTYPE HTML-
..
-/html-

Oz

When creating a file object, it is possible to specify an URL instead of a filename: <lang oz> declare

 fun {GetPage Url}
    F = {New Open.file init(url:Url)}
    Contents = {F read(list:$ size:all)}
 in
    {F close}
    Contents
 end

in

 {System.showInfo {GetPage "http://www.rosettacode.org"}}

</lang>

Library: OzHttpClient

If you need more fine-grained control of the request, you could use a custom library: <lang oz> declare

 [HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}
 fun {GetPage Url}
    Client = {New HTTPClient.urlGET

init(inPrms(toFile:false toStrm:true) httpReqPrms )}

    OutParams
    HttpResponseParams
 in
    {Client getService(Url ?OutParams ?HttpResponseParams)}
    {Client closeAll(true)}
    OutParams.sOut
 end

in

 {System.showInfo {GetPage "http://www.rosettacode.org"}}

</lang>

Pascal

Works with: Free Pascal

Using fphttpclient <lang pascal>{$mode objfpc}{$H+} uses fphttpclient;

var

 s: string;
 hc: tfphttpclient;

begin

 hc := tfphttpclient.create(nil);
 try
   s := hc.get('http://www.example.com')
 finally
   hc.free
 end;
 writeln(s)

end.</lang>


Works with: Free Pascal
Library: CThreads
Library: Classes
Library: httpsend

<lang pascal>program http;

{$mode objfpc}{$H+} {$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses

 {$IFDEF UNIX}{$IFDEF UseCThreads}
 cthreads,
 {$ENDIF}{$ENDIF}
 Classes, httpsend; // Synapse httpsend class

{$R *.res}

var

 Response: TStrings;
 HTTPObj: THTTPSend;

begin

 HTTPObj := THTTPSend.Create;
 try
   { Stringlist object to capture HTML returned
     from URL }
   Response := TStringList.Create;
   try
     if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
       begin
         { Load HTTP Document into Stringlist }
         Response.LoadFromStream(HTTPObj.Document);
         { Write the response to the console window }
         Writeln(Response.Text);
       end
       else
       Writeln('Error retrieving data');
   finally
     Response.Free;
   end;
 finally
   HTTPObj.Free;
 end;
 // Keep console window open
 Readln;

end.</lang>

Peloton

English dialect, short form: <lang sgml> <@ SAYURLLIT>http://rosettacode.org/wiki/Main_Page</@> </lang>

English dialect, padded variable-length form: <lang sgml> <# SAY URLSOURCE LITERAL>http://rosettacode.org/wiki/Main_Page</#> </lang>

Perl

Core example

Library: HTTP/Tiny
Works with: Perl version 5.14
Works with: Perl/HTTP/Tiny

This sample is nearly identical to the LWP sample except that it uses HTTP::Tiny which was added to the core libraries in Perl/5.14.

<lang perl>use strict; use warnings; require 5.014; # check HTTP::Tiny part of core use HTTP::Tiny;

print( HTTP::Tiny->new()->get( 'http://rosettacode.org')->{content} );</lang>

Library examples

Library: LWP

Works with: Perl/LWP

Classic LWP sample.

<lang perl>use LWP::Simple qw/get $ua/; $ua->agent(undef) ; # cloudflare blocks default LWP agent print( get("http://www.rosettacode.org") );</lang>

or with more error-checking

<lang perl>use strict; use LWP::UserAgent;

my $url = 'http://www.rosettacode.org'; my $response = LWP::UserAgent->new->get( $url );

$response->is_success or die "Failed to GET '$url': ", $response->status_line;

print $response->as_string</lang>

Phix

Library: libcurl

Note that curl_easy_get_file() is better suited to multi-megabyte downloads than curl_easy_perform_ex(). <lang Phix>include builtins\libcurl.e curl_global_init() atom curl = curl_easy_init() curl_easy_setopt(curl, CURLOPT_URL, "http://rosettacode.org/robots.txt") object res = curl_easy_perform_ex(curl) curl_easy_cleanup(curl) curl_global_cleanup()

puts(1,res)</lang>

Output:
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:

PHP

<lang php> readfile("http://www.rosettacode.org"); </lang>

PicoLisp

<lang PicoLisp> (load "@lib/http.l")

(client "rosettacode.org" 80 NIL # Connect to rosettacode

  (out NIL (echo)) )                  # Echo to standard output

</lang>

Pike

<lang pike> write("%s",Protocols.HTTP.get_url_data("http://www.rosettacode.org")); </lang>

PowerShell

<lang powershell> $wc = New-Object Net.WebClient $wc.DownloadString('http://www.rosettacode.org') </lang>

Prolog

Works with SWI-Prolog and library http/http_open. (Extract from the doc).

<lang Prolog>

:- use_module(library( http/http_open )).

http :- http_open('http://www.rosettacode.org/',In, []), copy_stream_data(In, user_output), close(In). </lang>

PureBasic

<lang PureBasic> InitNetwork() OpenConsole()

tmpdir$ = GetTemporaryDirectory() filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html"

If ReceiveHTTPFile("http://rosettacode.org/wiki/Main_Page", filename$)

 If ReadFile(1, filename$)
   Repeat
     PrintN(ReadString(1))
   Until Eof(1)
   Input()
   ; to prevent console from closing if on windows
   CloseFile(1)
 EndIf
 DeleteFile(filename$)

EndIf </lang>

Another solution using general networking commands <lang PureBasic> InitNetwork() OpenConsole() id = OpenNetworkConnection("rosettacode.org", 80) SendNetworkString(id, "GET /wiki/Main_Page HTTP/1.1" + Chr(10) + "Host: rosettacode.org" + Chr(10) + Chr(10)) Repeat

 If NetworkClientEvent(id) = 2
   a$ = Space(1000)
   ReceiveNetworkData(id, @a$, 1000)
   out$ + a$
 EndIf

Until FindString(out$, "</html>", 0) PrintN(out$)

next line only to prevent console from closing on Windows

Input() </lang>

Of course you could use wget too.

Python

Python 3

Using the urllib.request module. <lang python> import urllib.request print(urllib.request.urlopen("http://rosettacode.org").read()) </lang>

Using a more low-level http.client library. <lang python> from http.client import HTTPConnection conn = HTTPConnection("example.com")

# If you need to use set_tunnel, do so here.

conn.request("GET", "/")

# Alternatively, you can use connect(), followed by the putrequest, putheader and endheaders functions.

result = conn.getresponse() r1 = result.read() # This retrieves the entire contents. </lang>

Python 2

Using the urllib library. <lang python> import urllib print urllib.urlopen("http://rosettacode.org").read() </lang>

Using the urllib2 library. <lang python> import urllib2 print urllib2.urlopen("http://rosettacode.org").read() </lang>


Library: Requests
Works with: Python version 2.7, 3.4–3.7

<lang Python> import requests print(requests.get("http://rosettacode.org").text) </lang>

R

Library: RCurl
Library: XML

First, retrieve the webpage.

<lang R> library(RCurl) webpage <- getURL("http://rosettacode.org")

# If you are linking to a page that no longer exists and need to follow the redirect, use followlocation=TRUE

webpage <- getURL("http://www.rosettacode.org", .opts=list(followlocation=TRUE))

# If you are behind a proxy server, you will need to use something like:

webpage <- getURL("http://rosettacode.org",

  .opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))
# Don't forget that backslashes in your username or password need to be escaped!

</lang>

Now parse the html code into a tree and print the html.

<lang R> library(XML) pagetree <- htmlTreeParse(webpage ) pagetree$children$html </lang>

Racket

<lang Racket>

#lang racket

(require net/url) (copy-port (get-pure-port (string->url "http://www.rosettacode.org")

                         #:redirections 100)
          (current-output-port))

</lang>

Raku

(formerly Perl 6)

Library: LWP

Using LWP::Simple from the Raku ecosystem.

<lang perl6>use v6;

use LWP::Simple;

print LWP::Simple.get("http://www.rosettacode.org"); </lang>

or, without LWP::Simple:

<lang perl6>use v6;

my $socket = IO::Socket::INET.new(host => "www.rosettacode.org", port => 80,); $socket.print("GET / HTTP/1.0\r\n\r\n"); print $socket.recv(); $socket.close; </lang>

REALbasic

REALBasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string, in both cases a timeout argument can be passed. <lang REALbasic>

 Dim sock As New HTTPSocket
 Print(sock.Get("http://www.rosettacode.org", 10))  //set the timeout period to 10 seconds.

</lang>

REBOL

<lang REBOL> print read http://rosettacode.org </lang>

REXX

This script takes an URL as an argument and displays the content on the terminal. It uses the external program `curl` to perform both the acquisition of the data and the display.

<lang Rexx>/* ft=rexx */ /* GET2.RX - Display contents of an URL on the terminal. */ /* Usage: rexx get.rx http://rosettacode.org */ parse arg url . 'curl' url</lang>

A simple change to the script will redirect the output to an internal variable for internal processing. (Our "internal processing" in this case is to display it.)

<lang Rexx>/* ft=rexx */ /* GET2.RX - Display contents of an URL on the terminal. */ /* Usage: rexx get2.rx http://rosettacode.org */ parse arg url . address system 'curl' url with output stem stuff. do i = 1 to stuff.0

 say stuff.i

end</lang>

Another simple change redirects the output to another external program like a shell pipe.

<lang Rexx>/* ft=rexx */ /* GET3.RX - Display contents of an URL on the terminal. */ /* Usage: rexx get3.rx http://rosettacode.org */ parse arg url . address system 'curl' url with output fifo address system 'more' with input fifo </lang>

Ring

<lang ring> See download("http://rosettacode.org") </lang>

RLaB

RLaB supports HTTP/FTP through its Read/Write facilities, which are organized around the concept of Universal Resource Locator (URL),

protocol://address

RLaB accepts the following values for protocol:

1. file or omitted, for generic text files or files in native binary format (partially compatible with matlab binary format);
2. h5 or hdf5 for files that use Hierarhical Data Format 5 (HDF5) version 1.8.0, and later. Here protocol can be omitted while address has to end with .h5 (file extension);
3. http, https, or ftp for accessing the data and files on web- and ftp-servers;
4. tcp, for accessing sockets over tcp/ip protocol;
5. serial, for accessing serial port on Un*x type systems.

For these URLs RLaB provides an internal book-keeping: It keeps track of the open ones and, say, upon quitting, closes them and releases the internal resources it allocated for managing them.

For accessing URLs on world wide web RLaB implements the library cURL (libcurl) [2] and its "easy" interface.

This said, this is how one would download financial data for Pfeizer from Yahoo [3].

<lang RLaB> // get cvs data from Yahoo for Pfeizer (PFE) url="http://ichart.finance.yahoo.com/table.csv?s=PFE&a=00&b=4&c=1982&d=00&e=10&f=2010&g=d&ignore=.csv";

opt = <<>>; // opt.CURLOPT_PROXY = "your.proxy.here"; // opt.CURLOPT_PROXYPORT = YOURPROXYPORT; // opt.CURLOPT_PROXYTYPE = "http"; open(url, opt); x = readm(url); close (url); </lang>

Ruby

The simple way loads the entire content into memory, then prints it.

<lang ruby> require 'open-uri'

print open("http://rosettacode.org") {|f| f.read} </lang>

If the content might be large, the better way uses FileUtils.copy_stream.

<lang ruby> require 'fileutils' require 'open-uri'

open("http://rosettacode.org/") {|f| FileUtils.copy_stream(f, $stdout)} </lang>

Run BASIC

<lang runbasic>print httpget$("http://rosettacode.org/wiki/Main_Page")</lang>

Rust

Cargo.toml <lang toml> [dependencies] hyper = "0.6" </lang> src/main.rs <lang rust> //cargo-deps: hyper="0.6" // The above line can be used with cargo-script which makes cargo's dependency handling more convenient for small programs extern crate hyper;

use std::io::Read; use hyper::client::Client;

fn main() {

   let client = Client::new();
   let mut resp = client.get("http://rosettacode.org").send().unwrap();
   let mut body = String::new();
   resp.read_to_string(&mut body).unwrap();
   println!("{}", body);

} </lang>

Scala

Library: Scala

<lang scala>import scala.io.Source

object HttpTest extends App {

 System.setProperty("http.agent", "*")
 Source.fromURL("http://www.rosettacode.org").getLines.foreach(println)

}</lang>

Scheme

Works with: Guile

<lang scheme>

Use the regular expression module to parse the url (included with Guile)

(use-modules (ice-9 regex))

Set the url and parse the hostname, port, and path into variables

(define url "http://www.rosettacode.org/wiki/HTTP") (define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase)) (define host (match:substring (regexp-exec r url) 2)) (define port (match:substring (regexp-exec r url) 4)) (define path (match:substring (regexp-exec r url) 6))

Set port to 80 if it wasn't set above and convert from a string to a number

(if (eq? port #f) (define port "80")) (define port (string->number port))

Connect to remote host on specified port

(let ((s (socket PF_INET SOCK_STREAM 0)))

       (connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)
Send a HTTP request for the specified path
       (display "GET " s)
       (display path s)
       (display " HTTP/1.0\r\n\r\n" s)
Display the received HTML
       (do ((c (read-char s) (read-char s))) ((eof-object? c))
               (display c)))

</lang>

Works with: Chicken Scheme

Using the http-client library, this is trivial. <lang scheme> (use http-client) (print

 (with-input-from-request "http://google.com/"
                          #f read-string))

</lang>

Seed7

The gethttp.s7i library contains the function getHttp, which gets data specified by an URL using the HTTP protocol.

<lang seed7> $ include "seed7_05.s7i";

 include "gethttp.s7i";

const proc: main is func

 begin
   writeln(getHttp("www.rosettacode.org"));
 end func;</lang>

SenseTalk

<lang sensetalk>put url "http://www.rosettacode.org"</lang>

Sidef

Sidef can load and use Perl modules: <lang ruby>func get(url) {

   var lwp = (
       try   { require('LWP::UserAgent') }
       catch { warn "'LWP::UserAgent' is not installed!"; return nil }
   )
   var ua = lwp.new(agent => 'Mozilla/5.0')
   if (var resp = ua.get(url); resp.is_success) {
       return resp.decoded_content
   }
   return nil

}

print get("http://rosettacode.org")</lang>

Smalltalk

Works with: Pharo

<lang smalltalk> Transcript show: 'http://rosettacode.org' asUrl retrieveContents contentStream. </lang>

SNOBOL4

Works with: Macro SNOBOL4 in C

<lang snobol>-include "tcp.sno" tcp.open(.conn,'www.rosettacode.org','http') :s(cont1) terminal = "cannot open" :(end) cont1 conn = "GET http://rosettacode.org/wiki/Main_Page HTTP/1.0" char(10) char(10) while output = conn :s(while) tcp.close(.conn) end </lang>

Swift

<lang Swift>import Foundation

let request = NSURLRequest(URL: NSURL(string: "http://rosettacode.org/")!)

// Using trailing closure
NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {res, data, err in

    // data is binary
    if (data != nil) {
        let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
        println(string)
    }
}

CFRunLoopRun() // dispatch</lang>

Tcl

Note that the http package is distributed as part of Tcl.

<lang tcl> package require http set request [http::geturl "http://www.rosettacode.org"] puts [http::data $request] http::cleanup $request</lang>

TSE SAL

<lang TSE SAL>
DLL "<urlmon.dll>"
INTEGER PROC FNUrlGetSourceApiI(
 INTEGER lpunknown,
 STRING urlS : CSTRVAL,
 STRING filenameS : CSTRVAL,
 INTEGER dword,
 INTEGER tlpbindstatuscallback
) : "URLDownloadToFileA" END

// library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53]
PROC PROCUrlGetSource( STRING urlS, STRING filenameS )
 FNUrlGetSourceApiI( 0, urlS, filenameS, 0, 0 )
END

PROC Main()
 STRING s1[255] = "http://www.google.com/index.html"
 STRING s2[255] = "c:\temp\ddd.txt"
 IF ( NOT ( Ask( "url: get: source: urlS = ", s1, _EDIT_HISTORY_ ) ) AND ( Length( s1 ) > 0 ) ) RETURN() ENDIF
 IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) ) RETURN() ENDIF
 PROCUrlGetSource( s1, s2 )
 EditFile( s2 )
END
</lang>

TUSCRIPT

<lang tuscript>
$$ MODE TUSCRIPT
SET DATEN = REQUEST ("http://www.rosettacode.org")
*{daten}
</lang>

UNIX Shell

<lang bash>curl -s -L http://rosettacode.org/</lang>

<lang bash>lynx -source http://rosettacode.org/</lang>

<lang bash>wget -O - -q http://rosettacode.org/</lang>

<lang bash>lftp -c "cat http://rosettacode.org/"</lang>

Works with: BSD

<lang bash>ftp -o - http://rosettacode.org 2>/dev/null</lang>

VBScript

Based on code at How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object
<lang vb>
Option Explicit

Const sURL="http://rosettacode.org/"

Dim oHTTP
Set oHTTP = CreateObject("Microsoft.XmlHTTP")

' Synchronous GET; On Error lets us report failures instead of aborting.
On Error Resume Next
oHTTP.Open "GET", sURL, False
oHTTP.Send ""
If Err.Number = 0 Then
    WScript.Echo oHTTP.responseText
Else
    Wscript.Echo "error " & Err.Number & ": " & Err.Description
End If

Set oHTTP = Nothing
</lang>

Visual Basic

Works with: Visual Basic version 5
Works with: Visual Basic version 6
Works with: VBA version Access 97
Works with: VBA version 6.5
Works with: VBA version 7.1

<lang vb>Sub Main()
  Dim HttpReq As WinHttp.WinHttpRequest
  ' in the "references" dialog of the IDE, check
  ' "Microsoft WinHTTP Services, version 5.1" (winhttp.dll)
  Const HTTPREQUEST_PROXYSETTING_PROXY As Long = 2
#Const USE_PROXY = 1
  Set HttpReq = New WinHttp.WinHttpRequest
  HttpReq.Open "GET", "http://rosettacode.org/robots.txt"
#If USE_PROXY Then
  HttpReq.SetProxy HTTPREQUEST_PROXYSETTING_PROXY, "my_proxy:80"
#End If
  HttpReq.SetTimeouts 1000, 1000, 1000, 1000
  HttpReq.Send
  Debug.Print HttpReq.ResponseText
End Sub</lang>

Visual Basic .NET

<lang vbnet>
Imports System.Net

' Download the page body as a string and print it.
Dim client As WebClient = New WebClient()
Dim content As String = client.DownloadString("http://www.google.com")
Console.WriteLine(content)
</lang>

zkl

File htmlGet.zkl. This uses HTTP/1.0 Protocol to avoid chunked data. Or use cURL (see https example).
<lang zkl>url := ask(0,"URL: ");

// Split the URL into host, path and port (defaults: "/" and 80).
host := url; dir := "/"; port := 80;
if (n := url.find("/")) { dir = url[n,*]; host = url[0,n]; }
if (n := host.find(":")) { port = host[n+1,*]; host = host[0,n]; }

get := "GET %s HTTP/1.0\r\nHost: %s:%s\r\n\r\n".fmt(dir,host,port.toInt());
println("-->",get);
server := Network.TCPClientSocket.connectTo(host,port);
server.write(get);
data := server.read(True);
println(data.text);</lang>
zkl htmlGet.zkl rosettacode.org/wiki/HTTP

Output:
-->GET /wiki/HTTP HTTP/1.0
Host: rosettacode.org:80


HTTP/1.1 200 OK
Server: cloudflare-nginx
Date: Tue, 11 Mar 2014 08:31:43 GMT
Content-Type: text/html; charset=UTF-8
Connection: close
Set-Cookie:XXX
23:50:00 GMT; path=/; domain=.rosettacode.org; HttpOnly
X-Powered-By: PHP/5.3.3-7+squeeze18
X-Content-Type-Options: nosniff
Content-Language: en
ETag: W/"rosettacode:pcache:idhash:3055-0!1!0!!en!2--20140227082903"
Vary: Accept-Encoding,Cookie
Cache-Control: s-maxage=86400, must-revalidate, max-age=0
Last-Modified: Thu, 27 Feb 2014 08:29:03 GMT
Age: 86011
X-Cache: HIT from prgmr2.rosettacode.org
X-Cache-Lookup: HIT from prgmr2.rosettacode.org:80
Via: 1.0 prgmr2.rosettacode.org (squid/3.1.6)
CF-RAY: 109665b7e92a012c-SJC

<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
<title>HTTP - Rosetta Code</title>
...

Zoea

<lang Zoea> program: http

 input: 'https://zoea.co.uk/examples/test.txt'
 output: 'hello from zoea'

</lang>

Zsh

<lang zsh> zmodload zsh/net/tcp ztcp example.com 80 fd=$REPLY print -l -u $fd -- 'GET / HTTP/1.1' 'Host: example.com' while read -u $fd -r -e -t 1; do; :; done ztcp -c $fd </lang>