HTTPS
Send a GET request to obtain the resource located at the URL "https://www.w3.org/", then print it to the console.
Checking the host certificate for validity is recommended.
Do not authenticate. That is the subject of other tasks.
Readers may wish to contrast with the HTTP Request task, and also the task on HTTPS request with authentication.
You are encouraged to solve this task according to the task description, using any language you may know.
Ada
Exactly the same as the HTTP task, assuming you compiled AWS with OpenSSL support.
with AWS.Client;
with AWS.Response;
with Ada.Text_IO; use Ada.Text_IO;
procedure GetHttps is
begin
Put_Line (AWS.Response.Message_Body (AWS.Client.Get (
URL => "https://sourceforge.net/")));
end GetHttps;
Arturo
print read "https://www.w3.org/"
AutoHotkey
URL := "https://sourceforge.net/"
WININET_Init()
msgbox % html := UrlGetContents(URL)
WININET_UnInit()
return
#include urlgetcontents.ahk
#include wininet.ahk
BaCon
This code requires BaCon 3.8.2 or later.
OPTION TLS TRUE
website$ = "www.google.com"
OPEN website$ & ":443" FOR NETWORK AS mynet
SEND "GET / HTTP/1.1\r\nHost: " & website$ & "\r\n\r\n" TO mynet
WHILE WAIT(mynet, 1000)
RECEIVE dat$ FROM mynet
total$ = total$ & dat$
IF REGEX(dat$, "\r\n\r\n$") THEN BREAK : ' Quit receiving data when end indicator was reached
WEND
CLOSE NETWORK mynet
PRINT REPLACE$(total$, "\r\n[0-9a-fA-F]+\r\n", "\r\n", TRUE) : ' Remove chunk indicators from HTML data
Batch File
:: Must have curl.exe
curl.exe -k -s -L https://sourceforge.net/
C
#include <stdio.h>
#include <stdlib.h>
#include <curl/curl.h>
CURL *curl;
char buffer[CURL_ERROR_SIZE];
int main(void) {
if ((curl = curl_easy_init()) != NULL) {
curl_easy_setopt(curl, CURLOPT_URL, "https://sourceforge.net/");
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, buffer);
if (curl_easy_perform(curl) != CURLE_OK) {
fprintf(stderr, "%s\n", buffer);
return EXIT_FAILURE;
}
curl_easy_cleanup(curl);
}
return EXIT_SUCCESS;
}
C#
using System;
using System.Net;
class Program
{
static void Main(string[] args)
{
var client = new WebClient();
var data = client.DownloadString("https://www.google.com");
Console.WriteLine(data);
}
}
Note: WebClient handles https: URLs transparently, though on older .NET Framework versions sites that require newer TLS versions may need ServicePointManager.SecurityProtocol to be set explicitly.
Clojure
Using the duck-streams as a convenient wrapper for Java's networking classes, grabbing the contents of an HTTPS URL is as easy as:
(use '[clojure.contrib.duck-streams :only (slurp*)])
(print (slurp* "https://sourceforge.net"))
The usual Java mechanisms can be used to manage acceptance of SSL certificates if required.
Or, using the core slurp function, which accepts a URL string directly:
(print (slurp "https://sourceforge.net"))
D
Using curl
import std.net.curl;
import std.stdio;

void main()
{
    auto data = get("https://sourceforge.net");
    writeln(data);
}
Common Lisp
First grabbing the entire body as a string, and then by pulling from a stream. This is the same code as in HTTP Request; drakma:http-request
supports SSL.
(defun wget-drakma-string (url &optional (out *standard-output*))
"Grab the body as a string, and write it to out."
(write-string (drakma:http-request url) out))
(defun wget-drakma-stream (url &optional (out *standard-output*))
"Grab the body as a stream, and write it to out."
(loop with body = (drakma:http-request url :want-stream t)
for line = (read-line body nil nil)
while line do (write-line line)
finally (close body)))
;; Use
(wget-drakma-stream "https://sourceforge.net")
Or, using the Dexador library:
(format t "~a~%" (nth-value 0 (dex:get "https://www.w3.org/")))
Delphi
program ShowHTTPS;
{$APPTYPE CONSOLE}
uses IdHttp, IdSSLOpenSSL;
var
s: string;
lHTTP: TIdHTTP;
begin
lHTTP := TIdHTTP.Create(nil);
try
lHTTP.IOHandler := TIdSSLIOHandlerSocketOpenSSL.Create(lHTTP);
lHTTP.HandleRedirects := True;
s := lHTTP.Get('https://sourceforge.net/');
Writeln(s);
finally
lHTTP.Free;
end;
end.
EchoLisp
file->string usage: the server must allow cross-domain access, or a browser add-on like cors-everywhere must be installed to bypass cross-domain checking.
;; asynchronous call back definition
(define (success name text) (writeln 'Loaded name) (writeln text))
;;
(file->string success "https://sourceforge.net")
Erlang
Synchronous
-module(main).
-export([main/1]).
main([Url|[]]) ->
inets:start(),
ssl:start(),
case http:request(get, {Url, []}, [{ssl,[{verify,0}]}], []) of
{ok, {_V, _H, Body}} -> io:fwrite("~p~n",[Body]);
{error, Res} -> io:fwrite("~p~n", [Res])
end.
Asynchronous
-module(main).
-export([main/1]).
main([Url|[]]) ->
inets:start(),
ssl:start(),
http:request(get, {Url, [] }, [{ssl,[{verify,0}]}], [{sync, false}]),
receive
{http, {_ReqId, Res}} -> io:fwrite("~p~n",[Res]);
_Any -> io:fwrite("Error: ~p~n",[_Any])
after 10000 -> io:fwrite("Timed out.~n",[])
end.
Using it
escript ./req.erl https://sourceforge.net/
F#
The underlying .NET classes handle secure web connections the same way they manage insecure connections.
#light
let wget (url : string) =
let c = new System.Net.WebClient()
c.DownloadString(url)
FreeBASIC
#include once "windows.bi"
#include once "win/wininet.bi"
' Function to check the host certificate for validity
Function CheckCertificate(hRequest As HINTERNET) As Boolean
Dim As DWORD dwFlags, dwBuffLen = Sizeof(DWORD)
If InternetQueryOption(hRequest, INTERNET_OPTION_SECURITY_FLAGS, @dwFlags, @dwBuffLen) Then
If (dwFlags And SECURITY_FLAG_IGNORE_UNKNOWN_CA) = 0 Then Return True
End If
Return False
End Function
' Main program
Dim As HINTERNET hInternet, hConnect, hRequest
Dim As String url = "https://www.w3.org/"
Dim As String buffer
Dim As DWORD bytesRead
hInternet = InternetOpen("FreeBASIC HTTP Request", INTERNET_OPEN_TYPE_DIRECT, null, null, 0)
If hInternet Then
hConnect = InternetConnect(hInternet, "www.w3.org", INTERNET_DEFAULT_HTTPS_PORT, null, null, INTERNET_SERVICE_HTTP, 0, 0)
If hConnect Then
hRequest = HttpOpenRequest(hConnect, "GET", "/", null, null, null, INTERNET_FLAG_SECURE, 0)
If hRequest Then
If HttpSendRequest(hRequest, null, 0, null, 0) Then
If CheckCertificate(hRequest) Then
Do
buffer = Space(1024)
If InternetReadFile(hRequest, Strptr(buffer), Len(buffer), @bytesRead) = 0 Then Exit Do
If bytesRead = 0 Then Exit Do
Print Left(buffer, bytesRead);
Loop
Else
Print "Invalid certificate."
End If
Else
Print "Failed to send request."
End If
InternetCloseHandle(hRequest)
Else
Print "Failed to open request."
End If
InternetCloseHandle(hConnect)
Else
Print "Failed to connect."
End If
InternetCloseHandle(hInternet)
Else
Print "Failed to open internet."
End If
Sleep
Frink
print[read["https://sourceforge.net/"]]
FutureBasic
include "NSLog.incl"
local fn GET_HTTPS
CFStringRef response = unix @"curl -ksL https://sourceforge.net/"
CFDataRef dta = fn StringData( response, NSUTF8StringEncoding )
CFDictionaryRef options = @{NSDocumentTypeDocumentAttribute: NSHTMLTextDocumentType, NSCharacterEncodingDocumentAttribute: @(NSUTF8StringEncoding)}
CFAttributedStringRef aStr = fn AttributedStringWithHTML( dta, options )
NSLog( @"%@", aStr )
end fn
fn GET_HTTPS
HandleEvents
- Output:
We're sorry -- the Sourceforge site is currently in Disaster Recovery mode. Please check back later. { NSColor = "sRGB IEC61966-2.1 colorspace 0 0 0 1"; NSFont = "\"Times-Roman 12.00 pt. P [] (0x7f8f94e11ce0) fobj=0x7f8f94e0fda0, spc=3.00\""; NSKern = 0; NSParagraphStyle = "Alignment 4, LineSpacing 0, ParagraphSpacing 0, ParagraphSpacingBefore 0, HeadIndent 0, TailIndent 0, FirstLineHeadIndent 0, LineHeight 0/0, LineHeightMultiple 0, LineBreakMode 0, Tabs (\n), DefaultTabInterval 36, Blocks (\n), Lists (\n), BaseWritingDirection 0, HyphenationFactor 0, TighteningForTruncation YES, HeaderLevel 0 LineBreakStrategy 0"; NSStrokeColor = "sRGB IEC61966-2.1 colorspace 0 0 0 1"; NSStrokeWidth = 0; }
Go
package main
import (
"io"
"log"
"net/http"
"os"
)
func main() {
r, err := http.Get("https://sourceforge.net/")
if err != nil {
log.Fatalln(err)
}
defer r.Body.Close()
io.Copy(os.Stdout, r.Body)
}
Groovy
new URL("https://sourceforge.net").eachLine { println it }
Haskell
This is just the example from Network.HTTP.Conduit, with the http URL replaced with an https one, since http-conduit natively supports https without needing any additional work.
#!/usr/bin/runhaskell
import Network.HTTP.Conduit
import qualified Data.ByteString.Lazy as L
import Network (withSocketsDo)
main = withSocketsDo
$ simpleHttp "https://sourceforge.net/" >>= L.putStr
Icon and Unicon
# Requires Unicon version 13
procedure main(arglist)
url := (\arglist[1] | "https://sourceforge.net/")
w := open(url, "m-") | stop("Cannot open " || url)
while write(read(w))
close(w)
end
- Output:
prompt$ unicon -s https.icn -x | head -n2
<!doctype html>
<!-- Server: sfs-consume-15 -->
Ioke
connection = URL new("https://sourceforge.net") openConnection
scanner = Scanner new(connection getInputStream)
while(scanner hasNext,
scanner next println
)
J
Using gethttp from Web Scraping
#page=: gethttp'https://sourceforge.net'
0
#page=: '--no-check-certificate' gethttp'https://sourceforge.net'
900
(We cannot load the example page using https unless we disable certificate checking. The numbers are the number of characters retrieved.)
Java
javax.net.ssl
Additional certificate information is available through the javax.net.ssl.HttpsURLConnection interface.
URL url = new URL("https://sourceforge.net");
HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
Scanner scanner = new Scanner(connection.getInputStream());
while (scanner.hasNext()) {
System.out.println(scanner.next());
}
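For instance, once the connection has responded, the negotiated cipher suite and the server's certificate chain can be read from the HttpsURLConnection. The following is a minimal sketch of that idea (the class name CertInfo is arbitrary, and the target URL is simply reused from the example above):
import java.net.URL;
import java.security.cert.Certificate;
import javax.net.ssl.HttpsURLConnection;

public class CertInfo {
    public static void main(String[] args) throws Exception {
        URL url = new URL("https://sourceforge.net");
        HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
        connection.connect(); // performs the TLS handshake and certificate validation

        // Cipher suite negotiated for this connection
        System.out.println("Cipher suite: " + connection.getCipherSuite());

        // Server certificate chain, leaf certificate first
        for (Certificate cert : connection.getServerCertificates()) {
            System.out.println("Certificate type: " + cert.getType());
        }
    }
}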
java.net.http
Using the standard Java 11 HTTP Client
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.Charset;
public class Main {
public static void main(String[] args) {
var request = HttpRequest.newBuilder(URI.create("https://sourceforge.net"))
.GET()
.build();
HttpClient.newHttpClient()
.sendAsync(request, HttpResponse.BodyHandlers.ofString(Charset.defaultCharset()))
.thenApply(HttpResponse::body)
.thenAccept(System.out::println)
.join();
}
}
JavaScript
Browser
Using the fetch API with async/await:
const response = await fetch('https://rosettacode.org');
const text = await response.text();
console.log(text);
fetch("https://sourceforge.net").then(function (response) {
return response.text();
}).then(function (body) {
return body;
});
Node.js
require("https").get("https://sourceforge.net", function (resp) {
let body = "";
resp.on("data", function (chunk) {
body += chunk;
});
resp.on("end", function () {
console.log(body);
});
}).on("error", function (err) {
console.error("Error: " + err.message);
});
Julia
# v0.6.0
using Requests
str = readstring(get("https://sourceforge.net/"))
Kotlin
// version 1.1.2
import java.net.URL
import javax.net.ssl.HttpsURLConnection
import java.io.InputStreamReader
import java.util.Scanner
fun main(args: Array<String>) {
val url = URL("https://en.wikipedia.org/wiki/Main_Page")
val connection = url.openConnection() as HttpsURLConnection
val isr = InputStreamReader(connection.inputStream)
val sc = Scanner(isr)
while (sc.hasNextLine()) println(sc.nextLine())
sc.close()
}
Or, more simply, since Kotlin 1.2:
import java.net.URL
fun main(args: Array<String>){
println(URL("https://sourceforge.net").readText())
}
Lasso
local(x = curl('https://sourceforge.net'))
local(y = #x->result)
#y->asString
If a site with an invalid SSL certificate is encountered, the curl type throws the following error:
- Output:
FAILURE: 60 Peer certificate cannot be authenticated with given CA certificates
Lingo
- Windows:
ch = xtra("Curl").new()
CURLOPT_URL = 10002
ch.setOption(CURLOPT_URL, "https://sourceforge.net")
res = ch.exec(1)
if integerP(res) then
put "Error:" && curl_error(res)
else
put "Result:" && res.readRawString(res.length)
end if
-- "Result: <!doctype html> ..."
- Mac OS X:
sx = xtra("Shell").new()
put sx.shell_cmd("curl https://sourceforge.net")
LiveCode
Blocking version
libURLSetSSLVerification true --check cert
get URL "https://sourceforge.net/"
Non-blocking version, execute getWebResource
on myUrlDownloadFinished
get URL "https://sourceforge.net/" -- this will now fetch a locally cached copy
put it
end myUrlDownloadFinished
command getWebResource
libURLFollowHttpRedirects true
libURLSetSSLVerification true --check cert
load URL "https://sourceforge.net/" with message "myUrlDownloadFinished"
end getWebResource
LSL
Virtually identical to the HTTP Task.
To test it yourself; rez a box on the ground, and add the following as a New Script.
string sURL = "https://SourceForge.Net/";
key kHttpRequestId;
default {
state_entry() {
kHttpRequestId = llHTTPRequest(sURL, [], "");
}
http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) {
if(kRequestId==kHttpRequestId) {
llOwnerSay("Status="+(string)iStatus);
integer x = 0;
for(x=0 ; x<llGetListLength(lMetaData) ; x++) {
llOwnerSay("llList2String(lMetaData, "+(string)x+")="+llList2String(lMetaData, x));
}
list lBody = llParseString2List(sBody, ["\n"], []);
for(x=0 ; x<llGetListLength(lBody) ; x++) {
llOwnerSay("llList2String(lBody, "+(string)x+")="+llList2String(lBody, x));
}
}
}
}
Output:
Status=200 llList2String(lMetaData, 0)=0 llList2String(lMetaData, 1)=2048 llList2String(lBody, 0)=<!doctype html> llList2String(lBody, 1)=<!-- Server: sfs-consume-7 --> llList2String(lBody, 2)=<!--[if lt IE 7 ]> <html lang="en" class="no-js ie6" > <![endif]--> llList2String(lBody, 3)=<!--[if IE 7 ]> <html lang="en" class="no-js ie7" > <![endif]--> llList2String(lBody, 4)=<!--[if IE 8 ]> <html lang="en" class="no-js ie8" > <![endif]--> llList2String(lBody, 5)=<!--[if IE 9 ]> <html lang="en" class="no-js ie9" > <![endif]--> llList2String(lBody, 6)=<!--[if (gt IE 9)|!(IE)]>--> <html lang="en" class="no-js"> <!--<![endif]--> llList2String(lBody, 7)= <head> llList2String(lBody, 8)= <meta charset="utf-8"> llList2String(lBody, 9)= llList2String(lBody, 10)= <meta id="webtracker" name="webtracker" content='{"event_id": "ea71f064-ca28-11e1-98cc-0019b9f0e8fc"}'> llList2String(lBody, 11)= <meta name="description" content="Free, secure and fast downloads from the largest Open Source applications and software directory - SourceForge.net"> llList2String(lBody, 12)= <meta name="keywords" content="Open Source, Open Source Software, Development, Community, Source Code, Secure, Downloads, Free Software"> llList2String(lBody, 13)=<meta name="msvalidate.01" content="0279349BB9CF7ACA882F86F29C50D3EA" /> llList2String(lBody, 14)= <meta name="viewport" content="width=device-width, initial-scale=1.0"> llList2String(lBody, 15)= <title>SourceForge - Download, Develop and Publish Free Open Source Software</title> llList2String(lBody, 16)= <link rel="shortcut icon" href="http://a.fsdn.com/con/img/sftheme/favicon.ico"> ... ... ... ... ... ... ... ... ... ... ... ... ... ...
Lua
local request = require('http.request')
local headers, stream = request.new_from_uri("https://sourceforge.net/"):go()
local body = stream:get_body_as_string()
local status = headers:get(':status')
io.write(string.format('Status: %d\nBody: %s\n', status, body))
HTTPS requests can also be made with much smaller libraries such as LuaSec or lua-requests, but they currently don't support redirects, which is why lua-http is used in this example.
Maple
content := URL:-Get( "https://www.google.ca/" );
Mathematica / Wolfram Language
A straightforward "Import" task. More complicated secure web access can be done using J/Link, essentially a link to the Java API.
content=Import["https://sourceforge.net", "HTML"]
MATLAB / Octave
s=urlread('https://sourceforge.net/')
Nemerle
This example is essentially identical to the HTTP task because the WebClient object can be used with http:, https:, ftp: and file: URIs.
using System;
using System.Console;
using System.Net;
using System.IO;
module HTTP
{
Main() : void
{
def wc = WebClient();
def myStream = wc.OpenRead("https://sourceforge.com");
def sr = StreamReader(myStream);
WriteLine(sr.ReadToEnd());
myStream.Close()
}
}
NewLISP
(! "curl https://sourceforge.net")
Nim
Compile with nim c -d:ssl httpsClient.nim:
import httpclient
var client = newHttpClient()
echo client.getContent("https://sourceforge.net")
Objeck
use HTTP;
class HttpsTest {
function : Main(args : String[]) ~ Nil {
client := HttpsClient->New();
lines := client->Get("https://sourceforge.net");
each(i : lines) {
lines->Get(i)->As(String)->PrintLine();
};
}
}
Ol
(import (lib curl))
(define curl (make-curl))
(curl 'url "https://www.w3.org/")
(curl 'perform)
Pascal
Using fphttpclient
{$mode objfpc}{$H+}
uses fphttpclient;
var
s: string;
hc: tfphttpclient;
begin
hc := tfphttpclient.create(nil);
try
s := hc.get('https://www.example.com')
finally
hc.free
end;
writeln(s)
end.
PascalABC.NET
##
uses System.Net;
var wc := new WebClient();
var content := wc.DownloadString('https://example.com');
content.Println
Perl
use strict;
use LWP::UserAgent;
my $url = 'https://www.rosettacode.org';
my $response = LWP::UserAgent->new->get( $url );
$response->is_success or die "Failed to GET '$url': ", $response->status_line;
print $response->as_string;
Phix
Exactly the same as the HTTP#Phix task.
without js
include builtins\libcurl.e
curl_global_init()
atom curl = curl_easy_init()
curl_easy_setopt(curl, CURLOPT_URL, "https://sourceforge.net/")
object res = curl_easy_perform_ex(curl)
curl_easy_cleanup(curl)
curl_global_cleanup()
puts(1,res)
PHP
echo file_get_contents('https://sourceforge.net');
PicoLisp
PicoLisp has no functionality for communicating with an HTTPS server (only for the other direction), but it is easy to use an external tool:
(in '(curl "https://sourceforge.net") # Open a pipe to 'curl'
(out NIL (echo)) ) # Echo to standard output
Pike
int main() {
write("%s\n", Protocols.HTTP.get_url_data("https://sourceforge.net"));
}
PowerShell
Invoke-WebRequest 'https://www.rosettacode.org'
$wc = New-Object Net.WebClient
$wc.DownloadString('https://sourceforge.net')
If the certificate cannot be validated (untrusted, self-signed, expired), an exception is thrown with the message "The underlying connection was closed: Could not establish trust relationship for the SSL/TLS secure channel." Certificate validation is therefore performed automatically by the method.
Python
Python's urllib.request library has support for SSL if the interpreter's underlying httplib libraries were compiled with SSL support. By default this is enabled for Python installations on most platforms.
import urllib.request
print(urllib.request.urlopen("https://sourceforge.net/").read())
R
The basic idea is to use getURL (as with HTTP_Request), but with some extra parameters.
library(RCurl)
webpage <- getURL("https://sourceforge.net/", .opts=list(followlocation=TRUE, ssl.verifyhost=FALSE, ssl.verifypeer=FALSE))
In this case, the webpage output contains unprocessed characters, e.g. \" instead of " and \\ instead of \, so we need to process the markup.
wp <- readLines(tc <- textConnection(webpage))
close(tc)
Finally, we parse the HTML and find the interesting bit.
pagetree <- htmlTreeParse(wp)
pagetree$children$html
Racket
#lang racket
(require net/url)
(copy-port (get-pure-port (string->url "https://www.google.com")
#:redirections 100)
(current-output-port))
Raku
(formerly Perl 6)
There are several modules that provide HTTPS capability. WWW and HTTP::UserAgent are probably the most popular right now, but others exist.
use WWW;
say get 'https://sourceforge.net/';
or
use HTTP::UserAgent;
say HTTP::UserAgent.new.get('https://sourceforge.net/').content;
REALbasic
REALbasic provides an HTTPSecureSocket class for handling HTTPS connections. The Get method of HTTPSecureSocket is overloaded and can download data to a file or return the data as a string; in both cases an optional timeout argument can be passed.
Dim sock As New HTTPSecureSocket
Print(sock.Get("https://sourceforge.net", 10)) //set the timeout period to 10 seconds.
Ring
cStr = download("https://sourceforge.net/")
see cStr + nl
RLaB
See HTTP#RLaB
Ruby
This solution doesn't use the open-uri
convenience package that the HTTP Request#Ruby solution uses: the Net::HTTP
object must be told to use SSL before the session is started.
require 'net/https'
require 'uri'
require 'pp'
uri = URI.parse('https://sourceforge.net')
http = Net::HTTP.new(uri.host,uri.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.start do
content = http.get(uri)
p [content.code, content.message]
pp content.to_hash
puts content.body
end
- Output:
["302", "Found"] {"location"=>["http://sourceforge.net/"], "content-type"=>["text/html; charset=UTF-8"], "connection"=>["close"], "server"=>["nginx/0.7.60"], "date"=>["Sun, 30 Aug 2009 20:20:07 GMT"], "content-length"=>["229"], "set-cookie"=> ["sf.consume=89f65c6fadd222338b2f3de6f8e8a17b2c8f67c2gAJ9cQEoVQhfZXhwaXJlc3ECY2RhdGV0aW1lCmRhdGV0aW1lCnEDVQoH9gETAw4HAAAAhVJxBFUDX2lkcQVVIDEyOWI2MmVkOWMwMWYxYWZiYzE5Y2JhYzcwZDMxYTE4cQZVDl9hY2Nlc3NlZF90aW1lcQdHQdKmt73UN21VDl9jcmVhdGlvbl90aW1lcQhHQdKmt73UN2V1Lg==; expires=Tue, 19-Jan-2038 03:14:07 GMT; Path=/"]} <html> <head> <title>302 Found</title> </head> <body> <h1>302 Found</h1> The resource was found at <a href="http://sourceforge.net/">http://sourceforge.net/</a>; you should be redirected automatically. </body> </html>
Rust
extern crate reqwest;
fn main() {
let response = match reqwest::blocking::get("https://sourceforge.net") {
Ok(response) => response,
Err(e) => panic!("error encountered while making request: {:?}", e),
};
println!("{}", response.text().unwrap());
}
- Output:
<!-- Server: sfs-consume-7 --> <html class="no-js" lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no" /> <script> var __gdpr = true; var __ccpa = false; ...
Scala
import scala.io.Source
object HttpsTest extends App {
System.setProperty("http.agent", "*")
Source.fromURL("https://sourceforge.net").getLines.foreach(println)
}
Seed7
The library gethttps.s7i defines the function getHttps, which uses the HTTPS protocol to get a file.
$ include "seed7_05.s7i";
include "gethttps.s7i";
include "utf8.s7i";
const proc: main is func
begin
writeln(STD_UTF8_OUT, getHttps("sourceforge.net"));
end func;
Sidef
require('LWP::UserAgent')
require('LWP::Protocol::https')
func get(url) {
static ua = %O<LWP::UserAgent>.new(
agent => 'Mozilla/5.0',
ssl_opts => Hash(verify_hostname => 1),
)
var resp = ua.get(url)
if (resp.is_success) {
return resp.decoded_content
}
die "Failed to GET #{url}: #{resp.status_line}"
}
say get("https://rosettacode.org")
Swift
import Foundation
// With https
let request = NSURLRequest(URL: NSURL(string: "https://sourceforge.net")!)
NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {res, data, err in // callback
// data is binary
if (data != nil) {
let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
println(string)
}
}
CFRunLoopRun() // dispatch
TAV
The preliminary function from the standard library actually pipes a shell invocation of 'curl'; it therefore might not work in the playground, where shell invocation is inhibited.
main (parms):+
url =: 'https://www.rosettacode.org'
?# lne =: network url url give lines
print lne
- Output:
<html>
<head><title>301 Moved Permanently</title></head>
<body>
<center><h1>301 Moved Permanently</h1></center>
<hr><center>nginx</center>
</body>
</html>
Tcl
Though Tcl's built-in http
package does not understand SSL, it does support the registration of external handlers to accommodate additional protocols. This allows the use of the Tls package to supply the missing functionality with only a single line to complete the registration.
package require http
package require tls
# Tell the http package what to do with “https:” URLs.
#
# First argument is the protocol name, second the default port, and
# third the connection builder command
http::register "https" 443 ::tls::socket
# Make a secure connection, which is almost identical to normal
# connections except for the different protocol in the URL.
set token [http::geturl "https://sourceforge.net/"]
# Now as for conventional use of the “http” package
puts [http::data $token]
http::cleanup $token
TUSCRIPT
$$ MODE TUSCRIPT
SET DATEN = REQUEST ("https://sourceforge.net")
*{daten}
UNIX Shell
curl -k -s -L https://sourceforge.net/
VBScript
Based on code at How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object
Option Explicit
Const sURL="https://sourceforge.net/"
Dim oHTTP
Set oHTTP = CreateObject("Microsoft.XmlHTTP")
On Error Resume Next
oHTTP.Open "GET", sURL, False
oHTTP.Send ""
If Err.Number = 0 Then
WScript.Echo oHTTP.responseText
Else
Wscript.Echo "error " & Err.Number & ": " & Err.Description
End If
Set oHTTP = Nothing
Visual Basic
Sub Main()
Dim HttpReq As WinHttp.WinHttpRequest
' in the "references" dialog of the IDE, check
' "Microsoft WinHTTP Services, version 5.1" (winhttp.dll)
Const HTTPREQUEST_PROXYSETTING_PROXY As Long = 2
Const WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 As Long = &H80&
Const WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_1 As Long = &H200&
Const WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2 As Long = &H800&
#Const USE_PROXY = 1
Set HttpReq = New WinHttp.WinHttpRequest
HttpReq.Open "GET", "https://groups.google.com/robots.txt"
HttpReq.Option(WinHttpRequestOption_SecureProtocols) = WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 Or _
WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_1 Or _
WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2
#If USE_PROXY Then
HttpReq.SetProxy HTTPREQUEST_PROXYSETTING_PROXY, "my_proxy:80"
#End If
HttpReq.SetTimeouts 1000, 1000, 1000, 1000
HttpReq.Send
Debug.Print HttpReq.ResponseText
End Sub
Visual Basic .NET
Imports System.Net
Dim client As WebClient = New WebClient()
Dim content As String = client.DownloadString("https://sourceforge.net")
Console.WriteLine(content)
Wren
An embedded program so we can ask the C host to communicate with libcurl for us.
/* HTTPS.wren */
var CURLOPT_URL = 10002
var CURLOPT_FOLLOWLOCATION = 52
var CURLOPT_ERRORBUFFER = 10010
foreign class Curl {
construct easyInit() {}
foreign easySetOpt(opt, param)
foreign easyPerform()
foreign easyCleanup()
}
var curl = Curl.easyInit()
if (curl == 0) {
System.print("Error initializing cURL.")
return
}
curl.easySetOpt(CURLOPT_URL, "https://www.w3.org/")
curl.easySetOpt(CURLOPT_FOLLOWLOCATION, 1)
curl.easySetOpt(CURLOPT_ERRORBUFFER, 0) // buffer to be supplied by C
var status = curl.easyPerform()
if (status != 0) {
System.print("Failed to perform task.")
return
}
curl.easyCleanup()
We now embed this in the following C program, compile and run it.
/* gcc HTTPS.c -o HTTPS -lcurl -lwren -lm */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <curl/curl.h>
#include "wren.h"
/* C <=> Wren interface functions */
void C_curlAllocate(WrenVM* vm) {
CURL** pcurl = (CURL**)wrenSetSlotNewForeign(vm, 0, 0, sizeof(CURL*));
*pcurl = curl_easy_init();
}
void C_easyPerform(WrenVM* vm) {
CURL* curl = *(CURL**)wrenGetSlotForeign(vm, 0);
CURLcode cc = curl_easy_perform(curl);
wrenSetSlotDouble(vm, 0, (double)cc);
}
void C_easyCleanup(WrenVM* vm) {
CURL* curl = *(CURL**)wrenGetSlotForeign(vm, 0);
curl_easy_cleanup(curl);
}
void C_easySetOpt(WrenVM* vm) {
CURL* curl = *(CURL**)wrenGetSlotForeign(vm, 0);
CURLoption opt = (CURLoption)wrenGetSlotDouble(vm, 1);
if (opt < 10000) {
long lparam = (long)wrenGetSlotDouble(vm, 2);
curl_easy_setopt(curl, opt, lparam);
} else {
if (opt == CURLOPT_URL) {
const char *url = wrenGetSlotString(vm, 2);
curl_easy_setopt(curl, opt, url);
} else if (opt == CURLOPT_ERRORBUFFER) {
char buffer[CURL_ERROR_SIZE];
curl_easy_setopt(curl, opt, buffer);
}
}
}
WrenForeignClassMethods bindForeignClass(WrenVM* vm, const char* module, const char* className) {
WrenForeignClassMethods methods;
methods.allocate = NULL;
methods.finalize = NULL;
if (strcmp(module, "main") == 0) {
if (strcmp(className, "Curl") == 0) {
methods.allocate = C_curlAllocate;
}
}
return methods;
}
WrenForeignMethodFn bindForeignMethod(
WrenVM* vm,
const char* module,
const char* className,
bool isStatic,
const char* signature) {
if (strcmp(module, "main") == 0) {
if (strcmp(className, "Curl") == 0) {
if (!isStatic && strcmp(signature, "easySetOpt(_,_)") == 0) return C_easySetOpt;
if (!isStatic && strcmp(signature, "easyPerform()") == 0) return C_easyPerform;
if (!isStatic && strcmp(signature, "easyCleanup()") == 0) return C_easyCleanup;
}
}
return NULL;
}
static void writeFn(WrenVM* vm, const char* text) {
printf("%s", text);
}
void errorFn(WrenVM* vm, WrenErrorType errorType, const char* module, const int line, const char* msg) {
switch (errorType) {
case WREN_ERROR_COMPILE:
printf("[%s line %d] [Error] %s\n", module, line, msg);
break;
case WREN_ERROR_STACK_TRACE:
printf("[%s line %d] in %s\n", module, line, msg);
break;
case WREN_ERROR_RUNTIME:
printf("[Runtime Error] %s\n", msg);
break;
}
}
char *readFile(const char *fileName) {
FILE *f = fopen(fileName, "r");
fseek(f, 0, SEEK_END);
long fsize = ftell(f);
rewind(f);
char *script = malloc(fsize + 1);
fread(script, 1, fsize, f);
fclose(f);
script[fsize] = 0;
return script;
}
int main(int argc, char **argv) {
WrenConfiguration config;
wrenInitConfiguration(&config);
config.writeFn = &writeFn;
config.errorFn = &errorFn;
config.bindForeignClassFn = &bindForeignClass;
config.bindForeignMethodFn = &bindForeignMethod;
WrenVM* vm = wrenNewVM(&config);
const char* module = "main";
const char* fileName = "HTTPS.wren";
char *script = readFile(fileName);
WrenInterpretResult result = wrenInterpret(vm, module, script);
switch (result) {
case WREN_RESULT_COMPILE_ERROR:
printf("Compile Error!\n");
break;
case WREN_RESULT_RUNTIME_ERROR:
printf("Runtime Error!\n");
break;
case WREN_RESULT_SUCCESS:
break;
}
wrenFreeVM(vm);
free(script);
return 0;
}
zkl
Using the cURL library to do the heavy lifting:
zkl: var ZC=Import("zklCurl")
zkl: var data=ZC().get("https://sourceforge.net")
L(Data(36,265),826,0)
get returns the text of the response along with two counts: the bytes of header in front of the html code and the byte count of stuff after the end of the page. So, if you wanted to look at the header:
zkl: data[0][0,data[1]).text
HTTP/1.1 200 OK
Server: nginx
Date: Sun, 23 Mar 2014 07:36:51 GMT
Content-Type: text/html; charset=utf-8
Connection: close
...
or some of the html:
zkl: data[0][data[1],200).text
<!doctype html>
<!-- Server: sfs-consume-8 -->
<!--[if lt IE 7 ]> <html lang="en" class="no-js ie6"> <![endif]-->
<!--[if IE 7 ]> <html lang="en" class="no-js ie7"> <![endif]-->
<!--[if IE 8 ]>