Jump to content

Eban numbers: Difference between revisions

m
Line 657:
{{trans|Julia}}
<lang haskell>{-# LANGUAGE NumericUnderscores #-}
import Data.List (find)
import Text.Printf (printf)
 
isEban :: Int -> Bool
Line 665 ⟶ 667:
(t, r3) = r2 `quotRem` (10 ^ 3)
z = b : map (\x -> if x >= 30 && x <= 66 then x `mod` 10 else x) [m, t, r3]
 
-- | For each query value n, pair n with the count of eban numbers
-- less than or equal to n.
ebans :: [Int] -> [(Int, Int)]
ebans queries = concatMap tally queries
  where
    -- Infinite stream of eban numbers tagged with their 1-based index.
    indexedEbans = zip [1 ..] (filter isEban [1 ..])
    -- Locate the first eban number strictly greater than q; the index
    -- just before it is the count of ebans up to and including q.
    tally q =
      case find (\(_, e) -> q < e) indexedEbans of
        Just (idx, _) -> [(q, idx - 1)]
        Nothing       -> []
 
-- | Report eban-number statistics: the full lists up to 1000 and between
-- 1000 and 4000, then counts for each power-of-ten limit up to a billion.
-- (Reconstructed: the previous text was a wiki diff with old and new
-- revision lines fused together, and was not valid Haskell.)
main :: IO ()
main = do
  uncurry (printf "eban numbers up to and including 1000: %2d\n%s\n\n") $ r [1 .. 1000]
  uncurry (printf "eban numbers between 1000 and 4000: %2d\n%s\n\n") $ r [1000 .. 4000]
  mapM_ (uncurry (printf "eban numbers up and including %10d: %4d\n")) ebanCounts
  where
    -- Counts of eban numbers at each power-of-ten limit.
    ebanCounts = ebans [ 10_000
                       , 100_000
                       , 1_000_000
                       , 10_000_000
                       , 100_000_000
                       , 1_000_000_000 ]
    -- Filter a range down to its eban numbers, returning the count
    -- paired with a rendering of the list (for the %2d / %s format).
    r = ((,) <$> length <*> show) . filter isEban</lang>
{{out}}
<pre>
eban numbers up to and including 1000: 19
[2,4,6,30,32,34,36,40,42,44,46,50,52,54,56,60,62,64,66]
 
eban numbers between 1000 and 4000: 21
[2000,2002,2004,2006,2030,2032,2034,2036,2040,2042,2044,2046,2050,2052,2054,2056,2060,2062,2064,2066,4000]
 
eban numbers up and including     10,000:   79
eban numbers up and including    100,000:  399
eban numbers up and including  1,000,000:  399
eban numbers up and including 10,000,000: 1599
eban numbers up and including 100,000,000: 7999
eban numbers up and including 1,000,000,000: 7999
</pre>
 
Anonymous user
Cookies help us deliver our services. By using our services, you agree to our use of cookies.