File size distribution: Difference between revisions
Content added Content deleted
(Added Go) |
|||
Line 467: | Line 467: | ||
135229 total files.</pre> |
135229 total files.</pre> |
||
=={{header|Phix}}== |
|||
Works on Windows and Linux. Uses "proper" sizes, i.e. 1MB==1024KB. Can be quite slow at first, but is pretty fast on the second and subsequent runs, that is once the OS has cached its (low-level) directory reads.
|||
<lang Phix>-- File size distribution: count files under "." into power-of-ten
-- size buckets (1, 10, 100 bytes, 1KB, 10KB, ... using 1KB==1024 bytes)
-- and print a simple star histogram of the counts.
sequence sizes = {1},   -- bucket upper bounds, grown on demand
         res = {0}      -- per-bucket file counts, parallel to sizes
atom t1 = time()+1      -- next time to refresh the progress line

-- walk_dir callback: buckets one directory entry by size.
-- filepath is unused; dir_entry is a standard dir() entry.
-- Returns 0 so that walk_dir continues the traversal.
function store_res(string filepath, sequence dir_entry)
    if not find('d', dir_entry[D_ATTRIBUTES]) then  -- skip directories
        atom size = dir_entry[D_SIZE]
        integer sdx = 1
        -- find (extending as needed) the first bucket that holds size;
        -- every third step multiplies by 10.24 instead of 10 so the
        -- sequence runs 1,10,100,1024(=1KB),10KB,100KB,1MB,...
        while size>sizes[sdx] do
            if sdx=length(sizes) then
                sizes &= sizes[$]*iff(mod(length(sizes),3)?10:10.24)
                res &= 0
            end if
            sdx += 1
        end while
        res[sdx] += 1
        if time()>t1 then
            -- once-a-second progress report; \r overwrites in place
            printf(1,"%,d files found\r",sum(res))
            t1 = time()+1
        end if
    end if
    return 0 -- keep going
end function

integer exit_code = walk_dir(".", routine_id("store_res"), true)
printf(1,"%,d files found\n",sum(res))
integer w = max(res)    -- largest bucket, used to scale the bars
include builtins/pfile.e
for i=1 to length(res) do
    integer ri = res[i]
    string s = file_size_k(sizes[i], 5),            -- human-readable bound
           p = repeat('*',floor(60*ri/w))           -- bar scaled to 60 cols
    printf(1,"files < %s: %s%,d\n",{s,p,ri})
end for</lang>
|||
{{out}}
<pre>
112,160 files found
files < 1: 333
files < 10: *911
files < 100: ******4,731
files < 1KB: ********************************24,332
files < 10KB: ************************************************************45,379
files < 100KB: *********************************25,299
files < 1MB: *************10,141
files < 10MB: *933
files < 100MB: 91
files < 1GB: 8
files < 10GB: 2
</pre>
|||
=={{header|Python}}== |
=={{header|Python}}== |