Purpose: write a Java program that downloads the URLs specified on the command line in parallel (simultaneously), reporting download progress every second.
My solution follows below; please point out areas for improvement and anything you would redesign.
package org.test.mk.pjwget;
import java.io.*;
import java.net.URL;
import java.net.URLConnection;
import java.text.SimpleDateFormat;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.CountingOutputStream;
/**
 * Downloads a single URL to a local file on a dedicated worker thread.
 *
 * <p>NOTE(review): the constructor opens the HTTP connection and starts the
 * worker thread, letting {@code this} escape before construction completes.
 * It works here because the thread only reads fields assigned beforehand,
 * but a separate {@code start()} method would be the safer design.
 */
class Downloader implements Runnable {
    private final Thread pth;               // worker thread executing run()
    private final long contlen;             // Content-Length from the server, -1 if unknown
    private volatile long copied = 0;       // error sentinel: -1 once the download failed
    private final InputStream inp;          // response body stream
    private final String filepath;          // destination file path
    private final CountingOutputStream cos; // counts bytes as they are written

    /** @return absolute path of the destination file */
    public String getFilepath() {
        return filepath;
    }

    /** @return server-reported content length in bytes, or -1 if unknown */
    public long getContlen() {
        return contlen;
    }

    /** @return true while the download thread is still running */
    public boolean isAlive() {
        return pth.isAlive();
    }

    /**
     * @return bytes written to disk so far, or -1 once an I/O error occurred.
     *     Reads the sentinel into a local first so a concurrent failure in
     *     run() cannot be overwritten by the live byte count (the original
     *     check-then-assign could lose the -1).
     */
    public long getCopied() {
        long c = copied;
        if (c > -1) {
            return cos.getByteCount();
        }
        return c;
    }

    /** Blocks until the download thread terminates. */
    public void join() throws InterruptedException {
        pth.join();
    }

    /**
     * Opens the connection, creates the destination file, and immediately
     * starts the download thread.
     *
     * @param url      source URL
     * @param filepath absolute path of the destination file
     * @throws IOException if the connection or destination file cannot be opened
     */
    Downloader(String url, String filepath) throws IOException {
        URL u = new URL(url);
        URLConnection conn = u.openConnection();
        contlen = conn.getContentLengthLong(); // -1 when no Content-Length header
        inp = conn.getInputStream();
        this.filepath = filepath;
        cos = new CountingOutputStream(new FileOutputStream(filepath));
        pth = new Thread(this, url); // thread named after the URL for diagnostics
        pth.start();
    }

    /**
     * Copies the response body to the destination file. On failure the byte
     * counter is set to the -1 sentinel so watchers report "E". Both streams
     * are always closed.
     */
    @Override
    public void run() {
        try {
            IOUtils.copy(inp, cos);
            cos.flush();
        } catch (IOException e) {
            // Flag the failure for getCopied(). The original code also called
            // pth.join() here — i.e. the thread joined ITSELF, which blocks
            // forever and left failed downloads "alive" indefinitely.
            copied = -1;
        } finally {
            // Fix: the original never closed either stream (fd leak).
            try {
                inp.close();
            } catch (IOException ignored) {
                // best effort — the download result is already recorded
            }
            try {
                cos.close();
            } catch (IOException ignored) {
                // best effort — CountingOutputStream keeps its count after close
            }
        }
    }
}
/**
 * Watches a set of running {@link Downloader}s on its own thread, printing a
 * header row of file names and then one progress line per second (drift-free)
 * until every download has finished.
 */
class DownloadWatch implements Runnable {
    // Thread-safe and reusable, unlike SimpleDateFormat; cached per best practice.
    private static final DateTimeFormatter TIME_FMT = DateTimeFormatter.ofPattern("HH:mm:ss ");

    final Thread pth;                       // watcher thread executing run()
    final HashMap<String, Downloader> ths;  // url -> its downloader
    final ArrayList<String> urls;           // fixed column order for the report

    /**
     * Starts watching immediately.
     *
     * @param dmap url -> already-started Downloader
     */
    DownloadWatch(HashMap<String, Downloader> dmap) {
        ths = dmap;
        urls = new ArrayList<String>(dmap.keySet()); // snapshot iteration order once
        pth = new Thread(this, String.format("downloadWatch_%s", dmap.toString()));
        pth.start();
    }

    /**
     * Polls every ~100ms but only prints when a full second has elapsed,
     * advancing the target time by exactly one second each tick so the
     * report does not drift. Exits once all downloaders have terminated.
     */
    @Override
    public void run() {
        printHeader(ths);
        long now = System.nanoTime(); // monotonic interval clock
        for (;;) {
            try {
                Thread.sleep(101);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag
                return;
            }
            long t = System.nanoTime();
            if (t - now < 900000000L) {
                continue; // less than ~0.9s since the last tick target
            }
            // Next tick target = previous target + 1s (compensates for sleep jitter).
            now = System.nanoTime() - (t - now) + 1000000000L;
            printCompleted(ths);
            boolean allfinished = true;
            for (Downloader d : ths.values()) {
                if (d.isAlive()) {
                    allfinished = false;
                    break;
                }
            }
            if (allfinished) {
                return;
            }
        }
    }

    /**
     * Prints one timestamped progress line: "E" for a failed download, "?"
     * when the server sent no Content-Length, otherwise a percentage.
     */
    private void printCompleted(HashMap<String, Downloader> ths) {
        // print(), not printf(): never pass runtime data as a format string.
        System.out.print(LocalTime.now().format(TIME_FMT));
        for (String url : urls) {
            Downloader d = ths.get(url);
            long contlen = d.getContlen();
            long copied = d.getCopied();
            if (copied == -1) {
                // Error checked first: the original hid failures behind "?"
                // whenever the content length was also unknown.
                System.out.print("E ");
            } else if (contlen == -1) {
                System.out.print("? ");
            } else if (contlen == 0) {
                System.out.print("100% "); // empty file: avoid 0/0 printing "NaN%"
            } else {
                // Plain cast instead of the deprecated new Long(x).doubleValue().
                System.out.printf("%.0f%% ", 100.0 * copied / contlen);
            }
        }
        System.out.println();
    }

    /** Prints the column headers: one destination file name per downloader. */
    private void printHeader(HashMap<String, Downloader> ths) {
        for (String url : urls) {
            System.out.printf("%s ", FilenameUtils.getName(ths.get(url).getFilepath()));
        }
        System.out.println();
    }

    /** Blocks until the watcher thread terminates. */
    public void join() throws InterruptedException {
        pth.join();
    }
}
/**
 * Entry point: downloads every URL given on the command line into the current
 * working directory in parallel, printing per-second progress via
 * {@link DownloadWatch}.
 */
public class ParallelWget {
    private final String[] urls;   // URLs to download, in command-line order
    private final String dirname;  // destination directory for all files

    /**
     * @param dirname destination directory
     * @param urllist URLs to download
     */
    ParallelWget(String dirname, String[] urllist) {
        this.urls = urllist;
        this.dirname = dirname;
    }

    /**
     * Maps each URL to a unique absolute destination path under dirname.
     * URLs whose path has no file-name component (e.g. ending in '/') are
     * skipped with a warning on stderr — the original dropped them silently.
     *
     * @return url -> unique absolute file path
     */
    private HashMap<String, String> getFilePaths(String dirname, String[] urllist) {
        int cnt = 1;
        HashMap<String, String> fpaths = new HashMap<String, String>();
        for (String url : urllist) {
            String name = FilenameUtils.getName(url);
            if (name.equals("")) {
                System.err.printf("URL %s has no file-name component, skipping.%n", url);
                continue;
            }
            String base = new File(dirname, name).getAbsolutePath();
            String fpt = base;
            // Fix: the original renamed only ONCE, so a second collision
            // (e.g. three URLs named "index.html") silently reused a path
            // and two downloads clobbered the same file. Loop until unique.
            while (fpaths.containsValue(fpt)) {
                fpt = String.format("%s_%d", base, cnt++);
            }
            fpaths.put(url, fpt);
        }
        return fpaths;
    }

    /** Downloads all URLs from argv into the current working directory. */
    public static void main(String[] argv) {
        String cwd = System.getProperty("user.dir");
        ParallelWget pw = new ParallelWget(cwd, argv);
        pw.downloadWatch();
    }

    /**
     * Starts one Downloader per URL plus the progress watcher, then waits
     * for everything to finish before printing the footer.
     */
    private void downloadWatch() {
        HashMap<String, String> urlfiles = getFilePaths(dirname, this.urls);
        HashMap<String, Downloader> ths = makeThreads(urlfiles);
        DownloadWatch dw = new DownloadWatch(ths);
        for (Downloader th : ths.values()) {
            try {
                th.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag
            }
        }
        try {
            dw.join();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
        }
        printFooter(urlfiles);
    }

    /** Prints one "- " terminator per attempted download, closing the table. */
    private void printFooter(HashMap<String, String> urlfiles) {
        for (String url : urlfiles.keySet()) {
            System.out.printf("- ");
        }
        System.out.println();
    }

    /**
     * Starts one Downloader per (url, path) pair; URLs whose connection or
     * destination file cannot be opened are reported and skipped.
     *
     * @return url -> running Downloader (only the successfully started ones)
     */
    private HashMap<String, Downloader> makeThreads(HashMap<String, String> urlfiles) {
        HashMap<String, Downloader> ths = new HashMap<String, Downloader>();
        // entrySet() avoids the redundant get() per key of the original.
        for (Map.Entry<String, String> e : urlfiles.entrySet()) {
            String url = e.getKey();
            Downloader d;
            try {
                d = new Downloader(url, e.getValue());
            } catch (IOException ex) {
                // %n is the platform-correct line terminator for printf.
                System.out.printf("Problem with URL %s : %s. Skipping URL.%n", url, ex.getMessage());
                continue;
            }
            ths.put(url, d);
        }
        return ths;
    }
}