Do not print URLs to stdout:

* src/UrlRequestInfo.cc
Tatsuhiro Tsujikawa 2007-03-24 15:15:51 +00:00
parent a37aaa9c0c
commit f9ac6bb6fd
3 changed files with 97 additions and 94 deletions

ChangeLog

@ -25,6 +25,9 @@
Throw exception if --check-integrity=true is specified but chunk
checksums are not provided:
* src/UrlRequestInfo.cc
Do not print URLs to stdout:
* src/UrlRequestInfo.cc
2007-03-21 Tatsuhiro Tsujikawa <tujikawa at rednoah dot com>
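The "Throw exception if --check-integrity=true" entry above summarizes the behavioural change carried by the src/UrlRequestInfo.cc hunks further down: when integrity checking is requested but no chunk checksums are available, the download is aborted with DlAbortEx("Chunk checksums are not provided.") instead of continuing silently. A minimal standalone sketch of that guard pattern, with hypothetical names (validateIntegritySetup, checkIntegrityRequested, checksumsAvailable) and std::runtime_error standing in for aria2's exception hierarchy:

#include <stdexcept>

// Hypothetical illustration of the guard this commit adds: integrity checking
// was requested on the command line, but there is nothing to check against,
// so the download is aborted up front rather than started.
void validateIntegritySetup(bool checkIntegrityRequested, bool checksumsAvailable) {
  if(checkIntegrityRequested && !checksumsAvailable) {
    // aria2 throws a heap-allocated DlAbortEx here; a plain std::runtime_error
    // keeps this sketch self-contained.
    throw std::runtime_error("Chunk checksums are not provided.");
  }
}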

TODO

@ -25,8 +25,6 @@
* Add an ability of seeding
* Rewrite HttpConnection::receiveResponse() using {i,o}stringstream
* -c command line option to continue the download of existing file assuming
that it was downloaded from the beginning.
* Continue file allocation with existing file
* Add usage message for -c command line option
* Netrc, mode 600, enabled in ftp, http, all
* preallocate file in MultiDiskAdaptor

src/UrlRequestInfo.cc

@ -122,7 +122,7 @@ public:
 void UrlRequestInfo::printUrls(const Strings& urls) const {
   for(Strings::const_iterator itr = urls.begin(); itr != urls.end(); itr++) {
-    logger->notice("Adding URL: %s", itr->c_str());
+    logger->info("Adding URL: %s", itr->c_str());
   }
 }
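The hunk above is the "Do not print URLs to stdout" change itself: the per-URL message drops from the notice level, which reaches the console, to the info level, which only goes to the log. A minimal sketch of that kind of level split, using a hypothetical ConsoleLogger rather than aria2's actual Logger interface:

#include <cstdarg>
#include <cstdio>

// Hypothetical logger: notice() is echoed to stdout as well as the log file,
// info() is written to the log file only.  Demoting "Adding URL" from notice
// to info is what removes the URLs from console output.
class ConsoleLogger {
public:
  explicit ConsoleLogger(std::FILE* logFile) : logFile_(logFile) {}

  void notice(const char* fmt, ...) {
    std::va_list ap;
    va_start(ap, fmt);
    writeTo(stdout, fmt, ap);      // visible on the console
    va_end(ap);
    va_start(ap, fmt);
    writeTo(logFile_, fmt, ap);    // and kept in the log
    va_end(ap);
  }

  void info(const char* fmt, ...) {
    std::va_list ap;
    va_start(ap, fmt);
    writeTo(logFile_, fmt, ap);    // log file only; nothing reaches stdout
    va_end(ap);
  }

private:
  static void writeTo(std::FILE* out, const char* fmt, std::va_list ap) {
    std::vfprintf(out, fmt, ap);
    std::fputc('\n', out);
  }

  std::FILE* logFile_;
};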
@ -142,15 +142,12 @@ HeadResultHandle UrlRequestInfo::getHeadResult() {
   SharedHandle<ConsoleDownloadEngine> e(DownloadEngineFactory::newConsoleEngine(op, requests, reserved));
   HeadResultHandle hr = 0;
-  try {
-    e->run();
-    hr = new HeadResult();
-    hr->filename = e->segmentMan->filename;
-    hr->totalLength = e->segmentMan->totalSize;
-  } catch(RecoverableException *ex) {
-    logger->error("Exception caught", ex);
-    delete ex;
-  }
+  e->run();
+  hr = new HeadResult();
+  hr->filename = e->segmentMan->filename;
+  hr->totalLength = e->segmentMan->totalSize;
   return hr;
 }
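The getHeadResult() hunk above stops catching RecoverableException inside the helper; after this change the exception escapes to the caller, and the rewritten execute() below wraps the call in its own try block, so a failed preliminary request aborts the download instead of returning a half-initialized HeadResult. A minimal sketch of moving error handling from the helper to its single caller, with a hypothetical fetchHead() standing in for getHeadResult():

#include <cstdio>
#include <stdexcept>
#include <string>

struct HeadResult {
  std::string filename;
  long long totalLength;
};

// Before this commit the equivalent helper caught the exception itself and
// returned a default-constructed result; now the exception propagates.
HeadResult fetchHead(bool simulateFailure) {
  if(simulateFailure) {
    throw std::runtime_error("could not determine file length");
  }
  HeadResult hr;
  hr.filename = "file.bin";
  hr.totalLength = 1024;
  return hr;
}

int main() {
  try {
    HeadResult hr = fetchHead(/*simulateFailure=*/true);
    std::printf("filename=%s length=%lld\n", hr.filename.c_str(), hr.totalLength);
  } catch(const std::exception& ex) {
    // Single caller-side handler: the whole operation is aborted here instead
    // of continuing with bogus metadata.
    std::fprintf(stderr, "Download aborted: %s\n", ex.what());
    return 1;
  }
  return 0;
}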
@ -160,104 +157,109 @@ RequestInfos UrlRequestInfo::execute() {
Requests reserved;
printUrls(urls);
HeadResultHandle hr = getHeadResult();
for_each(urls.begin(), urls.end(),
CreateRequest(&requests,
op->get(PREF_REFERER),
op->getAsInt(PREF_SPLIT)));
logger->info("Head result: filename=%s, total length=%s",
hr->filename.c_str(), Util::ullitos(hr->totalLength, true).c_str());
adjustRequestSize(requests, reserved, maxConnections);
SharedHandle<ConsoleDownloadEngine> e(DownloadEngineFactory::newConsoleEngine(op, requests, reserved));
e->segmentMan->filename = hr->filename;
e->segmentMan->totalSize = hr->totalLength;
if(hr->totalLength > 0) {
e->segmentMan->downloadStarted = true;
}
#ifdef ENABLE_MESSAGE_DIGEST
if(chunkChecksumLength > 0) {
e->segmentMan->digestAlgo = digestAlgo;
e->segmentMan->chunkHashLength = chunkChecksumLength;
e->segmentMan->pieceHashes = chunkChecksums;
}
#endif // ENABLE_MESSAGE_DIGEST
if(op->get(PREF_CONTINUE) == V_TRUE && e->segmentMan->fileExists()) {
if(e->segmentMan->totalSize == 0) {
logger->notice("Cannot get file length. Download aborted.");
RequestInfo* next = 0;
try {
HeadResultHandle hr = getHeadResult();
if(hr.isNull()) {
logger->notice("No URI to download. Download aborted.");
return RequestInfos();
}
File existingFile(e->segmentMan->getFilePath());
if(e->segmentMan->totalSize < existingFile.size()) {
logger->notice("The local file length is larger than the remote file size. Download aborted.");
return RequestInfos();
logger->info("Head result: filename=%s, total length=%s",
hr->filename.c_str(), Util::ullitos(hr->totalLength, true).c_str());
for_each(urls.begin(), urls.end(),
CreateRequest(&requests,
op->get(PREF_REFERER),
op->getAsInt(PREF_SPLIT)));
adjustRequestSize(requests, reserved, maxConnections);
SharedHandle<ConsoleDownloadEngine> e(DownloadEngineFactory::newConsoleEngine(op, requests, reserved));
e->segmentMan->filename = hr->filename;
e->segmentMan->totalSize = hr->totalLength;
if(hr->totalLength > 0) {
e->segmentMan->downloadStarted = true;
}
e->segmentMan->initBitfield(e->option->getAsInt(PREF_SEGMENT_SIZE),
e->segmentMan->totalSize);
e->segmentMan->diskWriter->openExistingFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
if(e->option->get(PREF_CHECK_INTEGRITY) == V_TRUE) {
#ifdef ENABLE_MESSAGE_DIGEST
if(!e->segmentMan->isChunkChecksumValidationReady()) {
throw new DlAbortEx("Chunk checksums are not provided.");
}
e->segmentMan->markAllPiecesDone();
e->segmentMan->checkIntegrity();
#endif // ENABLE_MESSAGE_DIGEST
} else {
e->segmentMan->markPieceDone(existingFile.size());
}
} else if(e->segmentMan->segmentFileExists()) {
e->segmentMan->load();
e->segmentMan->diskWriter->openExistingFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
#ifdef ENABLE_MESSAGE_DIGEST
if(op->get(PREF_CHECK_INTEGRITY) == V_TRUE) {
if(!e->segmentMan->isChunkChecksumValidationReady()) {
throw new DlAbortEx("Chunk checksums are not provided.");
}
e->segmentMan->checkIntegrity();
if(chunkChecksumLength > 0) {
e->segmentMan->digestAlgo = digestAlgo;
e->segmentMan->chunkHashLength = chunkChecksumLength;
e->segmentMan->pieceHashes = chunkChecksums;
}
#endif // ENABLE_MESSAGE_DIGEST
} else {
if(e->segmentMan->shouldCancelDownloadForSafety()) {
throw new FatalException(EX_FILE_ALREADY_EXISTS,
e->segmentMan->getFilePath().c_str(),
e->segmentMan->getSegmentFilePath().c_str());
}
if(e->segmentMan->totalSize > 0) {
if(op->get(PREF_CONTINUE) == V_TRUE && e->segmentMan->fileExists()) {
if(e->segmentMan->totalSize == 0) {
logger->notice("Cannot get file length. Download aborted.");
return RequestInfos();
}
File existingFile(e->segmentMan->getFilePath());
if(e->segmentMan->totalSize < existingFile.size()) {
logger->notice("The local file length is larger than the remote file size. Download aborted.");
return RequestInfos();
}
e->segmentMan->initBitfield(e->option->getAsInt(PREF_SEGMENT_SIZE),
e->segmentMan->totalSize);
if(e->segmentMan->fileExists() && e->option->get(PREF_CHECK_INTEGRITY) == V_TRUE) {
e->segmentMan->diskWriter->openExistingFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
if(e->option->get(PREF_CHECK_INTEGRITY) == V_TRUE) {
#ifdef ENABLE_MESSAGE_DIGEST
if(!e->segmentMan->isChunkChecksumValidationReady()) {
throw new DlAbortEx("Chunk checksums are not provided.");
}
#endif // ENABLE_MESSAGE_DIGEST
e->segmentMan->diskWriter->openExistingFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
#ifdef ENABLE_MESSAGE_DIGEST
e->segmentMan->markAllPiecesDone();
e->segmentMan->checkIntegrity();
#endif // ENABLE_MESSAGE_DIGEST
} else {
e->segmentMan->diskWriter->initAndOpenFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
e->segmentMan->markPieceDone(existingFile.size());
}
} else if(e->segmentMan->segmentFileExists()) {
e->segmentMan->load();
e->segmentMan->diskWriter->openExistingFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
#ifdef ENABLE_MESSAGE_DIGEST
if(op->get(PREF_CHECK_INTEGRITY) == V_TRUE) {
if(!e->segmentMan->isChunkChecksumValidationReady()) {
throw new DlAbortEx("Chunk checksums are not provided.");
}
e->segmentMan->checkIntegrity();
}
#endif // ENABLE_MESSAGE_DIGEST
} else {
if(e->segmentMan->shouldCancelDownloadForSafety()) {
throw new FatalException(EX_FILE_ALREADY_EXISTS,
e->segmentMan->getFilePath().c_str(),
e->segmentMan->getSegmentFilePath().c_str());
}
if(e->segmentMan->totalSize > 0) {
e->segmentMan->initBitfield(e->option->getAsInt(PREF_SEGMENT_SIZE),
e->segmentMan->totalSize);
if(e->segmentMan->fileExists() && e->option->get(PREF_CHECK_INTEGRITY) == V_TRUE) {
#ifdef ENABLE_MESSAGE_DIGEST
if(!e->segmentMan->isChunkChecksumValidationReady()) {
throw new DlAbortEx("Chunk checksums are not provided.");
}
#endif // ENABLE_MESSAGE_DIGEST
e->segmentMan->diskWriter->openExistingFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
#ifdef ENABLE_MESSAGE_DIGEST
e->segmentMan->markAllPiecesDone();
e->segmentMan->checkIntegrity();
#endif // ENABLE_MESSAGE_DIGEST
} else {
e->segmentMan->diskWriter->initAndOpenFile(e->segmentMan->getFilePath(),
e->segmentMan->totalSize);
}
}
}
}
Util::setGlobalSignalHandler(SIGINT, handler, 0);
Util::setGlobalSignalHandler(SIGTERM, handler, 0);
RequestInfo* next = 0;
try {
Util::setGlobalSignalHandler(SIGINT, handler, 0);
Util::setGlobalSignalHandler(SIGTERM, handler, 0);
e->run();
if(e->segmentMan->finished()) {