Fix TAR parsing by using a custom lib based on a public lib
parent 8b3659fefa
commit 75ec80ee6c
3 changed files with 439 additions and 247 deletions
@@ -245,16 +245,10 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
       return;
     }
 
-    dataFileService.arrayToString(plain, function(result) {
-      if (result) {
-        dataFileService.tryAsTarGzWithStringData_(result, success, failure);
-      } else {
-        failure();
-      }
-    });
+    dataFileService.tryAsTar_(plain, success, failure);
   };
 
-  dataFileService.tryAsTarGzWithStringData_ = function(strData, success, failure) {
+  dataFileService.tryAsTar_ = function(buf, success, failure) {
     var collapsePath = function(originalPath) {
       // Tar files can contain entries of the form './', so we need to collapse
      // those paths down.
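Note on the hunk above: the old code converted the inflated byte array to a string with arrayToString and then fed that string to the string-based helper, while the new code hands the byte buffer straight to the buffer-based tryAsTar_. A minimal sketch of the calling side after this change; the wrapper name and the inflate step are assumptions based on the context lines, not part of this hunk:

// Sketch only: 'plain' is assumed to be the already-inflated byte array
// produced earlier by the (hypothetical) gzip-handling caller.
function handleInflatedTar(plain, success, failure) {
  // Old flow (removed above): bytes -> string -> tryAsTarGzWithStringData_.
  //   dataFileService.arrayToString(plain, function(result) {
  //     result ? dataFileService.tryAsTarGzWithStringData_(result, success, failure)
  //            : failure();
  //   });
  // New flow: pass the raw buffer directly to the tar parser.
  dataFileService.tryAsTar_(plain, success, failure);
}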
@@ -268,33 +262,36 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
       return parts.join('/');
     };
 
-    var handler = new MultiFile();
-    handler.files = [];
-    handler.processTarChunks(strData, 0);
-    if (!handler.files.length) {
-      failure();
-      return;
-    }
-
-    var files = [];
-    for (var i = 0; i < handler.files.length; ++i) {
-      var currentFile = handler.files[i];
-      var path = collapsePath(currentFile.filename);
-
-      if (path == '') { continue; }
-
-      files.push({
-        'name': dataFileService.getName_(path),
-        'path': path,
-        'canRead': true,
-        'toBlob': (function(currentFile) {
-          return function() {
-            return new Blob([currentFile.data], {type: 'application/octet-binary'});
-          };
-        }(currentFile))
-      });
-    }
-    success(files);
+    var handler = new Untar(buf);
+    handler.process(function(status, read, files, err) {
+      switch (status) {
+        case 'error':
+          failure(err);
+          break;
+
+        case 'done':
+          var processed = [];
+          for (var i = 0; i < files.length; ++i) {
+            var currentFile = files[i];
+            var path = collapsePath(currentFile.meta.filename);
+
+            if (path == '' || path == 'pax_global_header') { continue; }
+
+            processed.push({
+              'name': dataFileService.getName_(path),
+              'path': path,
+              'canRead': true,
+              'toBlob': (function(currentFile) {
+                return function() {
+                  return new Blob([currentFile.buffer], {type: 'application/octet-binary'});
+                };
+              }(currentFile))
+            });
+          }
+          success(processed);
+          break;
+      }
+    });
   };
 
   dataFileService.blobToString = function(blob, callback) {
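Note on the hunk above: the string-based MultiFile parser (handler.files / processTarChunks) is replaced by the bundled Untar parser, which reads the raw buffer and reports results through a callback; the new code also skips the pax_global_header pseudo-entry that PAX-format archives emit. The usage sketch below is inferred from the added lines, not from the library's documentation, so treat the callback parameters and entry fields as assumptions:

// Usage sketch of the new Untar parser, inferred from the diff above; only the
// 'error' and 'done' statuses handled in the diff are shown.
var handler = new Untar(buf);            // buf: raw (already gunzipped) tar bytes
handler.process(function(status, read, files, err) {
  if (status == 'error') {
    console.log('tar parse failed: ' + err);
  } else if (status == 'done') {
    // Each entry exposes meta.filename (its path inside the archive) and
    // buffer (the file contents, usable directly as a Blob part).
    files.forEach(function(entry) {
      console.log(entry.meta.filename, new Blob([entry.buffer]).size);
    });
  }
});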