Fix TAR parsing by using a custom lib written based on a public lib
parent 8b3659fefa
commit 75ec80ee6c
3 changed files with 439 additions and 247 deletions

@@ -245,16 +245,10 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
       return;
     }
 
-    dataFileService.arrayToString(plain, function(result) {
-      if (result) {
-        dataFileService.tryAsTarGzWithStringData_(result, success, failure);
-      } else {
-        failure();
-      }
-    });
+    dataFileService.tryAsTar_(plain, success, failure);
   };
 
-  dataFileService.tryAsTarGzWithStringData_ = function(strData, success, failure) {
+  dataFileService.tryAsTar_ = function(buf, success, failure) {
     var collapsePath = function(originalPath) {
       // Tar files can contain entries of the form './', so we need to collapse
      // those paths down.
@@ -268,33 +262,36 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
       return parts.join('/');
     };
 
-    var handler = new MultiFile();
-    handler.files = [];
-    handler.processTarChunks(strData, 0);
-    if (!handler.files.length) {
-      failure();
-      return;
-    }
-
-    var files = [];
-    for (var i = 0; i < handler.files.length; ++i) {
-      var currentFile = handler.files[i];
-      var path = collapsePath(currentFile.filename);
+    var handler = new Untar(buf);
+    handler.process(function(status, read, files, err) {
+      switch (status) {
+        case 'error':
+          failure(err);
+          break;
 
-      if (path == '') { continue; }
+        case 'done':
+          var processed = [];
+          for (var i = 0; i < files.length; ++i) {
+            var currentFile = files[i];
+            var path = collapsePath(currentFile.meta.filename);
 
-      files.push({
-        'name': dataFileService.getName_(path),
-        'path': path,
-        'canRead': true,
-        'toBlob': (function(currentFile) {
-          return function() {
-            return new Blob([currentFile.data], {type: 'application/octet-binary'});
-          };
-        }(currentFile))
-      });
-    }
-    success(files);
+            if (path == '' || path == 'pax_global_header') { continue; }
+
+            processed.push({
+              'name': dataFileService.getName_(path),
+              'path': path,
+              'canRead': true,
+              'toBlob': (function(currentFile) {
+                return function() {
+                  return new Blob([currentFile.buffer], {type: 'application/octet-binary'});
+                };
+              }(currentFile))
+            });
+          }
+          success(processed);
+          break;
+      }
+    });
   };
 
   dataFileService.blobToString = function(blob, callback) {
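For context, here is a minimal sketch of how a caller might exercise the reworked entry point; the archiveBytes variable (a Uint8Array holding the raw .tar contents) and the console logging are illustrative, not part of this commit:

// Hypothetical caller of the new buffer-based API (archiveBytes is a Uint8Array).
dataFileService.tryAsTar_(archiveBytes, function(files) {
  // On success, each entry exposes name, path, canRead and a toBlob() helper.
  for (var i = 0; i < files.length; ++i) {
    console.log(files[i].path, files[i].toBlob().size);
  }
}, function(err) {
  // On failure, the data could not be parsed as a tar archive.
  console.log('Not a tar archive', err);
});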

static/js/untar.js (new file, 409 lines)

@@ -0,0 +1,409 @@
/**
 * Untar library code based on the tar-async project (MIT License):
 * https://github.com/beatgammit/tar-async
 */

// Production steps of ECMA-262, Edition 5, 15.4.4.18
// Reference: http://es5.github.com/#x15.4.4.18
if (!Array.prototype.forEach) {
  Array.prototype.forEach = function (callback, thisArg) {
    var T, k;

    if (this == null) {
      throw new TypeError(" this is null or not defined");
    }

    // 1. Let O be the result of calling ToObject passing the |this| value as the argument.
    var O = Object(this);

    // 2. Let lenValue be the result of calling the Get internal method of O with the argument "length".
    // 3. Let len be ToUint32(lenValue).
    var len = O.length >>> 0;

    // 4. If IsCallable(callback) is false, throw a TypeError exception.
    // See: http://es5.github.com/#x9.11
    if (typeof callback !== "function") {
      throw new TypeError(callback + " is not a function");
    }

    // 5. If thisArg was supplied, let T be thisArg; else let T be undefined.
    if (arguments.length > 1) {
      T = thisArg;
    }

    // 6. Let k be 0
    k = 0;

    // 7. Repeat, while k < len
    while (k < len) {

      var kValue;

      // a. Let Pk be ToString(k).
      // This is implicit for LHS operands of the in operator
      // b. Let kPresent be the result of calling the HasProperty internal method of O with argument Pk.
      // This step can be combined with c
      // c. If kPresent is true, then
      if (k in O) {

        // i. Let kValue be the result of calling the Get internal method of O with argument Pk.
        kValue = O[k];

        // ii. Call the Call internal method of callback with T as the this value and
        // argument list containing kValue, k, and O.
        callback.call(T, kValue, k, O);
      }
      // d. Increase k by 1.
      k++;
    }
    // 8. return undefined
  };
}


// Polyfill: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/some
if (!Array.prototype.some) {
  Array.prototype.some = function(fun /*, thisArg */)
  {
    'use strict';

    if (this === void 0 || this === null)
      throw new TypeError();

    var t = Object(this);
    var len = t.length >>> 0;
    if (typeof fun !== 'function')
      throw new TypeError();

    var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
    for (var i = 0; i < len; i++)
    {
      if (i in t && fun.call(thisArg, t[i], i, t))
        return true;
    }

    return false;
  };
}


(function() {

  function pad(num, bytes, base) {
    num = num.toString(base || 8);
    return "000000000000".substr(num.length + 12 - bytes) + num;
  }

  /*
  struct posix_header { // byte offset
    char name[100]; // 0
    char mode[8]; // 100
    char uid[8]; // 108
    char gid[8]; // 116
    char size[12]; // 124
    char mtime[12]; // 136
    char chksum[8]; // 148
    char typeflag; // 156
    char linkname[100]; // 157
    char magic[6]; // 257
    char version[2]; // 263
    char uname[32]; // 265
    char gname[32]; // 297
    char devmajor[8]; // 329
    char devminor[8]; // 337
    char prefix[155]; // 345
    // 500
  };
  */

  var headerFormat = [
    {
      'field': 'filename',
      'length': 100,
      'type': 'string'
    },
    {
      'field': 'mode',
      'length': 8,
      'type': 'number'
    },
    {
      'field': 'uid',
      'length': 8,
      'type': 'number'
    },
    {
      'field': 'gid',
      'length': 8,
      'type': 'number'
    },
    {
      'field': 'size',
      'length': 12,
      'type': 'number'
    },
    {
      'field': 'mtime',
      'length': 12,
      'type': 'number'
    },
    {
      'field': 'checksum',
      'length': 8,
      'type': 'number'
    },
    {
      'field': 'type',
      'length': 1,
      'type': 'number'
    },
    {
      'field': 'linkName',
      'length': 100,
      'type': 'string'
    },
    {
      'field': 'ustar',
      'length': 8,
      'type': 'string'
    },
    {
      'field': 'owner',
      'length': 32,
      'type': 'string'
    },
    {
      'field': 'group',
      'length': 32,
      'type': 'string'
    },
    {
      'field': 'majorNumber',
      'length': 8,
      'type': 'number'
    },
    {
      'field': 'minorNumber',
      'length': 8,
      'type': 'number'
    },
    {
      'field': 'filenamePrefix',
      'length': 155,
      'type': 'string'
    },
    {
      'field': 'padding',
      'length': 12
    }
  ];

  function clean(length) {
    var i, buffer = new Buffer(length);
    for (i = 0; i < length; i += 1) {
      buffer[i] = 0;
    }
    return buffer;
  }

  function formatHeader(data) {
    var buffer = [];
    offset = 0;

    headerFormat.forEach(function (value) {
      var v = data[value.field] || "";
      for (var i = 0; i < v.length; ++i) {
        buffer[offset + i] = v[i];
      }
      offset += value.length;
    });

    return buffer;
  }

  var totalRead = 0,
      recordSize = 512,
      fileBuffer,
      leftToRead,
      fileTypes = [
        'normal', 'hard-link', 'symbolic-link', 'character-special', 'block-special', 'directory', 'fifo', 'contiguous-file'
      ];

  function filterDecoder(input) {
    var filter = [];
    if (!input) {
      return [0, 7];
    }

    if (typeof input === 'string') {
      input = [].push(input);
    }

    if (!(input instanceof Array)) {
      console.error('Invalid fileType. Only Arrays or strings are accepted');
      return;
    }

    input.forEach(function (i) {
      var index = fileTypes.indexOf(i);
      if (index < 0) {
        console.error('Filetype not valid. Ignoring input:', i);
        return;
      }

      filter.push(i);
    });

    return filter;
  }

  function readInt(value) {
    return parseInt(value.replace(/^0*/, ''), 8) || 0;
  }

  function readString(buf) {
    var str = '';
    for (var i = 0; i < buf.length; ++i) {
      if (buf[i] == 0) { break; }
      str += String.fromCharCode(buf[i]);
    }
    return str;
  }

  function doHeader(buf, cb) {
    var data = {}, offset = 0, checksum = 0;

    function updateChecksum(value) {
      var i, length;

      for (i = 0, length = value.length; i < length; i += 1) {
        checksum += value.charCodeAt(i);
      }
    }

    headerFormat.some(function (field) {
      var tBuf = buf.subarray(offset, offset + field.length),
          tString = String.fromCharCode.apply(null, tBuf);

      offset += field.length;

      if (field.field === 'ustar' && !/ustar/.test(tString)) {
        // end the loop if not using the extended header
        return true;
      } else if (field.field === 'checksum') {
        updateChecksum('        ');
      } else {
        updateChecksum(tString);
      }

      if (field.type === 'string') {
        data[field.field] = readString(tBuf);
      } else if (field.type === 'number') {
        data[field.field] = readInt(tString);
      }
    });

    if (checksum !== data.checksum) {
      cb.call(this, 'Checksum not equal', checksum, data.checksum);
      return false;
    }

    cb.call(this, null, data, recordSize);
    return true;
  }

  function readTarFile(state, data) {
    var fileBuffer = new Uint8Array(data.size);
    fileBuffer.set(state.buffer.subarray(0, data.size));
    state.files.push({
      'meta': data,
      'buffer': fileBuffer
    });
  }

  function removeTrailingNulls(state) {
    // If we're not an even multiple, account for trailing nulls
    if (state.totalRead % recordSize) {
      var bytesBuffer = recordSize - (state.totalRead % recordSize);

      // If we don't have enough bytes to account for the nulls
      if (state.buffer.length < bytesBuffer) {
        state.totalRead += bytesBuffer;
        return;
      }

      // Throw away trailing nulls
      state.buffer = state.buffer.subarray(bytesBuffer);
      state.totalRead += bytesBuffer;
    }
  }

  function processTar(state) {
    if (state.totalRead == 0) {
      // Remove trailing nulls.
      removeTrailingNulls(state);
    }

    // Check to see if/when we are done.
    if (state.buffer.length < recordSize) {
      state.cb('done', state.totalRead, state.files, null);
      return;
    }

    state.cb('working', state.totalRead, state.files, null);

    doHeader.call(this, state.buffer, function (err, data, rOffset) {
      if (err) {
        if (rOffset === 0) {
          state.cb('done', state.totalRead, state.files, null);
          return;
        }
        return state.cb('error', state.totalRead, state.files, err);
      }

      // Update total; rOffset should always be 512
      state.totalRead += rOffset;
      state.buffer = state.buffer.subarray(rOffset);

      // Read the tar file contents.
      readTarFile(state, data);

      // Update the total and offset.
      state.totalRead += data.size;
      state.buffer = state.buffer.subarray(data.size);

      // Remove trailing nulls.
      removeTrailingNulls(state);

      if (state.buffer.length > 0) {
        setTimeout(function() {
          processTar(state);
        }, 0);
      } else {
        state.cb('done', state.totalRead, state.files, null);
      }
    });
  }

  /*
   * Extract data from an input.
   *
   * @param data The data, in Uint8Array form.
   */
  function Untar(data) {
    this.data = data;
  }

  Untar.prototype.process = function(cb, opt_filter) {
    return processTar({
      'cb': cb,
      'buffer': this.data,
      'fileTypes': filterDecoder(opt_filter || []),
      'totalRead': 0,
      'files': []
    });
  };

  window.Untar = Untar;

})();
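For reference, a minimal usage sketch of the new library on its own, assuming the archive is already held in a Uint8Array (the tarBytes name is illustrative):

// Hypothetical standalone usage of the Untar class added above.
var untar = new Untar(tarBytes);
untar.process(function(status, totalRead, files, err) {
  if (status === 'error') {
    console.log('Tar parsing failed:', err);
  } else if (status === 'done') {
    // Each parsed entry carries its header fields in 'meta' and its payload in 'buffer'.
    for (var i = 0; i < files.length; ++i) {
      console.log(files[i].meta.filename, files[i].meta.size, files[i].buffer.length);
    }
  }
  // status === 'working' is also reported as records are consumed; ignored here.
});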

@@ -1,214 +0,0 @@
/* MultiFile - A JavaScript library to load multiple files from
   tar archives and json_packed files (see http://gist.github.com/407595)

   Example: Loading multiple images from a tarball.

     MultiFile.load('images.tar', function(xhr) {
       this.files.forEach(function(f) {
         var e = document.createElement('div');
         document.body.appendChild(e);
         var p = document.createElement('p');
         p.appendChild(document.createTextNode(f.filename + " (" + f.length + " bytes)"));
         e.appendChild(p);
         var img = new Image();
         img.src = f.toDataURL();
         e.appendChild(img);
       });
     });

   Example 2: Streaming images from a tarball.

     MultiFile.stream('images.tar', function(f) {
       var e = document.createElement('div');
       document.body.appendChild(e);
       var p = document.createElement('p');
       p.appendChild(document.createTextNode(f.filename + " (" + f.length + " bytes)"));
       e.appendChild(p);
       var img = new Image();
       img.src = f.toDataURL();
       e.appendChild(img);
     });


   Copyright (c) 2010 Ilmari Heikkinen

   Permission is hereby granted, free of charge, to any person obtaining a copy
   of this software and associated documentation files (the "Software"), to deal
   in the Software without restriction, including without limitation the rights
   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
   copies of the Software, and to permit persons to whom the Software is
   furnished to do so, subject to the following conditions:

   The above copyright notice and this permission notice shall be included in
   all copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
   THE SOFTWARE.
*/

MultiFile = function(){};

// Load and parse archive, calls onload after loading all files.
MultiFile.load = function(url, onload) {
  var o = new MultiFile();
  o.onload = onload;
  o.load(url);
  return o;
}

// Streams an archive from the given url, calling onstream after loading each file in archive.
// Calls onload after loading all files.
MultiFile.stream = function(url, onstream, onload) {
  var o = new MultiFile();
  o.onload = onload;
  o.onstream = onstream;
  o.load(url);
  return o;
}
MultiFile.prototype = {
  onerror : null,
  onload : null,
  onstream : null,

  load : function(url) {
    var xhr = new XMLHttpRequest();
    var self = this;
    var offset = 0;
    this.files = [];
    var isTar = (/\.tar(\?.*)?$/i).test(url);
    xhr.onreadystatechange = function() {
      if (xhr.readyState == 4) {
        if (xhr.status == 200 || xhr.status == 0) {
          if (isTar)
            offset = self.processTarChunks(xhr.responseText, offset);
          else
            self.processJSONChunks(xhr.responseText);
          if (self.onload)
            self.onload(xhr);
        } else {
          if (self.onerror)
            self.onerror(xhr);
        }
      } else if (xhr.readyState == 3) {
        if (xhr.status == 200 || xhr.status == 0) {
          if (isTar)
            offset = self.processTarChunks(xhr.responseText, offset);
          else
            self.processJSONChunks(xhr.responseText);
        }
      }
    };
    xhr.open("GET", url, true);
    xhr.overrideMimeType("text/plain; charset=x-user-defined");
    xhr.setRequestHeader("Content-Type", "text/plain");
    xhr.send(null);
  },

  onerror : function(xhr) {
    alert("Error: "+xhr.status);
  },

  parseJSON : function(text) {
    this.processJSONChunks(text);
  },
  processJSONChunks : function(text) {
    if (this.files.length == 0) { // processing headers
      var idx = text.indexOf('\n');
      if (idx >= 0) { // got header
        this.files = JSON.parse(text.substring(0,idx));
        this.files.forEach(function(f) { f.offset += idx + 1; })
      }
    }
    if (this.files.length > 0) { // processing data
      var f = null;
      var idx=0;
      for (idx=0; idx<this.files.length; idx++) {
        if (this.files[idx].data == null) {
          f = this.files[idx];
          break;
        }
      }
      while (f && f.data == null && f.offset + f.length <= text.length) {
        f.data = text.substring(f.offset, f.offset + f.length);
        f.toDataURL = this.__toDataURL;
        if (this.onstream) this.onstream(f);
        f = this.files[idx++];
      }
    }
  },

  cleanHighByte : function(s) {
    return s.replace(/./g, function(m) {
      return String.fromCharCode(m.charCodeAt(0) & 0xff);
    });
  },

  parseTar : function(text) {
    this.files = [];
    this.processTarChunks(text, 0);
  },
  processTarChunks : function (responseText, offset) {
    while (responseText.length >= offset + 512) {
      var header = this.files.length == 0 ? null : this.files[this.files.length-1];
      if (header && header.data == null) {
        if (offset + header.length <= responseText.length) {
          header.data = responseText.substring(offset, offset+header.length);
          header.toDataURL = this.__toDataURL;
          offset += 512 * Math.ceil(header.length / 512);
          if (this.onstream)
            this.onstream(header);
        } else { // not loaded yet
          break;
        }
      } else {
        var header = this.parseTarHeader(responseText, offset);
        if (header.length > 0 || header.filename != '') {
          this.files.push(header);
          offset += 512;
          header.offset = offset;
        } else { // empty header, stop processing
          offset = responseText.length;
        }
      }
    }
    return offset;
  },
  parseTarHeader : function(text, offset) {
    var i = offset || 0;
    var h = {};
    h.filename = text.substring(i, i+=100).split("\0", 1)[0];
    h.mode = text.substring(i, i+=8).split("\0", 1)[0];
    h.uid = text.substring(i, i+=8).split("\0", 1)[0];
    h.gid = text.substring(i, i+=8).split("\0", 1)[0];
    h.length = this.parseTarNumber(text.substring(i, i+=12));
    h.lastModified = text.substring(i, i+=12).split("\0", 1)[0];
    h.checkSum = text.substring(i, i+=8).split("\0", 1)[0];
    h.fileType = text.substring(i, i+=1).split("\0", 1)[0];
    h.linkName = text.substring(i, i+=100).split("\0", 1)[0];
    return h;
  },

  parseTarNumber : function(text) {
    return parseInt('0'+text.replace(/[^\d]/g, ''), 8);
  },

  __toDataURL : function() {
    if (this.data.substring(0,40).match(/^data:[^\/]+\/[^,]+,/)) {
      return this.data;
    } else if (MultiFile.prototype.cleanHighByte(this.data.substring(0,10)).match(/\377\330\377\340..JFIF/)) {
      return 'data:image/jpeg;base64,'+btoa(MultiFile.prototype.cleanHighByte(this.data));
    } else if (MultiFile.prototype.cleanHighByte(this.data.substring(0,6)) == "\211PNG\r\n") {
      return 'data:image/png;base64,'+btoa(MultiFile.prototype.cleanHighByte(this.data));
    } else if (MultiFile.prototype.cleanHighByte(this.data.substring(0,6)).match(/GIF8[79]a/)) {
      return 'data:image/gif;base64,'+btoa(MultiFile.prototype.cleanHighByte(this.data));
    } else {
      throw("toDataURL: I don't know how to handle " + this.filename);
    }
  }
}
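Both the removed MultiFile parser and the new untar.js walk the archive with the same 512-byte record geometry: one header record, then the file data rounded up to a whole number of records (processTarChunks above uses 512 * Math.ceil(header.length / 512); untar.js does the equivalent in removeTrailingNulls). A small worked example of that arithmetic, using an illustrative 1300-byte entry:

// Illustrative record math for a 1300-byte tar entry.
var recordSize = 512;
var fileSize = 1300;
var dataRecords = Math.ceil(fileSize / recordSize); // 3 data records
var bytesConsumed = recordSize + dataRecords * recordSize; // 512 header + 1536 data = 2048
console.log(bytesConsumed); // 2048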