PathTree should now go from content sets to payload, and vice versa.

Vincent Batts 2012-10-31 12:44:24 -04:00
parent 27fd63294c
commit c053cae590
3 changed files with 429 additions and 377 deletions
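
A minimal round-trip sketch, using only the constructors and helpers visible in this diff (PathTree(List&lt;String&gt;), getRootPathNode(), Util.printTree(), and the now-static Util.hydrateContentPackage()); the sample paths and the loadPayloadSomehow() helper are illustrative only, not part of the change:

import java.util.Arrays;
import java.util.List;
import com.redhat.trie.PathNode;
import com.redhat.trie.PathTree;
import com.redhat.trie.PayloadException;
import com.redhat.trie.Util;

public class RoundTripSketch {
    public static void main(String[] args) throws PayloadException {
        List<String> contentSets = Arrays.asList(
                "/content/dist/rhel/server/6/6Server/x86_64/os",
                "/content/dist/rhel/server/6/6Server/x86_64/debug");

        // content sets -> PathTree; setContentSets() builds the compressed payload internally
        PathTree fromSets = new PathTree(contentSets);
        Util.printTree(fromSets.getRootPathNode(), 0);

        // compressed payload -> content sets, e.g. a blob pulled from an entitlement certificate
        byte[] compressedBlob = loadPayloadSomehow(); // hypothetical source of bytes
        List<String> hydrated = Util.hydrateContentPackage(compressedBlob);
        System.out.println(hydrated);
    }

    // placeholder so the sketch compiles; a real blob would come from a certificate OID
    private static byte[] loadPayloadSomehow() {
        return new byte[0];
    }
}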

App.java

@@ -14,15 +14,13 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import com.redhat.trie.PathNode;
import com.redhat.trie.PathTree;
import com.redhat.trie.Util;
import com.redhat.trie.PayloadException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.security.cert.CertificateFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.bouncycastle.asn1.*;
import org.bouncycastle.x509.extension.X509ExtensionUtil;
@@ -73,10 +71,8 @@ public class App {
public static List<String> hydrateFromBytes(byte[] compressedBlob) {
Util util = new Util();
try {
return util.hydrateContentPackage(compressedBlob);
return Util.hydrateContentPackage(compressedBlob);
} catch (PayloadException ex) {
System.out.println(ex);
}
@@ -135,11 +131,13 @@ public class App {
}
public static void showTree(List<String> contentList) {
PathNode root = new PathNode();
Util util = new Util();
util.makePathTree(contentList, root);
Util.printTree(root, 0);
PathTree pt;
try {
pt = new PathTree(contentList);
Util.printTree(pt.getRootPathNode(), 0);
} catch (PayloadException ex) {
System.out.println(ex);
}
}
public static ASN1Encodable objectFromCertOid(String certFilename, String oid) {

PathTree.java

@@ -19,13 +19,19 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.HashMap;
import java.util.StringTokenizer;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.zip.Inflater;
import java.util.zip.InflaterOutputStream;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.DataFormatException;
/**
@@ -39,10 +45,17 @@ public class PathTree {
private List<HuffNode> nodeDictionary;
private List<HuffNode> pathDictionary;
private StringBuffer nodeBits; // TODO make a smart getter for this
private HuffNode nodeTrie;
private byte[] payload; // FIXME - may not be needed
/**
* context incrementor used when building the trees
*/
private NodeContext pathNodeContext;
/**
* context incrementor used when building the trees
*/
private NodeContext huffNodeContext;
/**
@@ -57,15 +70,46 @@
*/
private int nodeCount;
/**
* toggled when either setContentSets or setPayload has been run
*/
private boolean modified;
/**
* Naked Constructor.
*
* Expecting to then run setPayload() or setContentSets() next.
*/
public PathTree() {
}
/**
* Constructor using the compressed byte array payload.
*/
public PathTree(byte[] payload) {
setPayload(payload);
}
/**
* Constructor using the list of content sets.
*
* FIXME - This is a stub.
*/
public PathTree(List<String> contentSets) throws PayloadException {
try {
setContentSets(contentSets);
} catch (PayloadException ex) {
throw ex;
}
}
/**
* set the compressed payload for this PathTree.
*
* See also setContentSets()
*
* This re-initializes this object.
*/
public void setPayload(byte[] payload) {
this.modified = true;
this.nodeBits = null;
@@ -211,7 +255,7 @@ public class PathTree {
* @return the populated HuffNode trie of the PathNode dictionary
* @throws PayloadException if the PathNode dictionary cannot be read from the payload
*/
public HuffNode getPathTrie() throws PayloadException {
private HuffNode getPathTrie() throws PayloadException {
try {
return makeTrie(getPathDictionary());
} catch (PayloadException ex) {
@@ -225,7 +269,7 @@
* @return the populated HuffNode trie of the Node name dictionary
* @throws PayloadException if the Node name dictionary cannot be read from the payload
*/
public HuffNode getNodeTrie() throws PayloadException {
private HuffNode getNodeTrie() throws PayloadException {
try {
return makeTrie(getNodeDictionary());
} catch (PayloadException ex) {
@@ -233,6 +277,9 @@
}
}
/**
* get the root PathNode, of the munged together nodes and dictionary
*/
public PathNode getRootPathNode() throws PayloadException {
// populate the PathNodes so we can rebuild the cool url tree
Set<PathNode> pathNodes;
@@ -260,12 +307,67 @@
return false;
}
/*
* TODO - this is a stub
public String toString() {
return "Dict: " + dict + ", Tree: " + tree;
/**
* consume the list of content sets, and build the compressed payload from them (the inverse of setPayload()).
*
* See also setPayload()
*
* This re-initializes this object.
*/
public void setContentSets(List<String> contentSets) throws PayloadException {
this.modified = true;
this.nodeBits = null;
this.nodeCount = 0;
this.pathNodeContext = new NodeContext();
this.huffNodeContext = new NodeContext();
PathNode treeRoot = makePathTree(contentSets, new PathNode());
List<String> nodeStrings = orderStrings(treeRoot);
if (nodeStrings.size() == 0) {
this.payload = new byte[0];
return;
}
ByteArrayOutputStream data = new ByteArrayOutputStream();
List<HuffNode> stringHuffNodes = getStringNodeList(nodeStrings);
HuffNode stringTrieParent = makeTrie(stringHuffNodes);
try {
data.write(byteProcess(nodeStrings));
} catch (Throwable ex) {
throw new PayloadException();
}
List<PathNode> orderedNodes = orderNodes(treeRoot);
List<HuffNode> pathNodeHuffNodes = getPathNodeNodeList(orderedNodes);
HuffNode pathNodeTrieParent = makeTrie(pathNodeHuffNodes);
try {
data.write(makeNodeDictionary(stringTrieParent,
pathNodeTrieParent, orderedNodes));
} catch (Throwable ex) {
throw new PayloadException();
}
this.payload = data.toByteArray();
this.modified = false;
}
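/*
 * Layout note (derived from the writes above): the payload is the deflated,
 * NUL-separated string dictionary (byteProcess) followed immediately by the
 * node dictionary (makeNodeDictionary): a node count, then Huffman-coded
 * name/connection references for each node's children.
 */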
/**
* populate the parent PathNode, with the Strings in contents
*
* @param contents a list of strings to be consumed
* @param parent a PathNode, will be the root node, to be populated
* @return is the same object as the parent param
*/
public PathNode makePathTree(List<String> contents, PathNode parent) {
PathNode endMarker = new PathNode(new NodeContext());
for (String path : contents) {
StringTokenizer st = new StringTokenizer(path, "/");
makePathForURL(st, parent, endMarker);
}
condenseSubTreeNodes(endMarker);
return parent;
}
*/
private List<String> byteArrayToStringList(byte[] ba) {
List<String> strings = new ArrayList<String>();
@@ -287,7 +389,7 @@
*
* @param nodesList List of individual HuffNode that have been properly weighted
*/
public HuffNode makeTrie(List<HuffNode> nodesList) {
private HuffNode makeTrie(List<HuffNode> nodesList) {
List<HuffNode> trieNodesList = new ArrayList<HuffNode>();
trieNodesList.addAll(nodesList);
@@ -306,11 +408,6 @@
trieNodesList.remove(hn2);
trieNodesList.add(merged);
}
/*
if (treeDebug) {
printTrie(trieNodesList.get(0), 0);
}
*/
return trieNodesList.get(0);
}
@@ -439,9 +536,9 @@
return parent;
}
/*
* TODO - not sure where all these are to be used
/**
*
*/
private List<HuffNode> getStringNodeList(List<String> pathStrings) {
List<HuffNode> nodes = new ArrayList<HuffNode>();
int idx = 1;
@@ -452,6 +549,9 @@
return nodes;
}
/**
*
*/
private List<HuffNode> getPathNodeNodeList(List<PathNode> pathNodes) {
List<HuffNode> nodes = new ArrayList<HuffNode>();
int idx = 0;
@@ -460,7 +560,308 @@
}
return nodes;
}
*/
/**
* write word entries to a deflated byte array.
*
* @param entries list of words (presumably the words in the PathTree dictionary)
* @return deflated byte array
*/
private byte[] byteProcess(List<String> entries)
throws IOException, UnsupportedEncodingException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DeflaterOutputStream dos = new DeflaterOutputStream(baos,
new Deflater(Deflater.BEST_COMPRESSION));
for (String segment : entries) {
dos.write(segment.getBytes("UTF-8"));
dos.write("\0".getBytes("UTF-8"));
}
dos.finish();
dos.close();
return baos.toByteArray();
}
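// For example, byteProcess(["content", "path"]) deflates the bytes of
// "content\0path\0" with Deflater.BEST_COMPRESSION; the NUL bytes act as the
// word separators when the dictionary is inflated again.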
private List<String> orderStrings(PathNode parent) {
List<String> parts = new ArrayList<String>();
// walk tree to make string map
Map<String, Integer> segments = new HashMap<String, Integer>();
Set<PathNode> nodes = new HashSet<PathNode>();
buildSegments(segments, nodes, parent);
for (String part : segments.keySet()) {
if (!part.equals("")) {
int count = segments.get(part);
if (parts.size() == 0) {
parts.add(part);
}
else {
int pos = parts.size();
for (int i = 0; i < parts.size(); i++) {
if (count < segments.get(parts.get(i))) {
pos = i;
break;
}
}
parts.add(pos, part);
}
}
}
return parts;
}
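// The returned list is ordered from least to most frequent segment; that
// ordering presumably feeds the HuffNode weights in getStringNodeList(), so
// the most common segments end up with the shortest codes.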
private byte[] makeNodeDictionary(HuffNode stringParent,
HuffNode pathNodeParent, List<PathNode> pathNodes)
throws PayloadException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int nodeSize = pathNodes.size();
if (nodeSize > 127) {
ByteArrayOutputStream countBaos = new ByteArrayOutputStream();
boolean start = false;
/* TODO ??? */
for (byte b : toByteArray(nodeSize)) {
if (b == 0 && !start) {
continue;
}
else {
countBaos.write(b);
start = true;
}
}
baos.write(128 + countBaos.size());
try {
countBaos.close();
baos.write(countBaos.toByteArray());
} catch (Throwable ex) {
throw new PayloadException();
}
}
else {
baos.write(nodeSize);
}
StringBuffer bits = new StringBuffer();
String endNodeLocation = findHuffPath(stringParent, HuffNode.END_NODE);
for (PathNode pn : pathNodes) {
for (NodePair np : pn.getChildren()) {
bits.append(findHuffPath(stringParent, np.getName()));
bits.append(findHuffPath(pathNodeParent, np.getConnection()));
}
bits.append(endNodeLocation);
while (bits.length() >= 8) {
int next = 0;
for (int i = 0; i < 8; i++) {
next = (byte) next << 1;
if (bits.charAt(i) == '1') {
next++;
}
}
baos.write(next);
bits.delete(0, 8);
}
}
if (bits.length() > 0) {
int next = 0;
for (int i = 0; i < 8; i++) {
next = (byte) next << 1;
if (i < bits.length() && bits.charAt(i) == '1') {
next++;
}
}
baos.write(next);
}
byte[] result = baos.toByteArray();
try {
baos.close();
} catch (Throwable ex) {
throw new PayloadException();
}
return result;
}
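// Node-count prefix, as written above: a count of 127 or fewer is a single
// byte; a larger count is written as (128 + number of significant bytes)
// followed by the big-endian count bytes with leading zeros stripped.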
/**
* Arrange the list of unique PathNodes by parent count, ascending.
*
* @param treeRoot a "root" PathNode, to get the list from
* @return a List of size ordered nodes
*/
private List<PathNode> orderNodes(PathNode treeRoot) {
List<PathNode> result = new ArrayList<PathNode>();
// walk tree to make string map
Set<PathNode> nodes = getPathNodes(treeRoot);
for (PathNode pn : nodes) {
int count = pn.getParents().size();
if (result.size() == 0) {
result.add(pn);
}
else {
int pos = result.size();
for (int i = 0; i < result.size(); i++) {
if (count <= result.get(i).getParents().size()) {
pos = i;
break;
}
}
result.add(pos, pn);
}
}
return result;
}
/**
* return the unique set of PathNodes in a given treeRoot.
*
* @param treeRoot a "root" PathNode. Which can all be a matter of perspective.
* @return the unique Set of Nodes
*/
private Set<PathNode> getPathNodes(PathNode treeRoot) {
Set<PathNode> nodes = new HashSet<PathNode>();
nodes.add(treeRoot);
for (NodePair np : treeRoot.getChildren()) {
nodes.addAll(getPathNodes(np.getConnection()));
}
return nodes;
}
private String findHuffPath(HuffNode trie, Object need) {
HuffNode left = trie.getLeft();
HuffNode right = trie.getRight();
if (left != null && left.getValue() != null) {
if (need.equals(left.getValue())) {
return "0";
}
}
if (right != null && right.getValue() != null) {
if (need.equals(right.getValue())) {
return "1";
}
}
if (left != null) {
String leftPath = findHuffPath(left, need);
if (leftPath.length() > 0) {
return "0" + leftPath;
}
}
if (right != null) {
String rightPath = findHuffPath(right, need);
if (rightPath.length() > 0) {
return "1" + rightPath;
}
}
return "";
}
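// The returned string is the bit path to `need` within the trie: '0' for a
// left branch, '1' for a right branch; an empty string means not found.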
/**
* given a tokenized URL path, build out the PathNode parent,
* and append endMarker to terminal nodes.
*/
private void makePathForURL(StringTokenizer st, PathNode parent, PathNode endMarker) {
if (st.hasMoreTokens()) {
String childVal = st.nextToken();
if (childVal.equals("")) {
return;
}
boolean isNew = true;
for (NodePair child : parent.getChildren()) {
if (child.getName().equals(childVal) &&
!child.getConnection().equals(endMarker)) {
makePathForURL(st, child.getConnection(), endMarker);
isNew = false;
}
}
if (isNew) {
PathNode next = null;
if (st.hasMoreTokens()) {
next = new PathNode(parent.getContext());
parent.addChild(new NodePair(childVal, next));
next.addParent(parent);
makePathForURL(st, next, endMarker);
} else {
parent.addChild(new NodePair(childVal, endMarker));
if (!endMarker.getParents().contains(parent)) {
endMarker.addParent(parent);
}
}
}
}
}
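// For example, feeding "/a/b" and "/a/c" through makePathForURL() yields one
// shared "a" node with children "b" and "c", both of which point at the
// single endMarker node that terminates every path.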
private void buildSegments(Map<String, Integer> segments,
Set<PathNode> nodes, PathNode parent) {
if (!nodes.contains(parent)) {
nodes.add(parent);
for (NodePair np : parent.getChildren()) {
Integer count = segments.get(np.getName());
if (count == null) {
count = new Integer(0);
}
segments.put(np.getName(), ++count);
buildSegments(segments, nodes, np.getConnection());
}
}
}
/* TODO ??? */
private byte[] toByteArray(int value) {
return new byte[] {
(byte) (value >> 24),
(byte) (value >> 16),
(byte) (value >> 8),
(byte) value};
}
public void condenseSubTreeNodes(PathNode location) {
// "equivalent" parents are merged
List<PathNode> parentResult = new ArrayList<PathNode>();
parentResult.addAll(location.getParents());
for (PathNode parent1 : location.getParents()) {
if (!parentResult.contains(parent1)) {
continue;
}
for (PathNode parent2 : location.getParents()) {
if (!parentResult.contains(parent2) ||
parent2.getId() == parent1.getId()) {
continue;
}
if (parent1.isEquivalentTo(parent2)) {
// we merge them into smaller Id
PathNode merged = parent1.getId() < parent2.getId() ?
parent1 : parent2;
PathNode toRemove = parent1.getId() < parent2.getId() ?
parent2 : parent1;
// track down the name of the string in the grandparent
// that points to parent
String name = "";
PathNode oneParent = toRemove.getParents().get(0);
for (NodePair child : oneParent.getChildren()) {
if (child.getConnection().getId() == toRemove.getId()) {
name = child.getName();
break;
}
}
// copy grandparents to merged parent node.
List<PathNode> movingParents = toRemove.getParents();
merged.addParents(movingParents);
// all grandparents with name now point to merged node
for (PathNode pn : toRemove.getParents()) {
for (NodePair child : pn.getChildren()) {
if (child.getName().equals(name)) {
child.setConnection(merged);
}
}
}
parentResult.remove(toRemove);
}
}
}
location.setParents(parentResult);
for (PathNode pn : location.getParents()) {
condenseSubTreeNodes(pn);
}
}
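// Two parents are "equivalent" when isEquivalentTo() says their subtrees
// match; the one with the larger id is dropped, its grandparents are
// re-pointed at the surviving node, and the merge repeats up the tree.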
}

Util.java

@@ -16,23 +16,8 @@
package com.redhat.trie;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.HashSet;
import java.util.StringTokenizer;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterOutputStream;
import java.util.zip.DataFormatException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.security.cert.X509Certificate;
import org.bouncycastle.asn1.ASN1Encodable;
@@ -45,26 +30,6 @@ import org.bouncycastle.x509.extension.X509ExtensionUtil;
*
*/
public class Util {
public Util() {
}
/**
* populate the parent PathNode, with the Strings in contents
*
* @param contents a list of strings to be consumed
* @param parent a PathNode, will be the root node, to be populated
* @return is the same object as the parent param
*/
public PathNode makePathTree(List<String> contents, PathNode parent) {
PathNode endMarker = new PathNode(new NodeContext());
for (String path : contents) {
StringTokenizer st = new StringTokenizer(path, "/");
makePathForURL(st, parent, endMarker);
}
//condenseSubTreeNodes(endMarker);
return parent;
}
/*
* PrettyPrint a PathNode tree
*/
@@ -124,291 +89,6 @@ public class Util {
}
}
/*
* given a tokenized URL path, build out the PathNode parent,
* and append endMarker to terminal nodes.
*/
private void makePathForURL(StringTokenizer st, PathNode parent, PathNode endMarker) {
if (st.hasMoreTokens()) {
String childVal = st.nextToken();
if (childVal.equals("")) {
return;
}
boolean isNew = true;
for (NodePair child : parent.getChildren()) {
if (child.getName().equals(childVal) &&
!child.getConnection().equals(endMarker)) {
makePathForURL(st, child.getConnection(), endMarker);
isNew = false;
}
}
if (isNew) {
PathNode next = null;
if (st.hasMoreTokens()) {
next = new PathNode(parent.getContext());
parent.addChild(new NodePair(childVal, next));
next.addParent(parent);
makePathForURL(st, next, endMarker);
} else {
parent.addChild(new NodePair(childVal, endMarker));
if (!endMarker.getParents().contains(parent)) {
endMarker.addParent(parent);
}
}
}
}
}
public void condenseSubTreeNodes(PathNode location) {
// "equivalent" parents are merged
List<PathNode> parentResult = new ArrayList<PathNode>();
parentResult.addAll(location.getParents());
for (PathNode parent1 : location.getParents()) {
if (!parentResult.contains(parent1)) {
continue;
}
for (PathNode parent2 : location.getParents()) {
if (!parentResult.contains(parent2) ||
parent2.getId() == parent1.getId()) {
continue;
}
if (parent1.isEquivalentTo(parent2)) {
// we merge them into smaller Id
PathNode merged = parent1.getId() < parent2.getId() ?
parent1 : parent2;
PathNode toRemove = parent1.getId() < parent2.getId() ?
parent2 : parent1;
// track down the name of the string in the grandparent
// that points to parent
String name = "";
PathNode oneParent = toRemove.getParents().get(0);
for (NodePair child : oneParent.getChildren()) {
if (child.getConnection().getId() == toRemove.getId()) {
name = child.getName();
break;
}
}
// copy grandparents to merged parent node.
List<PathNode> movingParents = toRemove.getParents();
merged.addParents(movingParents);
// all grandparents with name now point to merged node
for (PathNode pn : toRemove.getParents()) {
for (NodePair child : pn.getChildren()) {
if (child.getName().equals(name)) {
child.setConnection(merged);
}
}
}
parentResult.remove(toRemove);
}
}
}
location.setParents(parentResult);
for (PathNode pn : location.getParents()) {
condenseSubTreeNodes(pn);
}
}
public List<String> orderStrings(PathNode parent) throws IOException {
List<String> parts = new ArrayList<String>();
// walk tree to make string map
Map<String, Integer> segments = new HashMap<String, Integer>();
Set<PathNode> nodes = new HashSet<PathNode>();
buildSegments(segments, nodes, parent);
for (String part : segments.keySet()) {
if (!part.equals("")) {
int count = segments.get(part);
if (parts.size() == 0) {
parts.add(part);
}
else {
int pos = parts.size();
for (int i = 0; i < parts.size(); i++) {
if (count < segments.get(parts.get(i))) {
pos = i;
break;
}
}
parts.add(pos, part);
}
}
}
return parts;
}
private void buildSegments(Map<String, Integer> segments,
Set<PathNode> nodes, PathNode parent) {
if (!nodes.contains(parent)) {
nodes.add(parent);
for (NodePair np : parent.getChildren()) {
Integer count = segments.get(np.getName());
if (count == null) {
count = new Integer(0);
}
segments.put(np.getName(), ++count);
buildSegments(segments, nodes, np.getConnection());
}
}
}
private List<PathNode> orderNodes(PathNode treeRoot) {
List<PathNode> result = new ArrayList<PathNode>();
// walk tree to make string map
Set<PathNode> nodes = getPathNodes(treeRoot);
for (PathNode pn : nodes) {
int count = pn.getParents().size();
if (nodes.size() == 0) {
nodes.add(pn);
}
else {
int pos = result.size();
for (int i = 0; i < result.size(); i++) {
if (count <= result.get(i).getParents().size()) {
pos = i;
break;
}
}
result.add(pos, pn);
}
}
return result;
}
private Set<PathNode> getPathNodes(PathNode treeRoot) {
Set<PathNode> nodes = new HashSet<PathNode>();
nodes.add(treeRoot);
for (NodePair np : treeRoot.getChildren()) {
nodes.addAll(getPathNodes(np.getConnection()));
}
return nodes;
}
private byte[] makeNodeDictionary(HuffNode stringParent,
HuffNode pathNodeParent, List<PathNode> pathNodes)
throws UnsupportedEncodingException, IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int nodeSize = pathNodes.size();
if (nodeSize > 127) {
ByteArrayOutputStream countBaos = new ByteArrayOutputStream();
boolean start = false;
for (byte b : toByteArray(nodeSize)) {
if (b == 0 && !start) {
continue;
}
else {
countBaos.write(b);
start = true;
}
}
baos.write(128 + countBaos.size());
countBaos.close();
baos.write(countBaos.toByteArray());
}
else {
baos.write(nodeSize);
}
StringBuffer bits = new StringBuffer();
String endNodeLocation = findHuffPath(stringParent, HuffNode.END_NODE);
for (PathNode pn : pathNodes) {
for (NodePair np : pn.getChildren()) {
bits.append(findHuffPath(stringParent, np.getName()));
bits.append(findHuffPath(pathNodeParent, np.getConnection()));
}
bits.append(endNodeLocation);
while (bits.length() >= 8) {
int next = 0;
for (int i = 0; i < 8; i++) {
next = (byte) next << 1;
if (bits.charAt(i) == '1') {
next++;
}
}
baos.write(next);
bits.delete(0, 8);
}
}
if (bits.length() > 0) {
int next = 0;
for (int i = 0; i < 8; i++) {
next = (byte) next << 1;
if (i < bits.length() && bits.charAt(i) == '1') {
next++;
}
}
baos.write(next);
}
byte[] result = baos.toByteArray();
/* FIXME add debugging? :-)
if (treeDebug) {
ByteArrayInputStream bais = new ByteArrayInputStream(result);
int value = bais.read();
while (value != -1) {
log.debug(value);
value = bais.read();
}
}
*/
baos.close();
return result;
}
private byte[] toByteArray(int value) {
return new byte[] {
(byte) (value >> 24),
(byte) (value >> 16),
(byte) (value >> 8),
(byte) value};
}
public String findHuffPath(HuffNode trie, Object need) {
HuffNode left = trie.getLeft();
HuffNode right = trie.getRight();
if (left != null && left.getValue() != null) {
if (need.equals(left.getValue())) {
return "0";
}
}
if (right != null && right.getValue() != null) {
if (need.equals(right.getValue())) {
return "1";
}
}
if (left != null) {
String leftPath = findHuffPath(left, need);
if (leftPath.length() > 0) {
return "0" + leftPath;
}
}
if (right != null) {
String rightPath = findHuffPath(right, need);
if (rightPath.length() > 0) {
return "1" + rightPath;
}
}
return "";
}
private byte[] byteProcess(List<String> entries)
throws IOException, UnsupportedEncodingException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DeflaterOutputStream dos = new DeflaterOutputStream(baos,
new Deflater(Deflater.BEST_COMPRESSION));
for (String segment : entries) {
dos.write(segment.getBytes("UTF-8"));
dos.write("\0".getBytes("UTF-8"));
}
dos.finish();
dos.close();
return baos.toByteArray();
}
/*
* From the deflated payload, produce the content set lists
*
@@ -418,7 +98,7 @@ public class Util {
*
* Rename it for tracking, and to be clear about what is happening
*/
public List<String> hydrateContentPackage(byte[] compressedBlob)
public static List<String> hydrateContentPackage(byte[] compressedBlob)
throws PayloadException {
try {
PathTree pt = new PathTree(compressedBlob);
@@ -441,32 +121,5 @@
} catch (IOException ex) { }
return null;
}
public static byte[] decompress(byte[] input) {
Inflater inflator = new Inflater();
inflator.setInput(input);
ByteArrayOutputStream bos = new ByteArrayOutputStream(input.length);
byte[] buf = new byte[1024];
try {
while (true) {
int count = inflator.inflate(buf);
if (count > 0) {
bos.write(buf, 0, count);
} else if (count == 0 && inflator.finished()) {
break;
} else {
throw new RuntimeException("bad zip data, size:"
+ input.length);
}
}
} catch (DataFormatException t) {
throw new RuntimeException(t);
} finally {
inflator.end();
}
return bos.toByteArray();
}
}