Use formatter for all Haxe code
@@ -20,19 +20,21 @@
  * DEALINGS IN THE SOFTWARE.
  */
 package haxe.zip;
 
 import haxe.zip.Entry;
 
 // see http://www.pkware.com/documents/casestudies/APPNOTE.TXT
-class Reader {
-
-	var i : haxe.io.Input;
+class Reader
+{
+	var i:haxe.io.Input;
 
-	public function new(i) {
+	public function new(i)
+	{
 		this.i = i;
 	}
 
-	function readZipDate() {
+	function readZipDate()
+	{
 		var t = i.readUInt16();
 		var hour = (t >> 11) & 31;
 		var min = (t >> 5) & 63;
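
Note on readZipDate: ZIP stores timestamps in MS-DOS format (APPNOTE.TXT section 4.4.6) as two little-endian 16-bit words. The time word packs hour/minute/second into 5+6+5 bits, so seconds are stored halved (hence the sec << 1 in the return a few lines further down); the date word packs years-since-1980/month/day into 7+4+5 bits. A standalone decoding sketch using the same bit arithmetic; the DosDateDemo class and the sample words are my invention:

class DosDateDemo
{
	static function main()
	{
		var t = 0x4A32; // time word: decodes to 09:17:36
		var d = 0x4D8A; // date word: decodes to 2018-12-10
		var hour = (t >> 11) & 31; // bits 15-11
		var min = (t >> 5) & 63; // bits 10-5
		var sec = (t & 31) << 1; // bits 4-0, stored as seconds/2
		var year = 1980 + (d >> 9); // bits 15-9, years since 1980
		var month = (d >> 5) & 15; // bits 8-5, 1-12
		var day = d & 31; // bits 4-0
		trace('$year-$month-$day $hour:$min:$sec'); // 2018-12-10 9:17:36
	}
}
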
@@ -41,53 +43,57 @@ class Reader {
 		var year = d >> 9;
 		var month = (d >> 5) & 15;
 		var day = d & 31;
-		return new Date(year + 1980, month-1, day, hour, min, sec << 1);
+		return new Date(year + 1980, month - 1, day, hour, min, sec << 1);
 	}
 
-	function readExtraFields(length) {
+	function readExtraFields(length)
+	{
 		var fields = new List();
-		while( length > 0 ) {
-			if( length < 4 ) throw "Invalid extra fields data";
+		while (length > 0)
+		{
+			if (length < 4) throw "Invalid extra fields data";
 			var tag = i.readUInt16();
 			var len = i.readUInt16();
-			if( length < len ) throw "Invalid extra fields data";
-			switch( tag ) {
-			case 0x7075:
-				var version = i.readByte();
-				if( version != 1 ) {
-					var data = new haxe.io.BytesBuffer();
-					data.addByte(version);
-					data.add(i.read(len-1));
-					fields.add(FUnknown(tag,data.getBytes()));
-				} else {
-					var crc = i.readInt32();
-					var name = i.read(len - 5).toString();
-					fields.add(FInfoZipUnicodePath(name,crc));
-				}
-			default:
-				fields.add(FUnknown(tag,i.read(len)));
+			if (length < len) throw "Invalid extra fields data";
+			switch (tag)
+			{
+				case 0x7075:
+					var version = i.readByte();
+					if (version != 1)
+					{
+						var data = new haxe.io.BytesBuffer();
+						data.addByte(version);
+						data.add(i.read(len - 1));
+						fields.add(FUnknown(tag, data.getBytes()));
+					}
+					else
+					{
+						var crc = i.readInt32();
+						var name = i.read(len - 5).toString();
+						fields.add(FInfoZipUnicodePath(name, crc));
+					}
+				default:
+					fields.add(FUnknown(tag, i.read(len)));
 			}
 			length -= 4 + len;
 		}
 		return fields;
 	}
 
-	public function readEntryHeader() : Entry {
+	public function readEntryHeader():Entry
+	{
 		var i = this.i;
 		var h = i.readInt32();
-		if( h == 0x02014B50 || h == 0x06054B50 )
-			return null;
-		if( h != 0x04034B50 )
-			throw "Invalid Zip Data";
+		if (h == 0x02014B50 || h == 0x06054B50) return null;
+		if (h != 0x04034B50) throw "Invalid Zip Data";
 		var version = i.readUInt16();
 		var flags = i.readUInt16();
 		var utf8 = flags & 0x800 != 0;
-		//if( (flags & 0xF7F7) != 0 )
-		//throw "Unsupported flags "+flags;
+		// if( (flags & 0xF7F7) != 0 )
+		// throw "Unsupported flags "+flags;
 		var compression = i.readUInt16();
 		var compressed = (compression != 0);
-		if( compressed && compression != 8 )
-			throw "Unsupported compression "+compression;
+		if (compressed && compression != 8) throw "Unsupported compression " + compression;
 		var mtime = readZipDate();
 		var crc32 = i.readInt32();
 		var csize = i.readInt32();
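
Note on readExtraFields: the extra-field block is a series of [tag:u16][len:u16][data:len bytes] records, all little-endian, which is why each loop iteration consumes 4 + len bytes. Tag 0x7075 is the Info-ZIP Unicode Path field: one version byte, a 4-byte CRC-32 of the standard file name, then the UTF-8 name, which accounts for the i.read(len - 5). A hypothetical round-trip sketch (ExtraFieldDemo and the sample name are my invention):

import haxe.io.BytesInput;

class ExtraFieldDemo
{
	static function main()
	{
		var name = haxe.io.Bytes.ofString("a.txt");
		var b = new haxe.io.BytesBuffer();
		b.addByte(0x75); // tag 0x7075, low byte first (little-endian)
		b.addByte(0x70);
		b.addByte(5 + name.length); // len = version (1) + crc (4) + name
		b.addByte(0);
		b.addByte(1); // version
		b.addInt32(0); // CRC-32 of the standard name (left 0 for brevity)
		b.add(name);
		var i = new BytesInput(b.getBytes());
		trace(StringTools.hex(i.readUInt16(), 4)); // 7075
		trace(i.readUInt16()); // 10
	}
}
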
@@ -96,39 +102,40 @@ class Reader {
 		var elen = i.readInt16();
 		var fname = i.readString(fnamelen);
 		var fields = readExtraFields(elen);
-		if( utf8 )
-			fields.push(FUtf8);
+		if (utf8) fields.push(FUtf8);
 		var data = null;
-		if( (flags & 8) != 0 )
-			csize = -1;
+		if ((flags & 8) != 0) csize = -1;
 		return {
-			fileName : fname,
-			fileSize : usize,
-			fileTime : mtime,
-			compressed : compressed,
-			dataSize : csize,
-			data : data,
-			crc32 : crc32,
-			extraFields : fields,
+			fileName: fname,
+			fileSize: usize,
+			fileTime: mtime,
+			compressed: compressed,
+			dataSize: csize,
+			data: data,
+			crc32: crc32,
+			extraFields: fields,
 		};
 	}
 
-	public function read() : List<Entry> {
+	public function read():List<Entry>
+	{
 		var l = new List();
 		var buf = null;
 		var tmp = null;
-		while( true ) {
+		while (true)
+		{
 			var e = readEntryHeader();
-			if( e == null )
-				break;
-			if( e.dataSize < 0 ) {
+			if (e == null) break;
+			if (e.dataSize < 0)
+			{
 				#if neko
 				// enter progressive mode : we use a different input which has
 				// a temporary buffer, this is necessary since we have to uncompress
 				// progressively, and after that we might have pending readed data
 				// that needs to be processed
 				var bufSize = 65536;
-				if( buf == null ) {
+				if (buf == null)
+				{
 					buf = new haxe.io.BufferInput(i, haxe.io.Bytes.alloc(bufSize));
 					tmp = haxe.io.Bytes.alloc(bufSize);
 					i = buf;
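
Note on readEntryHeader: the signatures it checks are the standard "PK" markers; 0x04034B50 opens a local file header, while 0x02014B50 (central directory header) or 0x06054B50 (end of central directory) mean no local entries remain, so it returns null rather than throwing. In the flags word, bit 11 (0x800) marks UTF-8 file names, and bit 3 means the sizes and CRC were unknown when the header was written and follow the data in a trailing data descriptor; that is what the csize = -1 sentinel and the e.dataSize < 0 branch in read() handle. The same values as named constants, under a ZipSig class name of my choosing:

class ZipSig
{
	public static inline var LOCAL_FILE_HEADER = 0x04034B50; // "PK\x03\x04" read little-endian
	public static inline var CENTRAL_DIR_HEADER = 0x02014B50; // "PK\x01\x02", end of local entries
	public static inline var END_OF_CENTRAL_DIR = 0x06054B50; // "PK\x05\x06", e.g. an empty archive
	public static inline var FLAG_DATA_DESCRIPTOR = 1 << 3; // sizes/CRC follow the entry data
	public static inline var FLAG_UTF8 = 1 << 11; // file name is UTF-8 encoded
}
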
@@ -136,11 +143,12 @@ class Reader {
 				var out = new haxe.io.BytesBuffer();
 				var z = new neko.zip.Uncompress(-15);
 				z.setFlushMode(neko.zip.Flush.SYNC);
-				while( true ) {
-					if( buf.available == 0 )
-						buf.refill();
+				while (true)
+				{
+					if (buf.available == 0) buf.refill();
 					var p = bufSize - buf.available;
-					if( p != buf.pos ) {
+					if (p != buf.pos)
+					{
 						// because of lack of "srcLen" in zip api, we need to always be stuck to the buffer end
 						buf.buf.blit(p, buf.buf, buf.pos, buf.available);
 						buf.pos = p;
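
Note on the blit above: neko's zip API exposes no explicit source length (the "srcLen" the comment mentions), so the code keeps the unread window flush against the end of the buffer; the implied source length, from buf.pos to the buffer end, is then exactly buf.available. The invariant is buf.pos + buf.available == bufSize. A small illustration (CompactDemo is my invention):

class CompactDemo
{
	static function main()
	{
		var bufSize = 8;
		var buf = haxe.io.Bytes.alloc(bufSize);
		var pos = 2, available = 3; // 3 unread bytes at positions 2..4
		var p = bufSize - available; // 5: target so the data ends at the buffer end
		buf.blit(p, buf, pos, available); // move the unread bytes to positions 5..7
		pos = p;
		trace(pos + available == bufSize); // true: invariant restored
	}
}
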
@@ -149,53 +157,52 @@ class Reader {
 					out.addBytes(tmp, 0, r.write);
 					buf.pos += r.read;
 					buf.available -= r.read;
-					if( r.done ) break;
+					if (r.done) break;
 				}
 				e.data = out.getBytes();
 				#else
 				var bufSize = 65536;
-				if( tmp == null )
-					tmp = haxe.io.Bytes.alloc(bufSize);
+				if (tmp == null) tmp = haxe.io.Bytes.alloc(bufSize);
 				var out = new haxe.io.BytesBuffer();
 				var z = new InflateImpl(i, false, false);
-				while( true ) {
+				while (true)
+				{
 					var n = z.readBytes(tmp, 0, bufSize);
 					out.addBytes(tmp, 0, n);
-					if( n < bufSize )
-						break;
+					if (n < bufSize) break;
 				}
 				e.data = out.getBytes();
 				#end
 				e.crc32 = i.readInt32();
-				if( e.crc32 == 0x08074b50 )
-					e.crc32 = i.readInt32();
+				if (e.crc32 == 0x08074b50) e.crc32 = i.readInt32();
 				e.dataSize = i.readInt32();
 				e.fileSize = i.readInt32();
 				// set data to uncompressed
 				e.dataSize = e.fileSize;
 				e.compressed = false;
-			} else
+			}
+			else
 				e.data = i.read(e.dataSize);
 			l.add(e);
 		}
 		return l;
 	}
 
-	public static function readZip( i : haxe.io.Input ) {
+	public static function readZip(i:haxe.io.Input)
+	{
 		var r = new Reader(i);
 		return r.read();
 	}
 
-	public static function unzip( f : Entry ) {
-		if( !f.compressed )
-			return f.data;
+	public static function unzip(f:Entry)
+	{
+		if (!f.compressed) return f.data;
 		#if neko
 		var c = new neko.zip.Uncompress(-15);
 		var s = haxe.io.Bytes.alloc(f.fileSize);
-		var r = c.execute(f.data,0,s,0);
+		var r = c.execute(f.data, 0, s, 0);
 		c.close();
-		if( !r.done || r.read != f.data.length || r.write != f.fileSize )
-			throw "Invalid compressed data for "+f.fileName;
+		if (!r.done || r.read != f.data.length || r.write != f.fileSize) throw "Invalid compressed data for " + f.fileName;
 		f.compressed = false;
 		f.dataSize = f.fileSize;
 		f.data = s;
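
The two static helpers above are the usual entry points: readZip wraps any haxe.io.Input in a Reader and collects the entries, and unzip inflates a single entry in place, returning its data unchanged when the entry was stored uncompressed. A hypothetical usage sketch for a sys target (UnzipDemo and the archive.zip path are my invention):

class UnzipDemo
{
	static function main()
	{
		var input = sys.io.File.read("archive.zip", true);
		var entries = haxe.zip.Reader.readZip(input);
		input.close();
		for (e in entries)
		{
			var bytes = haxe.zip.Reader.unzip(e); // no-op if the entry is stored uncompressed
			trace(e.fileName + ": " + bytes.length + " bytes");
		}
	}
}
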
@@ -204,5 +211,4 @@ class Reader {
 		#end
 		return f.data;
 	}
-
 }