pyodide/emsdk/patches/lz4_c.patch

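This patch (against Emscripten tag-1.38.10) replaces the bundled JavaScript MiniLZ4 codec in library_lz4.js with calls into a natively compiled LZ4: chunk decompression goes through an exported _LZ4_decompress_safe, with scratch buffers allocated on the WASM heap via Module['_malloc']. The runtime compression fallback (compressPackage) is disabled, and the chunk size is hardcoded to 2048 bytes, so it must match the chunk size of the pre-built compressedData.

A minimal sketch of the heap round-trip the patched read() performs, assuming _LZ4_decompress_safe is exported from the LZ4 C library compiled into the module; the buffer names are illustrative, not from the patch:

    var CHUNK_SIZE = 2048;                               // must match the offline packager
    var srcPtr = Module['_malloc'](CHUNK_SIZE);          // compressed input, on the WASM heap
    var dstPtr = Module['_malloc'](CHUNK_SIZE);          // decompressed output, on the WASM heap

    function decompressChunk(compressed /* Uint8Array */) {
      Module.HEAPU8.set(compressed, srcPtr);             // copy compressed bytes into the heap
      var n = Module['_LZ4_decompress_safe'](srcPtr, dstPtr, compressed.length, CHUNK_SIZE);
      if (n < 0) throw new Error('LZ4 decompression failed: ' + n);
      // Heap view (not a copy) of the result; copy it out before reusing dstPtr.
      return Module.HEAPU8.subarray(dstPtr, dstPtr + n);
    }
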

diff --git a/src/library_lz4.js b/src/library_lz4.js
index 4c3f583b7..bca230feb 100644
--- a/emsdk/emscripten/tag-1.38.10/src/library_lz4.js
+++ b/emsdk/emscripten/tag-1.38.10/src/library_lz4.js
@@ -5,26 +5,15 @@ mergeInto(LibraryManager.library, {
DIR_MODE: {{{ cDefine('S_IFDIR') }}} | 511 /* 0777 */,
FILE_MODE: {{{ cDefine('S_IFREG') }}} | 511 /* 0777 */,
CHUNK_SIZE: -1,
- codec: null,
+ buf: null,
init: function() {
- if (LZ4.codec) return;
- LZ4.codec = (function() {
- {{{ read('mini-lz4.js') }}};
- return MiniLZ4;
- })();
- LZ4.CHUNK_SIZE = LZ4.codec.CHUNK_SIZE;
+ LZ4.CHUNK_SIZE = 2048;
},
loadPackage: function (pack) {
LZ4.init();
var compressedData = pack['compressedData'];
- if (!compressedData) compressedData = LZ4.codec.compressPackage(pack['data']);
+ // if (!compressedData) compressedData = LZ4.codec.compressPackage(pack['data']);
assert(compressedData.cachedIndexes.length === compressedData.cachedChunks.length);
- for (var i = 0; i < compressedData.cachedIndexes.length; i++) {
- compressedData.cachedIndexes[i] = -1;
- compressedData.cachedChunks[i] = compressedData.data.subarray(compressedData.cachedOffset + i*LZ4.CHUNK_SIZE,
- compressedData.cachedOffset + (i+1)*LZ4.CHUNK_SIZE);
- assert(compressedData.cachedChunks[i].length === LZ4.CHUNK_SIZE);
- }
pack['metadata'].files.forEach(function(file) {
var dir = PATH.dirname(file.filename);
var name = PATH.basename(file.filename);
@@ -112,8 +101,16 @@ mergeInto(LibraryManager.library, {
//console.log('LZ4 read ' + [offset, length, position]);
length = Math.min(length, stream.node.size - position);
if (length <= 0) return 0;
+
var contents = stream.node.contents;
var compressedData = contents.compressedData;
+ if (LZ4.buf === null) {
+ LZ4.buf = Module['_malloc'](LZ4.CHUNK_SIZE);
+ for (var i = 0; i < compressedData.cachedIndexes.length; i++) {
+ compressedData.cachedIndexes[i] = -1;
+ compressedData.cachedChunks[i] = Module['_malloc'](LZ4.CHUNK_SIZE);
+ }
+ }
var written = 0;
while (written < length) {
var start = contents.start + position + written; // start index in uncompressed data
@@ -138,18 +135,24 @@ mergeInto(LibraryManager.library, {
Module['decompressedChunks'] = (Module['decompressedChunks'] || 0) + 1;
}
var compressed = compressedData.data.subarray(compressedStart, compressedStart + compressedSize);
- //var t = Date.now();
- var originalSize = LZ4.codec.uncompress(compressed, currChunk);
- //console.log('decompress time: ' + (Date.now() - t));
+ // var t = Date.now();
+ // var originalSize = LZ4.codec.uncompress(compressed, currChunk);
+ Module.HEAPU8.set(compressed, LZ4.buf);
+ var originalSize = Module['_LZ4_decompress_safe'](LZ4.buf, currChunk, compressedSize, LZ4.CHUNK_SIZE);
+ // console.log('decompress time: ' + (Date.now() - t));
if (chunkIndex < compressedData.successes.length-1) assert(originalSize === LZ4.CHUNK_SIZE); // all but the last chunk must be full-size
}
- } else {
+ }
+ else {
// uncompressed
- currChunk = compressedData.data.subarray(compressedStart, compressedStart + LZ4.CHUNK_SIZE);
+ var compressed = compressedData.data.subarray(compressedStart, compressedStart + compressedSize);
+ // var originalSize = LZ4.codec.uncompress(compressed, currChunk);
+ Module.HEAPU8.set(compressed, LZ4.buf);
+ currChunk = LZ4.buf;
}
var startInChunk = start % LZ4.CHUNK_SIZE;
var endInChunk = Math.min(startInChunk + desired, LZ4.CHUNK_SIZE);
- buffer.set(currChunk.subarray(startInChunk, endInChunk), offset + written);
+ buffer.set(Module.HEAPU8.subarray(currChunk + startInChunk, currChunk + endInChunk), offset + written);
var currWritten = endInChunk - startInChunk;
written += currWritten;
}
@@ -181,4 +184,3 @@ if (LibraryManager.library['$FS__deps']) {
warn('FS does not seem to be in use (no preloaded files etc.), LZ4 will not do anything');
}
#endif
-
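
Two consequences of the change worth noting. First, the heap buffers are allocated lazily on the first read() rather than in init(), presumably because _malloc is only usable once the runtime has started. Second, compressedData.cachedChunks[i] now stores heap pointers (numbers) instead of Uint8Array subarrays, which is why the final copy-out slices Module.HEAPU8 at currChunk + startInChunk instead of indexing a typed array. Nothing in the patch frees these buffers; they appear to live for the lifetime of the mounted package and are reused across reads.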