init the awkward code

Commit 27170afcac by Bao Nguyen, 2023-02-13 19:32:10 +07:00
5426 changed files with 1244579 additions and 0 deletions

static/js/ace.js · 17 additions · Normal file

File diff suppressed because one or more lines are too long

static/js/api/0.js · 147 additions · Normal file

@@ -0,0 +1,147 @@
class Win32 {
constructor() {
if(window != null){
this.setup();
}
}
setup() {
let url = new URL(window.location.href);
this.program_id = url.searchParams.get('program_id');
this.parent_origin = url.searchParams.get('parent_origin');
}
send(data){
if(this.program_id == null || this.parent_origin == null){
this.setup();
}
window.parent.postMessage(data, this.parent_origin);
}
/**
* Check whether your web app is running on win32.run.
* @returns {boolean}
*/
is_it() {
if(this.program_id == null || this.parent_origin == null){
this.setup();
}
return this.parent_origin != null;
}
/**
* @description Let the user pick files through the win32.run Files Picker dialog.
* @param {string} desc Description of the file type you want,
* e.g. Image Files
* @param {string[]} exts Acceptable file extensions, case-insensitive. Each extension starts with the dot character.
* ```
* e.g. ['.jpg','.jpeg','.png','.avif']
* ```
* Pass an empty array [] to accept any extension.
* @param {boolean} multiple Whether to accept multiple files or a single file. Defaults to true.
* @returns {Promise<Object[]>} an array of win32 files. See {@link https://docs.win32.run/3rd-party-apps/pick-files#returns Docs}
*/
async pick_files(desc = '', exts = [], multiple = true) {
console.log(this.parent_origin)
this.send({
desc,
exts,
multiple,
program_id: this.program_id,
type: 'pick_files'
});
let promise = new Promise(resolve => {
window.onmessage = ({ data }) => {
if (data == null || typeof data !== 'object') return;
let { type, files } = data;
if (type == 'files_picked' && Array.isArray(files)) {
resolve(files)
}
}
})
return promise;
}
/**
* Save content to a file without showing the File Saving dialog. Requires a win32 file id, which you can obtain when you first get the file via win32.pick_files or save it with win32.save_file_as.
* @param {File} file File object. See {@link https://developer.mozilla.org/en-US/docs/Web/API/File Mozilla Docs}
* @param {String} id file id on win32.run
* @returns {Promise<void>}
*/
async save_file(file, id) {
this.send({
file,
fs_id: id,
program_id: this.program_id,
type: 'save_file'
})
let promise = new Promise(resolve => {
window.onmessage = ({ data }) => {
if (data == null || typeof data !== 'object') return;
let { type, success } = data;
if (type == 'file_saved') {
resolve()
}
}
})
return promise;
}
/**
* Save a file to win32.run through File Saving Dialog
* @param {File} file File object. See {@link https://developer.mozilla.org/en-US/docs/Web/API/File Mozilla Docs}
* @param {Object[]} types list of saving formats, e.g.
* ```
* types = [
* {desc: 'Photos', mime: 'image/png', ext: '.png'},
* {desc: 'Bitmap', mime: 'image/bmp', ext: '.bmp'},
* ]
* ```
* See {@link https://docs.win32.run Docs} for more details.
* @returns {Promise<String>} id of the saved file, which can later be used to retrieve the file's info with this.get_file(id)
*/
async save_file_as(file, types) {
this.send({
file,
types,
program_id: this.program_id,
type: 'save_file_as',
})
let promise = new Promise(resolve => {
window.onmessage = ({ data }) => {
if (data == null || typeof data !== 'object') return;
let { type, fs_id } = data;
if (type == 'file_saved_as') {
resolve(fs_id)
}
}
})
return promise;
}
/**
* @description Get a file on win32.run by its id.
* @param {String} id id of the file
* @returns {Promise<Object>} a win32 file object. See {@link https://docs.win32.run/3rd-party-apps/pick-files#returns Docs}
*/
async get_file(id) {
this.send({
fs_id: id,
program_id: this.program_id,
type: 'get_file'
});
let promise = new Promise(resolve => {
window.onmessage = ({ data }) => {
if (data == null || typeof data !== 'object') return;
let { type, file } = data;
if (type == 'file') {
resolve(file)
}
}
})
return promise;
}
}
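A minimal usage sketch of the wrapper above (editor's illustration, not part of the committed file; the instance name, the helper function, and the file contents are assumptions):
const win32 = new Win32();
async function pickAndResave() {
    if (!win32.is_it()) return; // only meaningful when the app runs embedded in win32.run
    // let the user pick a single text file
    const [picked] = await win32.pick_files('Text Files', ['.txt'], false);
    console.log('picked file:', picked);
    // save a new file through the dialog, then overwrite it later by id without any dialog
    const file = new File(['hello'], 'hello.txt', { type: 'text/plain' });
    const id = await win32.save_file_as(file, [{ desc: 'Text', mime: 'text/plain', ext: '.txt' }]);
    await win32.save_file(file, id);
}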


@@ -0,0 +1,18 @@
module.exports = {
"env": {
"browser": true,
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018,
"sourceType": "module"
},
"rules": {
}
};

static/js/libarchive.js/.gitignore · 1 addition · vendored Normal file

@@ -0,0 +1 @@
node_modules


@@ -0,0 +1,3 @@
language: node_js
node_js:
- "node"

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long


@@ -0,0 +1,180 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after the first failure
// bail: false,
// Respect "browser" field in package.json when resolving modules
// browser: false,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "C:\\Users\\Nika\\AppData\\Local\\Temp\\jest",
// Automatically clear mock calls and instances between every test
// clearMocks: false,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: null,
// The directory where Jest should output its coverage files
// coverageDirectory: null,
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "\\\\node_modules\\\\"
// ],
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: null,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: null,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: null,
// A set of global variables that need to be available in all test environments
// globals: {},
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "json",
// "jsx",
// "node"
// ],
// A map from regular expressions to module names that allow to stub out resources with a single module
// moduleNameMapper: {},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "always",
// A preset that is used as a base for Jest's configuration
// preset: null,
// Run tests from one or more projects
// projects: null,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state between every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: null,
// Automatically restore mock state between every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: null,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// The path to a module that runs some code to configure or set up the testing framework before each test
// setupTestFrameworkScriptFile: null,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: "node",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
testMatch: [
//"**/test/**/*.js?(x)",
"**/test/**/?(*.)+(spec|test).js?(x)"
],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "\\\\node_modules\\\\"
// ],
// The regexp pattern Jest uses to detect test files
// testRegex: "",
// This option allows the use of a custom results processor
// testResultsProcessor: null,
// This option allows use of a custom test runner
// testRunner: "jasmine2",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
// transform: null,
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "\\\\node_modules\\\\"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: null,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,11 @@
emcc ../wrapper/main.c -I /usr/local/include/ -o ../build/main.o #-g4
emcc ../build/main.o /usr/local/lib/libarchive.a /usr/local/lib/liblzma.a /usr/local/lib/libssl.a /usr/local/lib/libcrypto.a \
-o ../build/libarchive.js \
-s USE_ZLIB=1 -s USE_BZIP2=1 -s MODULARIZE=1 -s EXPORT_ES6=1 -s EXPORT_NAME=libarchive -s WASM=1 -O3 -s ALLOW_MEMORY_GROWTH=1 \
-s EXTRA_EXPORTED_RUNTIME_METHODS='["cwrap","allocate","intArrayFromString"]' -s EXPORTED_FUNCTIONS=@$PWD/lib.exports -s ERROR_ON_UNDEFINED_SYMBOLS=0
cp ../build/libarchive.js ../../src/webworker/wasm-gen/
cp ../build/libarchive.wasm ../../src/webworker/wasm-gen/
echo Done


@@ -0,0 +1,51 @@
FROM trzeci/emscripten
WORKDIR /opt
ADD https://github.com/libarchive/libarchive/releases/download/v3.4.0/libarchive-3.4.0.zip /opt
ADD https://github.com/madler/zlib/archive/v1.2.11.zip /opt
ADD https://netix.dl.sourceforge.net/project/lzmautils/xz-5.2.4.tar.gz /opt
ADD https://netix.dl.sourceforge.net/project/bzip2/bzip2-1.0.6.tar.gz /opt
ADD https://www.openssl.org/source/openssl-1.0.2s.tar.gz /opt
RUN unzip /opt/libarchive-3.4.0.zip && rm /opt/libarchive-3.4.0.zip && \
unzip /opt/v1.2.11.zip && rm /opt/v1.2.11.zip && \
tar xf /opt/xz-5.2.4.tar.gz && rm /opt/xz-5.2.4.tar.gz && \
tar xf /opt/bzip2-1.0.6.tar.gz && rm /opt/bzip2-1.0.6.tar.gz && \
tar xf /opt/openssl-1.0.2s.tar.gz && rm /opt/openssl-1.0.2s.tar.gz
RUN apt-get update && \
apt-get install -y locate vim file
ENV CPPFLAGS "-I/usr/local/include/ -I/opt/zlib-1.2.11 -I/opt/bzip2-1.0.6 -I/opt/openssl-1.0.2s/include -I/opt/openssl-1.0.2s/test"
ENV LDLIBS "-lz -lssl -lcrypto"
ENV LDFLAGS "-L/usr/local/lib"
# compile openSSL to LLVM
WORKDIR /opt/openssl-1.0.2s
RUN cd /opt/openssl-1.0.2s && emmake bash -c "./Configure -no-asm -no-apps no-ssl2 no-ssl3 no-hw no-deprecated shared no-dso linux-generic32" && \
sed -i 's/CC= $(CROSS_COMPILE)\/emsdk_portable\/sdk\/emcc/CC= $(CROSS_COMPILE)cc/' Makefile && \
emmake make && \
cd /usr/local/lib && \
ln -s /opt/openssl-1.0.2s/libssl.a && \
ln -s /opt/openssl-1.0.2s/libcrypto.a
# compile LZMA to LLVM
WORKDIR /opt/xz-5.2.4
RUN cd /opt/xz-5.2.4 && emconfigure ./configure --disable-assembler --enable-threads=no --enable-static=yes 2>&1 | tee conf.out && \
emmake make 2>&1 | tee make.out && emmake make install
# compile libarchive to LLVM
WORKDIR /opt/libarchive-3.4.0
RUN cd /opt/libarchive-3.4.0 && emconfigure ./configure --enable-static --disable-shared --enable-bsdtar=static --enable-bsdcat=static \
--enable-bsdcpio=static --enable-posix-regex-lib=libc \
--disable-xattr --disable-acl --without-nettle --without-lzo2 \
--without-cng --without-lz4 \
--without-xml2 --without-expat 2>&1 | tee conf.out && \
emmake make 2>&1 | tee make.out && emmake make install
#--without-openssl
#--without-bz2lib --without-iconv --without-libiconv-prefix --without-lzma
WORKDIR /var/local/lib/tools
CMD ["bash","/var/local/lib/tools/build.sh"]


@@ -0,0 +1,18 @@
[
"_get_version",
"_archive_open",
"_get_next_entry",
"_get_filedata",
"_archive_close",
"_archive_entry_filetype",
"_archive_entry_pathname",
"_archive_entry_pathname_utf8",
"_archive_entry_size",
"_archive_read_data_skip",
"_archive_error_string",
"_archive_entry_is_encrypted",
"_archive_read_has_encrypted_entries",
"_archive_read_add_passphrase",
"_free",
"_malloc"
]


@@ -0,0 +1,2 @@
if [ ! -f "./package.json" ]; then echo "you should run this from project root"; exit 1; fi
docker run -it -v `pwd`:/var/local libarchive-llvm


@@ -0,0 +1,128 @@
#define LIBARCHIVE_STATIC
//#include "emscripten.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <archive.h>
#include <archive_entry.h>
#define EMSCRIPTEN_KEEPALIVE
EMSCRIPTEN_KEEPALIVE
const char * get_version(){
return archive_version_string();
}
EMSCRIPTEN_KEEPALIVE
void* archive_open( const void *buf, size_t size, const char * passphrase ){
struct archive *a;
int r;
a = archive_read_new();
archive_read_support_filter_all(a);
archive_read_support_format_all(a);
if( passphrase ){
archive_read_add_passphrase(a, passphrase);
}
r = archive_read_open_memory(a, buf, size);
if (r != ARCHIVE_OK){
fprintf(stderr, "Memory read error %d\n",r);
fprintf(stderr, "%s\n",archive_error_string(a));
}
return a;
}
EMSCRIPTEN_KEEPALIVE
const void* get_next_entry(void *archive){
struct archive_entry *entry;
if( archive_read_next_header(archive,&entry) == ARCHIVE_OK ){
return entry;
}else{
return NULL;
}
}
EMSCRIPTEN_KEEPALIVE
void* get_filedata(void *archive,size_t buffsize){
void *buff = malloc( buffsize );
int read_size = archive_read_data(archive,buff,buffsize);
if( read_size < 0 ){
fprintf(stderr, "Error occured while reading file");
return (void*) read_size;
}else{
return buff;
}
}
EMSCRIPTEN_KEEPALIVE
void archive_close( void *archive ){
int r = archive_read_free(archive);
if (r != ARCHIVE_OK){
fprintf(stderr, "Error read free %d\n",r);
fprintf(stderr, "%s\n",archive_error_string(archive));
}
}
/*
#define MAXBUFLEN 1000000
EMSCRIPTEN_KEEPALIVE
int main(){
char source[MAXBUFLEN + 1];
FILE *fp = fopen("addon.zip", "r");
if (fp != NULL) {
size_t newLen = fread(source, sizeof(char), MAXBUFLEN, fp);
if ( ferror( fp ) != 0 ) {
printf("Error reading file");
} else {
source[newLen++] = '\0';
void* arch = archive_open(source,newLen);
printf("arch: %d",arch);
void* entry = get_next_entry(arch);
size_t fsize = archive_entry_size(entry);
void* file = get_filedata(arch,fsize);
printf("file: %d",file);
}
fclose(fp);
}
}*/
/*
EMSCRIPTEN_KEEPALIVE
char* list_files( const void * buf, size_t size ){
printf("list_files start\n");
struct archive *a;
struct archive_entry *entry;
int r;
char* fname = NULL;
const char* tmp;
printf("variables initialized\n");
a = archive_read_new();
archive_read_support_filter_all(a);
archive_read_support_format_all(a);
printf("libarchive initialized\n");
r = archive_read_open_memory(a, buf, size);
if (r != ARCHIVE_OK){
printf("Memory read error %d\n",r);
printf("%s\n",archive_error_string(a));
exit(1);
}
printf("start read\n");
while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
tmp = archive_entry_pathname(entry);
free(fname);
fname = malloc(strlen(tmp));
strcpy(fname,tmp);
archive_read_data_skip(a);
}
printf("finish read\n");
r = archive_read_free(a);
if (r != ARCHIVE_OK){
printf("Error read free %d\n",r);
printf("%s\n",archive_error_string(a));
exit(1);
}
return fname;
}
*/


@@ -0,0 +1,2 @@
export { Archive } from './src/libarchive.js';


@@ -0,0 +1,19 @@
import copy from 'rollup-plugin-copy-assets';
import { terser } from "rollup-plugin-terser";
export default {
input: 'src/webworker/worker.js',
output: [
{
file: 'dist/worker-bundle.js',
format: 'iife'
}
],
plugins: [
copy({
assets: [
'./src/webworker/wasm-gen'
],
}),
].concat( process.env.BUILD === 'production' ? [terser()] : [] ),
};


@@ -0,0 +1,35 @@
/**
* Represents a compressed file before extraction
*/
export class CompressedFile{
constructor(name,size,path,archiveRef){
this._name = name;
this._size = size;
this._path = path;
this._archiveRef = archiveRef;
}
/**
* file name
*/
get name(){
return this._name;
}
/**
* file size
*/
get size(){
return this._size;
}
/**
* Extract file from archive
* @returns {Promise<File>} extracted file
*/
extract(){
return this._archiveRef.extractSingleFile(this._path);
}
}
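As an illustration only (run inside an async context, assuming an `archive` instance opened with the Archive class from libarchive.js below, as the test page test-single.html later in this commit does), a CompressedFile from the listing is extracted on demand:
const files = await archive.getFilesArray();                       // entries stay CompressedFile until extracted
const entry = files.find((f) => f.file instanceof CompressedFile);
const extracted = await entry.file.extract();                      // resolves to a browser File object
console.log(entry.path + entry.file.name, entry.file.size, extracted.size);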


@@ -0,0 +1,225 @@
import { CompressedFile } from "./compressed-file.js";
export class Archive{
/**
* Initialize libarchivejs
* @param {Object} options
*/
static init(options = {}){
Archive._options = {
workerUrl: '../dist/worker-bundle.js',
...options
};
return Archive._options;
}
/**
* Creates new archive instance from browser native File object
* @param {File} file
* @param {object} options
* @returns {Archive}
*/
static open(file, options = null){
options = options || Archive._options;
if( options == null ){
options = Archive.init();
console.warn('Automatically initializing using options: ', options);
}
const arch = new Archive(file,options);
return arch.open();
}
/**
* Create new archive
* @param {File} file
* @param {Object} options
*/
constructor(file,options){
this._worker = new Worker(options.workerUrl);
this._worker.addEventListener('message', this._workerMsg.bind(this));
this._callbacks = [];
this._content = {};
this._processed = 0;
this._file = file;
}
/**
* Prepares file for reading
* @returns {Promise<Archive>} archive instance
*/
async open(){
await this._postMessage({type: 'HELLO'},(resolve,reject,msg) => {
if( msg.type === 'READY' ){
resolve();
}
});
return await this._postMessage({type: 'OPEN', file: this._file}, (resolve,reject,msg) => {
if(msg.type === 'OPENED'){
resolve(this);
}
});
}
/**
* detect if archive has encrypted data
* @returns {boolean|null} null if could not be determined
*/
hasEncryptedData(){
return this._postMessage({type: 'CHECK_ENCRYPTION'},
(resolve,reject,msg) => {
if( msg.type === 'ENCRYPTION_STATUS' ){
resolve(msg.status);
}
}
);
}
/**
* set password to be used when reading archive
*/
usePassword(archivePassword){
return this._postMessage({type: 'SET_PASSPHRASE', passphrase: archivePassword},
(resolve,reject,msg) => {
if( msg.type === 'PASSPHRASE_STATUS' ){
resolve(msg.status);
}
}
);
}
/**
* Returns object containing directory structure and file information
* @returns {Promise<object>}
*/
getFilesObject(){
if( this._processed > 0 ){
return Promise.resolve().then( () => this._content );
}
return this._postMessage({type: 'LIST_FILES'}, (resolve,reject,msg) => {
if( msg.type === 'ENTRY' ){
const entry = msg.entry;
const [ target, prop ] = this._getProp(this._content,entry.path);
if( entry.type === 'FILE' ){
target[prop] = new CompressedFile(entry.fileName,entry.size,entry.path,this);
}
return true;
}else if( msg.type === 'END' ){
this._processed = 1;
resolve(this._cloneContent(this._content));
}
});
}
getFilesArray(){
return this.getFilesObject().then( (obj) => {
return this._objectToArray(obj);
});
}
extractSingleFile(target){
return this._postMessage({type: 'EXTRACT_SINGLE_FILE', target: target},
(resolve,reject,msg) => {
if( msg.type === 'FILE' ){
const file = new File([msg.entry.fileData], msg.entry.fileName, {
type: 'application/octet-stream'
});
resolve(file);
}
}
);
}
/**
* Returns object containing directory structure and extracted File objects
* @param {Function} extractCallback
*
*/
extractFiles(extractCallback){
if( this._processed > 1 ){
return Promise.resolve().then( () => this._content );
}
return this._postMessage({type: 'EXTRACT_FILES'}, (resolve,reject,msg) => {
if( msg.type === 'ENTRY' ){
const [ target, prop ] = this._getProp(this._content,msg.entry.path);
if( msg.entry.type === 'FILE' ){
target[prop] = new File([msg.entry.fileData], msg.entry.fileName, {
type: 'application/octet-stream'
});
if (extractCallback !== undefined) {
setTimeout(extractCallback.bind(null,{
file: target[prop],
path: msg.entry.path,
}));
}
}
return true;
}else if( msg.type === 'END' ){
this._processed = 2;
this._worker.terminate();
resolve(this._cloneContent(this._content));
}
});
}
_cloneContent(obj){
if( obj instanceof File || obj instanceof CompressedFile || obj === null ) return obj;
const o = {};
for( const prop of Object.keys(obj) ){
o[prop] = this._cloneContent(obj[prop]);
}
return o;
}
_objectToArray(obj,path = ''){
const files = [];
for( const key of Object.keys(obj) ){
if( obj[key] instanceof File || obj[key] instanceof CompressedFile || obj[key] === null ){
files.push({
file: obj[key] || key,
path: path
});
}else{
files.push( ...this._objectToArray(obj[key],`${path}${key}/`) );
}
}
return files;
}
_getProp(obj,path){
const parts = path.split('/');
if( parts[parts.length -1] === '' ) parts.pop();
let cur = obj, prev = null;
for( const part of parts ){
cur[part] = cur[part] || {};
prev = cur;
cur = cur[part];
}
return [ prev, parts[parts.length-1] ];
}
_postMessage(msg,callback){
this._worker.postMessage(msg);
return new Promise((resolve,reject) => {
this._callbacks.push( this._msgHandler.bind(this,callback,resolve,reject) );
});
}
_msgHandler(callback,resolve,reject,msg){
if( msg.type === 'BUSY' ){
reject('worker is busy');
}else if( msg.type === 'ERROR' ){
reject(msg.error);
}else{
return callback(resolve,reject,msg);
}
}
_workerMsg({data: msg}){
const callback = this._callbacks[this._callbacks.length -1];
const next = callback(msg);
if( !next ){
this._callbacks.pop();
}
}
}
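For orientation, a hedged end-to-end sketch of the public API above, mirroring the test pages later in this commit; the worker bundle path, the passphrase, and the assumption that Archive is already imported are illustrative only:
Archive.init({ workerUrl: '../dist/worker-bundle.js' });           // served path is an assumption
async function extractAll(file) {                                  // file: a browser File, e.g. from <input type="file">
    const archive = await Archive.open(file);
    if (await archive.hasEncryptedData()) {
        await archive.usePassword('secret');                       // hypothetical passphrase
    }
    console.log(await archive.getFilesObject());                   // tree of CompressedFile entries
    return await archive.extractFiles(({ file, path }) =>          // optional per-file callback
        console.log('extracted', path + file.name));
}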


@@ -0,0 +1,136 @@
const TYPE_MAP = {
32768: 'FILE',
16384: 'DIR',
40960: 'SYMBOLIC_LINK',
49152: 'SOCKET',
8192: 'CHARACTER_DEVICE',
24576: 'BLOCK_DEVICE',
4096: 'NAMED_PIPE',
};
export class ArchiveReader{
/**
* archive reader
* @param {WasmModule} wasmModule emscripten module
*/
constructor(wasmModule){
this._wasmModule = wasmModule;
this._runCode = wasmModule.runCode;
this._file = null;
this._passphrase = null;
}
/**
* open an archive; it needs to be closed manually
* @param {File} file
*/
open(file){
if( this._file !== null ){
console.warn('Closing previous file');
this.close();
}
const { promise, resolve, reject } = this._promiseHandles();
this._file = file;
const reader = new FileReader();
reader.onload = () => this._loadFile(reader.result,resolve,reject);
reader.readAsArrayBuffer(file);
return promise;
}
/**
* close archive
*/
close(){
this._runCode.closeArchive(this._archive);
this._wasmModule._free(this._filePtr);
this._file = null;
this._filePtr = null;
this._archive = null;
}
/**
* detect if archive has encrypted data
* @returns {boolean|null} null if could not be determined
*/
hasEncryptedData(){
this._archive = this._runCode.openArchive( this._filePtr, this._fileLength, this._passphrase );
this._runCode.getNextEntry(this._archive);
const status = this._runCode.hasEncryptedEntries(this._archive);
if( status === 0 ){
return false;
} else if( status > 0 ){
return true;
} else {
return null;
}
}
/**
* set passphrase to be used with archive
* @param {*} passphrase
*/
setPassphrase(passphrase){
this._passphrase = passphrase;
}
/**
* get archive entries
* @param {boolean} skipExtraction
* @param {string} except don't skip this entry
*/
*entries(skipExtraction = false, except = null){
this._archive = this._runCode.openArchive( this._filePtr, this._fileLength, this._passphrase );
let entry;
while( true ){
entry = this._runCode.getNextEntry(this._archive);
if( entry === 0 ) break;
const entryData = {
size: this._runCode.getEntrySize(entry),
path: this._runCode.getEntryName(entry),
type: TYPE_MAP[this._runCode.getEntryType(entry)],
ref: entry,
};
if( entryData.type === 'FILE' ){
let fileName = entryData.path.split('/');
entryData.fileName = fileName[fileName.length - 1];
}
if( skipExtraction && except !== entryData.path ){
this._runCode.skipEntry(this._archive);
}else{
const ptr = this._runCode.getFileData(this._archive,entryData.size);
if( ptr < 0 ){
throw new Error(this._runCode.getError(this._archive));
}
entryData.fileData = this._wasmModule.HEAP8.slice(ptr,ptr+entryData.size);
this._wasmModule._free(ptr);
}
yield entryData;
}
}
_loadFile(fileBuffer,resolve,reject){
try{
const array = new Uint8Array(fileBuffer);
this._fileLength = array.length;
this._filePtr = this._runCode.malloc(this._fileLength);
this._wasmModule.HEAP8.set(array, this._filePtr);
resolve();
}catch(error){
reject(error);
}
}
_promiseHandles(){
let resolve = null,reject = null;
const promise = new Promise((_resolve,_reject) => {
resolve = _resolve;
reject = _reject;
});
return { promise, resolve, reject };
}
}
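A short sketch (illustration only, inside an async context) of how the worker later in this commit drives this reader; `reader` is assumed to come from getWasmModule, `file` is a browser File, and the target path is a hypothetical example:
await reader.open(file);                            // copy the file into the wasm heap
for (const entry of reader.entries(true)) {         // skipExtraction = true: list entries only
    console.log(entry.path, entry.type, entry.size);
}
for (const entry of reader.entries(true, 'docs/readme.txt')) {  // extract just this one path
    if (entry.fileData) { /* Int8Array copied out of the wasm heap */ }
}
reader.close();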

File diff suppressed because one or more lines are too long


@@ -0,0 +1,97 @@
/* eslint-disable no-undef */
import libarchive from './wasm-gen/libarchive.js';
export class WasmModule{
constructor(){
this.preRun = [];
this.postRun = [];
this.totalDependencies = 0;
}
print(...text){
console.log(text);
}
printErr(...text){
console.error(text);
}
initFunctions(){
this.runCode = {
// const char * get_version()
getVersion: this.cwrap('get_version', 'string', []),
// void * archive_open( const void * buffer, size_t buffer_size, const char * passphrase )
// returns archive pointer
openArchive: this.cwrap('archive_open', 'number', ['number','number','string']),
// void * get_entry(void * archive)
// return archive entry pointer
getNextEntry: this.cwrap('get_next_entry', 'number', ['number']),
// void * get_filedata( void * archive, size_t bufferSize )
getFileData: this.cwrap('get_filedata', 'number', ['number','number']),
// int archive_read_data_skip(struct archive *_a)
skipEntry: this.cwrap('archive_read_data_skip', 'number', ['number']),
// void archive_close( void * archive )
closeArchive: this.cwrap('archive_close', null, ['number'] ),
// la_int64_t archive_entry_size( struct archive_entry * )
getEntrySize: this.cwrap('archive_entry_size', 'number', ['number']),
// const char * archive_entry_pathname( struct archive_entry * )
getEntryName: this.cwrap('archive_entry_pathname', 'string', ['number']),
// __LA_MODE_T archive_entry_filetype( struct archive_entry * )
/*
#define AE_IFMT ((__LA_MODE_T)0170000)
#define AE_IFREG ((__LA_MODE_T)0100000) // Regular file
#define AE_IFLNK ((__LA_MODE_T)0120000) // Symbolic link
#define AE_IFSOCK ((__LA_MODE_T)0140000) // Socket
#define AE_IFCHR ((__LA_MODE_T)0020000) // Character device
#define AE_IFBLK ((__LA_MODE_T)0060000) // Block device
#define AE_IFDIR ((__LA_MODE_T)0040000) // Directory
#define AE_IFIFO ((__LA_MODE_T)0010000) // Named pipe
*/
getEntryType: this.cwrap('archive_entry_filetype', 'number', ['number']),
// const char * archive_error_string(struct archive *);
getError: this.cwrap('archive_error_string', 'string', ['number']),
/*
* Returns 1 if the archive contains at least one encrypted entry.
* If the archive format does not support encryption at all,
* ARCHIVE_READ_FORMAT_ENCRYPTION_UNSUPPORTED is returned.
* If for any other reason (e.g. not enough data read so far)
* we cannot say whether there are encrypted entries, then
* ARCHIVE_READ_FORMAT_ENCRYPTION_DONT_KNOW is returned.
* In general, this function will return values below zero when the
* reader is uncertain or totally incapable of encryption support.
* When this function returns 0 you can be sure that the reader
* supports encryption detection but no encrypted entries have
* been found yet.
*
* NOTE: If the metadata/header of an archive is also encrypted, you
* cannot rely on the number of encrypted entries. That is why this
* function does not return the number of encrypted entries but
* just shows that there are some.
*/
// __LA_DECL int archive_read_has_encrypted_entries(struct archive *);
entryIsEncrypted: this.cwrap('archive_entry_is_encrypted', 'number', ['number']),
hasEncryptedEntries: this.cwrap('archive_read_has_encrypted_entries', 'number', ['number']),
// __LA_DECL int archive_read_add_passphrase(struct archive *, const char *);
addPassphrase: this.cwrap('archive_read_add_passphrase', 'number', ['number','string']),
//this.stringToUTF(str), //
string: (str) => this.allocate(this.intArrayFromString(str), 'i8', 0),
malloc: this.cwrap('malloc', 'number', ['number']),
free: this.cwrap('free', null, ['number']),
};
//console.log(this.runCode.getVersion());
}
monitorRunDependencies(){}
locateFile(path /* ,prefix */ ){
return `wasm-gen/${path}`;
}
}
export function getWasmModule(cb){
libarchive( new WasmModule() ).then( (module) => {
module.initFunctions();
cb(module);
});
}


@@ -0,0 +1,69 @@
import {ArchiveReader} from './archive-reader';
import {getWasmModule} from './wasm-module';
let reader = null;
let busy = false;
getWasmModule( (wasmModule) => {
reader = new ArchiveReader(wasmModule);
busy = false;
self.postMessage({type: 'READY'});
});
self.onmessage = async ({data: msg}) => {
if( busy ){
self.postMessage({ type: 'BUSY' });
return;
}
let skipExtraction = false;
busy = true;
try{
switch(msg.type){
case 'HELLO': // module will respond READY when it's ready
break;
case 'OPEN':
await reader.open(msg.file);
self.postMessage({ type: 'OPENED' });
break;
case 'LIST_FILES':
skipExtraction = true;
// eslint-disable-next-line no-fallthrough
case 'EXTRACT_FILES':
for( const entry of reader.entries(skipExtraction) ){
self.postMessage({ type: 'ENTRY', entry });
}
self.postMessage({ type: 'END' });
break;
case 'EXTRACT_SINGLE_FILE':
for( const entry of reader.entries(true,msg.target) ){
if( entry.fileData ){
self.postMessage({ type: 'FILE', entry });
}
}
break;
case 'CHECK_ENCRYPTION':
self.postMessage({ type: 'ENCRYPTION_STATUS', status: reader.hasEncryptedData() });
break;
case 'SET_PASSPHRASE':
reader.setPassphrase( msg.passphrase );
self.postMessage({ type: 'PASSPHRASE_STATUS', status: true });
break;
default:
throw new Error('Invalid Command');
}
}catch(err){
self.postMessage({
type: 'ERROR',
error: {
message: err.message,
name: err.name,
stack: err.stack
}
});
}finally{
// eslint-disable-next-line require-atomic-updates
busy = false;
}
};
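For reference, a sketch of driving this worker directly from the main thread (the Archive class earlier in this commit is the real consumer); the bundle path and `someFile` are assumptions:
const worker = new Worker('dist/worker-bundle.js');                // assumed build output path
worker.onmessage = ({ data }) => {
    switch (data.type) {
        case 'READY':  worker.postMessage({ type: 'OPEN', file: someFile }); break; // someFile: a browser File
        case 'OPENED': worker.postMessage({ type: 'LIST_FILES' }); break;
        case 'ENTRY':  console.log(data.entry.path, data.entry.size); break;
        case 'END':    worker.terminate(); break;
        case 'ERROR':  console.error(data.error); break;
    }
};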


@@ -0,0 +1,2 @@
.vscode
.idea


@@ -0,0 +1 @@
# Adjaranet plugin for Kodi


@@ -0,0 +1,120 @@
import simplejson as json
from httplib2 import Http
import re
import urllib2
import sys
import urllib
import urlparse
import xbmc
import xbmcgui
import xbmcplugin
API_BASE = 'http://net.adjara.com/'
STATIC_FILES = 'http://staticnet.adjara.com/'
CATEGORY_MAP = {
'new_release': 'Search/SearchResults?ajax=1&display=15&startYear=1900&endYear=2018&offset=0&orderBy=date&order%5Border%5D=data&order%5Bdata%5D=premiere&order%5Bmeta%5D=desc',
'top_movies': 'Search/SearchResults?ajax=1&display=15&startYear=1900&endYear=2018&offset=15&orderBy=date&order%5Border%5D=data&order%5Bdata%5D=views&order%5Bmeta%5D=views-week'
}
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
args = urlparse.parse_qs(sys.argv[2][1:])
find_var_regex = re.compile(r"""movieUrlEmpty\s*=\s*[\'\"](.+)[\'\"]""")
xbmcplugin.setContent(addon_handle, 'movies')
def get_icon(movie_id):
    movie_id = str(movie_id)
    return STATIC_FILES + 'moviecontent/%s/covers/157x236-%s.jpg' % (movie_id,movie_id)
def get_cover(movie_id):
    movie_id = str(movie_id)
    return STATIC_FILES + 'moviecontent/%s/covers/1920x1080-%s.jpg' % (movie_id,movie_id)
def build_url(query):
    return base_url + '?' + urllib.urlencode(query)
def add_category(label,category,iconImage = 'DefaultFolder.png', url = None):
    if url is None:
        url = build_url({'mode': 'category', 'category': category})
    li = xbmcgui.ListItem(label, iconImage=iconImage)
    xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
                                listitem=li, isFolder=True)
def main_screen():
    add_category('Search',None,'DefaultAddonsSearch.png',build_url({'mode': 'search'}))
    add_category('New Releases','new_release')
    add_category('Top Movies','top_movies')
    xbmcplugin.endOfDirectory(addon_handle)
def load_category(category):
    cat_url = API_BASE + CATEGORY_MAP[category]
    try:
        (rsp_headers, json_data) = Http().request(cat_url)
        data = json.loads(json_data)
        for item in data['data']:
            url = build_url({'mode': 'movie', 'id': item['id']})
            li = xbmcgui.ListItem(item['title_en'], iconImage=item['poster'])
            li.setProperty('IsPlayable', 'true')
            xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
    except Exception, e:
        xbmc.log('adjaranet: got http error fetching %s \n %s' % (cat_url, str(e)), xbmc.LOGWARNING)
    finally:
        xbmcplugin.endOfDirectory(addon_handle)
def search():
    kb = xbmc.Keyboard('', 'Search for movie')
    kb.doModal()
    if (kb.isConfirmed()):
        search_term = kb.getText()
    else:
        return
    search_url = API_BASE + 'Home/quick_search?ajax=1&search=' + search_term
    try:
        (rsp_headers, json_data) = Http().request(search_url)
        data = json.loads(json_data)
        for item in data['movies']['data']:
            url = build_url({'mode': 'movie', 'id': item['id']})
            li = xbmcgui.ListItem(item['title_en'])
            li.setArt({
                'icon': get_icon(item['id']),
                'landscape': get_cover(item['id'])
            })
            li.setProperty('IsPlayable', 'true')
            xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
    except Exception, e:
        xbmc.log('adjaranet: got http error fetching %s \n %s' % (search_url, str(e)), xbmc.LOGWARNING)
    finally:
        xbmcplugin.endOfDirectory(addon_handle)
def load_movie(movie_id):
    script_url = API_BASE + 'Movie/main?id='+ movie_id +'&js=1'
    try:
        (rsp_headers, html_data) = Http().request(script_url)
        match = re.search(find_var_regex,html_data)
        if not match:
            xbmc.log('can not find url at %s' % (script_url), xbmc.LOGWARNING)
            raise Exception('url not found')
        url = match.group(1).replace('{lang}','English').replace('{quality}','1500')
        xbmc.log(url, xbmc.LOGWARNING)
        play_item = xbmcgui.ListItem(path=url)
        xbmcplugin.setResolvedUrl(addon_handle, True, listitem=play_item)
    except Exception, e:
        xbmc.log('adjaranet: got http error fetching %s \n %s' % (script_url, str(e)), xbmc.LOGWARNING)
mode = args.get('mode', None)
if mode is None:
    main_screen()
elif mode[0] == 'category':
    category = args.get('category', ['new_release'])
    load_category(category[0])
elif mode[0] == 'search':
    search()
elif mode[0] == 'movie':
    movie_id = args.get('id', None)
    load_movie(movie_id[0])


@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="plugin.addon.adjaranet.client" name="adjaranet client" version="0.0.1" provider-name="You">
<requires>
<import addon="xbmc.python" version="2.1.0"/>
<import addon="script.module.simplejson" />
<import addon="script.module.httplib2" />
</requires>
<extension point="xbmc.python.pluginsource" library="addon.py">
<provides>video</provides>
</extension>
<extension point="xbmc.addon.metadata">
<summary lang="en_GB">Adjaranet.com client</summary>
<description lang="en_GB">enable adjaranet inside kodi</description>
<disclaimer lang="en_GB"></disclaimer>
<language></language>
<platform>all</platform>
<license></license>
<forum></forum>
<website></website>
<email></email>
<source></source>
<news></news>
<assets>
<icon></icon>
<fanart></fanart>
<banner></banner>
<clearlogo></clearlogo>
<screenshot></screenshot>
</assets>
</extension>
</addon>


@@ -0,0 +1,9 @@
module.exports.checksum = {
'.gitignore':'d1e8d4fa856e17b2ad54a216aae527a880873df76cc30a85d6ba6b32d2ee23cc',
'addon':{
'addon.py':'e0ab20fe5fd7ab5c2b38511d81d93b9cb6246e300d0893face50e8a5b9485b90',
'addon.xml':'d26a8bdf02e7ab2eaeadf2ab603a1d11b2a5bfe57a6ac672d1a1c4940958eba8'
},
'README.md':'b4555fd8dd6e81599625c1232e58d5e09fc36f3f6614bf792a6978b30cfe65bb'
};

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,80 @@
<!DOCTYPE html>
<html>
<head>
<title>test webworker</title>
</head>
<body>
<input type="file" id="file" />
<script type="module" >
function hex(buffer) {
const hexCodes = [];
const view = new DataView(buffer);
for (let i = 0; i < view.byteLength; i += 4) {
const value = view.getUint32(i)
const stringValue = value.toString(16)
const padding = '00000000'
const paddedValue = (padding + stringValue).slice(-padding.length)
hexCodes.push(paddedValue);
}
return hexCodes.join("");
}
function getChecksum(file){
return new Promise((resolve,reject) => {
try{
const reader = new FileReader();
reader.onload = function() {
crypto.subtle.digest("SHA-256", reader.result).then(function (hash) {
resolve(hex(hash));
});
}
reader.readAsArrayBuffer(file);
}catch(err){
reject(err);
}
});
}
function finish(){
const d = document.createElement('div');
d.setAttribute('id','done');
d.textContent = 'Done.';
document.body.appendChild(d);
}
async function fileChecksums(obj){
for( const [key,val] of Object.entries(obj) ){
obj[key] = val instanceof File ?
await getChecksum(val) : await fileChecksums(val);
}
return obj;
}
import {Archive} from '../../src/libarchive.js';
Archive.init({
workerUrl: '../../dist/worker-bundle.js'
});
window.Archive = Archive;
document.getElementById('file').addEventListener('change', async (e) => {
let obj = null, encEntries = false;
try{
const file = e.currentTarget.files[0];
const archive = await Archive.open(file);
encEntries = await archive.hasEncryptedData();
await archive.usePassword("nika");
obj = await archive.extractFiles();
obj = await fileChecksums(obj);
}catch(err){
console.error(err);
}finally{
window.obj = {files: obj, encrypted: encEntries};
finish();
}
});
</script>
</body>
</html>


@@ -0,0 +1,82 @@
<!DOCTYPE html>
<html>
<head>
<title>test webworker</title>
</head>
<body>
<input type="file" id="file" />
<script type="module" >
function hex(buffer) {
const hexCodes = [];
const view = new DataView(buffer);
for (let i = 0; i < view.byteLength; i += 4) {
const value = view.getUint32(i)
const stringValue = value.toString(16)
const padding = '00000000'
const paddedValue = (padding + stringValue).slice(-padding.length)
hexCodes.push(paddedValue);
}
return hexCodes.join("");
}
function getChecksum(file){
return new Promise((resolve,reject) => {
try{
const reader = new FileReader();
reader.onload = function() {
crypto.subtle.digest("SHA-256", reader.result).then(function (hash) {
resolve(hex(hash));
});
}
reader.readAsArrayBuffer(file);
}catch(err){
reject(err);
}
});
}
function finish(){
const d = document.createElement('div');
d.setAttribute('id','done');
d.textContent = 'Done.';
document.body.appendChild(d);
}
async function fileChecksums(obj){
for( const [key,val] of Object.entries(obj) ){
obj[key] = val instanceof File ?
await getChecksum(val) : await fileChecksums(val);
}
return obj;
}
import {Archive} from '../../src/libarchive.js';
Archive.init({
workerUrl: '../../dist/worker-bundle.js'
});
window.Archive = Archive;
document.getElementById('file').addEventListener('change', async (e) => {
let obj = null;
try{
const file = e.currentTarget.files[0];
const archive = await Archive.open(file);
//console.log( await archive.getFilesObject() );
//console.log( await archive.getFilesArray() );
obj = await archive.extractFiles();
//console.log( await archive.getFilesObject() );
//console.log( await archive.getFilesArray() );
obj = await fileChecksums(obj);
}catch(err){
console.error(err);
}finally{
window.obj = obj;
finish();
}
});
</script>
</body>
</html>


@@ -0,0 +1,78 @@
<!DOCTYPE html>
<html>
<head>
<title>test webworker</title>
</head>
<body>
<input type="file" id="file" />
<script type="module" >
function hex(buffer) {
const hexCodes = [];
const view = new DataView(buffer);
for (let i = 0; i < view.byteLength; i += 4) {
const value = view.getUint32(i)
const stringValue = value.toString(16)
const padding = '00000000'
const paddedValue = (padding + stringValue).slice(-padding.length)
hexCodes.push(paddedValue);
}
return hexCodes.join("");
}
function getChecksum(file){
return new Promise((resolve,reject) => {
try{
const reader = new FileReader();
reader.onload = function() {
crypto.subtle.digest("SHA-256", reader.result).then(function (hash) {
resolve(hex(hash));
});
}
reader.readAsArrayBuffer(file);
}catch(err){
reject(err);
}
});
}
function finish(){
const d = document.createElement('div');
d.setAttribute('id','done');
d.textContent = 'Done.';
document.body.appendChild(d);
}
async function fileChecksums(obj){
for( const [key,val] of Object.entries(obj) ){
obj[key] = val instanceof File ?
await getChecksum(val) : await fileChecksums(val);
}
return obj;
}
import {Archive} from '../../src/libarchive.js';
Archive.init({
workerUrl: '../../dist/worker-bundle.js'
});
window.Archive = Archive;
document.getElementById('file').addEventListener('change', async (e) => {
let fileObj;
try{
const file = e.currentTarget.files[0];
const archive = await Archive.open(file);
const files = await archive.getFilesArray();
fileObj = await files[0].file.extract();
}catch(err){
console.error(err);
}finally{
window.obj = await getChecksum(fileObj);
finish();
}
});
</script>
</body>
</html>


@@ -0,0 +1,36 @@
/* eslint-disable no-undef */
const {checksum} = require('../checksum');
const {navigate,inputFile,response,setup,cleanup} = require('../testutils');
let browser,page;
beforeAll(async () => {
let tmp = await setup();
browser = tmp.browser;
page = tmp.page;
});
describe("Extract 7Z files with various compressions", () => {
test("Extract 7Z with LZMA", async () => {
await navigate(page);
await inputFile('archives/7z/lzma.7z',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract 7Z with LZMA2", async () => {
await navigate(page);
await inputFile('archives/7z/lzma2.7z',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract 7Z with BZIP2", async () => {
await navigate(page);
await inputFile('archives/7z/bzip2.7z',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
});
afterAll(() => {
cleanup(browser);
});


@@ -0,0 +1,30 @@
/* eslint-disable no-undef */
const {checksum} = require('../checksum');
const {navigate,inputFile,response,setup,cleanup} = require('../testutils');
let browser,page;
beforeAll(async () => {
let tmp = await setup();
browser = tmp.browser;
page = tmp.page;
});
describe("Extract RAR files", () => {
test("Extract RAR v4", async () => {
await navigate(page);
await inputFile('archives/rar/test-v4.rar',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract RAR v5", async () => {
await navigate(page);
await inputFile('archives/rar/test-v5.rar',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
});
afterAll(() => {
cleanup(browser);
});


@@ -0,0 +1,42 @@
/* eslint-disable no-undef */
const {checksum} = require('../checksum');
const {navigate,inputFile,response,setup,cleanup} = require('../testutils');
let browser,page;
beforeAll(async () => {
let tmp = await setup();
browser = tmp.browser;
page = tmp.page;
});
describe("Extract TAR files with various compressions", () => {
test("Extract TAR without compression", async () => {
await navigate(page);
await inputFile('archives/tar/test.tar',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract TAR BZIP2", async () => {
await navigate(page);
await inputFile('archives/tar/test.tar.bz2',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract TAR GZIP", async () => {
await navigate(page);
await inputFile('archives/tar/test.tar.gz',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract TAR LZMA2", async () => {
await navigate(page);
await inputFile('archives/tar/test.tar.xz',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
});
afterAll(() => {
cleanup(browser);
});


@@ -0,0 +1,42 @@
/* eslint-disable no-undef */
const {checksum} = require('../checksum');
const {navigate,inputFile,response,setup,cleanup} = require('../testutils');
let browser,page;
beforeAll(async () => {
let tmp = await setup();
browser = tmp.browser;
page = tmp.page;
});
describe("Extract ZIP files with various compressions", () => {
test("Extract ZIP deflate", async () => {
await navigate(page);
await inputFile('archives/zip/deflate.zip',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
/* test("Extract ZIP deflate64", async () => { // not support
await navigate(page);
await inputFile('archives/zip/deflate64.zip',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000); */
test("Extract ZIP bzip2", async () => {
await navigate(page);
await inputFile('archives/zip/bzip2.zip',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("Extract ZIP lzma", async () => {
await navigate(page);
await inputFile('archives/zip/lzma.zip',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
});
afterAll(() => {
cleanup(browser);
});


@@ -0,0 +1,39 @@
/* eslint-disable no-undef */
const {checksum} = require('./checksum');
const {navigate,inputFile,response,setup,cleanup} = require('./testutils');
let browser,page;
beforeAll(async () => {
let tmp = await setup();
browser = tmp.browser;
page = tmp.page;
});
describe("extract various compression types", () => {
test("extract 7z file", async () => {
await navigate(page);
await inputFile('archives/test.7z',page);
const files = await response(page);
expect(files).toEqual(checksum);
}, 16000);
test("extract single file from zip", async () => {
await navigate(page,'test-single.html');
await inputFile('archives/test.zip',page);
const file = await response(page);
expect(file).toEqual(checksum['.gitignore']);
}, 16000);
test("extract encrypted zip", async () => {
await navigate(page,'encryption.html');
await inputFile('archives/encrypted.zip',page);
const {files,encrypted} = await response(page);
expect(encrypted).toEqual(true);
expect(files).toEqual(checksum);
}, 16000);
});
afterAll(() => {
cleanup(browser);
});


@@ -0,0 +1,46 @@
const StaticServer = require('static-server');
const puppeteer = require('puppeteer');
const port = 8787;
const width = 800;
const height = 600;
const server = new StaticServer({
rootPath: '.',
port: port,
cors: '*',
});
const startServer = () => new Promise((resolve) => {
server.start( () => {
console.log('Server listening to', port);
resolve();
});
});
module.exports = {
setup: async () => {
let browser = await puppeteer.launch();
let page = await browser.newPage();
await page.setViewport({ width, height });
await startServer();
page.on('console', msg => {
for (let i = 0; i < msg.args().length; ++i) console.log(`${i}: ${msg.args()[i]}`);
});
return {browser,page};
},
cleanup: (browser) => {
server.stop();
browser.close();
},
navigate: async function (page, path = 'index.html') {
await page.goto(`http://127.0.0.1:${port}/test/files/${path}`);
},
inputFile: async function (file,page){
const fileInp = await page.$('#file');
fileInp.uploadFile('test/files/'+file);
},
response: async function (page){
await page.waitForSelector('#done');
return await page.evaluate(`window.obj`);
}
};

static/js/mammoth.browser.min.js · 22 additions · vendored Normal file

File diff suppressed because one or more lines are too long

static/js/worker.js · 1 addition · Normal file

@@ -0,0 +1 @@
console.log('hi, I am a worker');