"use strict";

/** @const */
var ASYNC_SAFE = false;

(function() {
    if (typeof XMLHttpRequest === "undefined") {
        v86util.load_file = load_file_nodejs;
    } else {
        v86util.load_file = load_file;
    }

    v86util.AsyncXHRBuffer = AsyncXHRBuffer;
    v86util.AsyncXHRPartfileBuffer = AsyncXHRPartfileBuffer;
    v86util.AsyncFileBuffer = AsyncFileBuffer;
    v86util.SyncFileBuffer = SyncFileBuffer;

    // Reads len characters at offset from Memory object mem as a JS string
    v86util.read_sized_string_from_mem = function read_sized_string_from_mem(mem, offset, len) {
        offset >>>= 0;
        len >>>= 0;
        return String.fromCharCode(...new Uint8Array(mem.buffer, offset, len));
    };
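    // Usage sketch (offset and length are illustrative): given a Memory-like
    // object exposing an ArrayBuffer as `mem.buffer`, reading 5 bytes at
    // offset 16 that hold the character codes for "hello" returns "hello".
    // Bytes are interpreted as raw char codes (Latin-1), not UTF-8.
    //
    //     var s = v86util.read_sized_string_from_mem(mem, 16, 5);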

    /**
     * @param {string} filename
     * @param {Object} options
     * @param {number=} n_tries
     */
    function load_file(filename, options, n_tries) {
        var http = new XMLHttpRequest();

        http.open(options.method || "get", filename, true);

        if (options.as_json) {
            http.responseType = "json";
        } else {
            http.responseType = "arraybuffer";
        }

        if (options.headers) {
            var header_names = Object.keys(options.headers);

            for (var i = 0; i < header_names.length; i++) {
                var name = header_names[i];
                http.setRequestHeader(name, options.headers[name]);
            }
        }

        if (options.range) {
            let start = options.range.start;
            let end = start + options.range.length - 1;
            http.setRequestHeader("Range", "bytes=" + start + "-" + end);

            // Abort if server responds with complete file in response to range
            // request, to prevent downloading large files from broken http servers
            http.onreadystatechange = function() {
                if (http.status === 200) {
                    http.abort();
                }
            };
        }

        http.onload = function(e) {
            if (http.readyState === 4) {
                if (http.status !== 200 && http.status !== 206) {
                    console.error("Loading the image " + filename + " failed (status %d)", http.status);
                    if (http.status >= 500 && http.status < 600) {
                        retry();
                    }
                } else if (http.response) {
                    options.done && options.done(http.response, http);
                }
            }
        };

        http.onerror = function(e) {
            console.error("Loading the image " + filename + " failed", e);
            retry();
        };

        if (options.progress) {
            http.onprogress = function(e) {
                options.progress(e);
            };
        }

        http.send(null);

        function retry() {
            const number_of_tries = n_tries || 0;
            const timeout = [1, 1, 2, 3, 5, 8, 13, 21][number_of_tries] || 34;
            setTimeout(() => {
                load_file(filename, options, number_of_tries + 1);
            }, 1000 * timeout);
        }
    }
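    // Usage sketch for load_file; the URL and byte range are illustrative, the
    // option names match what the function reads above:
    //
    //     v86util.load_file("images/disk.img", {
    //         range: { start: 0, length: 4096 },   // sends "Range: bytes=0-4095"
    //         done: function(buffer, http) {
    //             // buffer: ArrayBuffer (or parsed JSON when as_json is set)
    //         },
    //         progress: function(e) { /* forwarded XHR progress events */ },
    //     });
    //
    // Failed requests (network errors, 5xx responses) are retried with a
    // growing Fibonacci delay of 1, 1, 2, 3, 5, ... seconds.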

    function load_file_nodejs(filename, options) {
        let fs = require("fs");

        if (options.range) {
            dbg_assert(!options.as_json);

            fs["open"](filename, "r", (err, fd) => {
                if (err) throw err;

                let length = options.range.length;
                var buffer = Buffer.allocUnsafe(length);

                fs["read"](fd, buffer, 0, length, options.range.start, (err, bytes_read) => {
                    if (err) throw err;

                    dbg_assert(bytes_read === length);
                    options.done && options.done(new Uint8Array(buffer));

                    fs["close"](fd, (err) => {
                        if (err) throw err;
                    });
                });
            });
        } else {
            var o = {
                encoding: options.as_json ? "utf-8" : null,
            };

            fs["readFile"](filename, o, function(err, data) {
                if (err) {
                    console.log("Could not read file:", filename, err);
                } else {
                    var result = data;

                    if (options.as_json) {
                        result = JSON.parse(result);
                    } else {
                        result = new Uint8Array(result).buffer;
                    }

                    options.done(result);
                }
            });
        }
    }

    if (typeof XMLHttpRequest === "undefined") {
        var determine_size = function(path, cb) {
            require("fs")["stat"](path, (err, stats) => {
                if (err) {
                    cb(err);
                } else {
                    cb(null, stats.size);
                }
            });
        };
    } else {
        var determine_size = function(url, cb) {
            v86util.load_file(url, {
                done: (buffer, http) => {
                    var header = http.getResponseHeader("Content-Range") || "";
                    var match = header.match(/\/(\d+)\s*$/);

                    if (match) {
                        cb(null, +match[1]);
                    } else {
                        const error = "`Range: bytes=...` header not supported (Got `" + header + "`)";
                        cb(error);
                    }
                },
                headers: {
                    Range: "bytes=0-0",
                }
            });
        };
    }
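    // Example of what the browser branch above parses (header value is
    // illustrative): a "Range: bytes=0-0" probe against a server that supports
    // range requests is answered with a header such as
    //
    //     Content-Range: bytes 0-0/8589934592
    //
    // from which the regex /\/(\d+)\s*$/ extracts 8589934592 as the total size.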

    /**
     * Asynchronous access to ArrayBuffer, loading blocks lazily as needed,
     * using the `Range: bytes=...` header
     *
     * @constructor
     * @param {string} filename Name of the file to download
     * @param {number|undefined} size
     */
    function AsyncXHRBuffer(filename, size) {
        this.filename = filename;

        /** @const */
        this.block_size = 256;
        this.byteLength = size;

        this.loaded_blocks = Object.create(null);

        this.onload = undefined;
        this.onprogress = undefined;
    }

    AsyncXHRBuffer.prototype.load = function() {
        if (this.byteLength !== undefined) {
            this.onload && this.onload(Object.create(null));
            return;
        }

        // Determine the size using a request

        determine_size(this.filename, (error, size) => {
            if (error) {
                throw new Error("Cannot use: " + this.filename + ". " + error);
            } else {
                dbg_assert(size >= 0);
                this.byteLength = size;
                this.onload && this.onload(Object.create(null));
            }
        });
    };

    /**
     * @param {number} offset
     * @param {number} len
     * @param {function(!Uint8Array)} fn
     */
    AsyncXHRBuffer.prototype.get_from_cache = function(offset, len, fn) {
        var number_of_blocks = len / this.block_size;
        var block_index = offset / this.block_size;

        for (var i = 0; i < number_of_blocks; i++) {
            var block = this.loaded_blocks[block_index + i];

            if (!block) {
                return;
            }
        }

        if (number_of_blocks === 1) {
            return this.loaded_blocks[block_index];
        } else {
            var result = new Uint8Array(len);
            for (var i = 0; i < number_of_blocks; i++) {
                result.set(this.loaded_blocks[block_index + i], i * this.block_size);
            }
            return result;
        }
    };

    /**
     * @param {number} offset
     * @param {number} len
     * @param {function(!Uint8Array)} fn
     */
    AsyncXHRBuffer.prototype.get = function(offset, len, fn) {
        console.assert(offset + len <= this.byteLength);
        console.assert(offset % this.block_size === 0);
        console.assert(len % this.block_size === 0);
        console.assert(len);

        var block = this.get_from_cache(offset, len, fn);
        if (block) {
            if (ASYNC_SAFE) {
                setTimeout(fn.bind(this, block), 0);
            } else {
                fn(block);
            }
            return;
        }

        v86util.load_file(this.filename, {
            done: function done(buffer) {
                var block = new Uint8Array(buffer);
                this.handle_read(offset, len, block);
                fn(block);
            }.bind(this),
            range: {
                start: offset,
                length: len
            },
        });
    };
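    // Usage sketch (URL and sizes are illustrative); the buffer is typically
    // used as a lazily downloaded backing store for a disk image:
    //
    //     var buffer = new AsyncXHRBuffer("images/disk.img", undefined);
    //     buffer.onload = function() {
    //         // byteLength has been determined via a range request
    //         buffer.get(0, 512, function(data) {
    //             // data: Uint8Array with the first two 256-byte blocks
    //         });
    //     };
    //     buffer.load();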

    /**
     * Relies on this.byteLength, this.loaded_blocks and this.block_size
     *
     * @this {AsyncFileBuffer|AsyncXHRBuffer|AsyncXHRPartfileBuffer}
     *
     * @param {number} start
     * @param {!Uint8Array} data
     * @param {function()} fn
     */
    AsyncXHRBuffer.prototype.set = function(start, data, fn) {
        console.assert(start + data.byteLength <= this.byteLength);

        var len = data.length;

        console.assert(start % this.block_size === 0);
        console.assert(len % this.block_size === 0);
        console.assert(len);

        var start_block = start / this.block_size;
        var block_count = len / this.block_size;

        for (var i = 0; i < block_count; i++) {
            var block = this.loaded_blocks[start_block + i];

            if (block === undefined) {
                block = this.loaded_blocks[start_block + i] = new Uint8Array(this.block_size);
            }

            var data_slice = data.subarray(i * this.block_size, (i + 1) * this.block_size);
            block.set(data_slice);

            console.assert(block.byteLength === data_slice.length);
        }

        fn();
    };

    /**
     * @this {AsyncFileBuffer|AsyncXHRBuffer|AsyncXHRPartfileBuffer}
     * @param {number} offset
     * @param {number} len
     * @param {!Uint8Array} block
     */
    AsyncXHRBuffer.prototype.handle_read = function(offset, len, block) {
        // Used by AsyncXHRBuffer and AsyncFileBuffer
        // Overwrites blocks from the original source that have been written since

        var start_block = offset / this.block_size;
        var block_count = len / this.block_size;

        for (var i = 0; i < block_count; i++) {
            var written_block = this.loaded_blocks[start_block + i];

            if (written_block) {
                block.set(written_block, i * this.block_size);
            }
            //else
            //{
            //    var cached = this.loaded_blocks[start_block + i] = new Uint8Array(this.block_size);
            //    cached.set(block.subarray(i * this.block_size, (i + 1) * this.block_size));
            //}
        }
    };
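    // Worked example (block_size = 256): after set(512, <256 bytes>, fn) the
    // dirty data lives in loaded_blocks[2].  A later get(256, 512, fn)
    // re-downloads bytes 256..767, and handle_read copies loaded_blocks[2]
    // over the second half of that download, so local writes always take
    // precedence over the (possibly stale) remote content.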

    AsyncXHRBuffer.prototype.get_buffer = function(fn) {
        // We must download all parts, unlikely a good idea for big files
        fn();
    };

    AsyncXHRBuffer.prototype.get_written_blocks = function() {
        var count = Object.keys(this.loaded_blocks).length;

        var buffer = new Uint8Array(count * this.block_size);
        var indices = [];

        var i = 0;
        for (var index of Object.keys(this.loaded_blocks)) {
            var block = this.loaded_blocks[index];
            dbg_assert(block.length === this.block_size);
            index = +index;
            indices.push(index);
            buffer.set(
                block,
                i * this.block_size
            );
            i++;
        }

        return {
            buffer,
            indices,
            block_size: this.block_size,
        };
    };

    AsyncXHRBuffer.prototype.get_state = function() {
        const state = [];
        const loaded_blocks = [];

        for (let [index, block] of Object.entries(this.loaded_blocks)) {
            dbg_assert(isFinite(+index));
            loaded_blocks.push([+index, block]);
        }

        state[0] = loaded_blocks;
        return state;
    };

    AsyncXHRBuffer.prototype.set_state = function(state) {
        const loaded_blocks = state[0];
        this.loaded_blocks = Object.create(null);

        for (let [index, block] of Object.values(loaded_blocks)) {
            this.loaded_blocks[index] = block;
        }
    };
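    // Only the written (dirty) blocks are serialised above, so a state
    // round-trip is roughly (sketch):
    //
    //     const state = buffer.get_state();   // state[0] = [[index, Uint8Array], ...]
    //     restored_buffer.set_state(state);   // rebuilds loaded_blocks from state[0]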
394

395
    /**
396
     * Asynchronous access to ArrayBuffer, loading blocks lazily as needed,
397
     * downloading files named filename-\d-\d.ext.
398
     *
399
     * @constructor
400
     * @param {string} filename Name of the file to download
401
     * @param {number|undefined} size
402
     * @param {number|undefined} fixed_chunk_size
403
     */
404
    function AsyncXHRPartfileBuffer(filename, size, fixed_chunk_size) {
405
        const parts = filename.match(/(.*)(\..*)/);
406

407
        if (parts) {
408
            this.basename = parts[1];
409
            this.extension = parts[2];
410
        } else {
411
            this.basename = filename;
412
            this.extension = "";
413
        }
414

415
        /** @const */
416
        this.block_size = 256;
417
        this.byteLength = size;
418
        this.use_fixed_chunk_size = typeof fixed_chunk_size === "number";
419
        this.fixed_chunk_size = fixed_chunk_size;
420

421
        this.loaded_blocks = Object.create(null);
422

423
        this.onload = undefined;
424
        this.onprogress = undefined;
425
    }
426

427
    AsyncXHRPartfileBuffer.prototype.load = function() {
428
        if (this.byteLength !== undefined) {
429
            this.onload && this.onload(Object.create(null));
430
            return;
431
        }
432
        dbg_assert(false);
433
        this.onload && this.onload(Object.create(null));
434
    };
435
    AsyncXHRPartfileBuffer.prototype.get_from_cache = AsyncXHRBuffer.prototype.get_from_cache;
436

437
    /**
438
     * @param {number} offset
439
     * @param {number} len
440
     * @param {function(!Uint8Array)} fn
441
     */
442
    AsyncXHRPartfileBuffer.prototype.get = function(offset, len, fn) {
443
        console.assert(offset + len <= this.byteLength);
444
        console.assert(offset % this.block_size === 0);
445
        console.assert(len % this.block_size === 0);
446
        console.assert(len);
447

448
        var block = this.get_from_cache(offset, len, fn);
449
        if (block) {
450
            if (ASYNC_SAFE) {
451
                setTimeout(fn.bind(this, block), 0);
452
            } else {
453
                fn(block);
454
            }
455
            return;
456
        }
457

458
        if (this.use_fixed_chunk_size) {
459
            const fake_offset = parseInt(offset / this.fixed_chunk_size, 10) * this.fixed_chunk_size;
460
            const m_offset = offset - fake_offset;
461
            const total_count = parseInt(len / this.fixed_chunk_size, 10) + (m_offset ? 2 : 1);
462
            const blocks = new Uint8Array(m_offset + (total_count * this.fixed_chunk_size));
463
            let finished = 0;
464

465
            for (var i = 0; i < total_count; i++) {
466
                const cur = i * this.fixed_chunk_size;
467
                const part_filename = this.basename + "-" + (cur + fake_offset) + this.extension;
468

469
                v86util.load_file(part_filename, {
470
                    done: function done(buffer) {
471
                        const block = new Uint8Array(buffer);
472
                        blocks.set(block, cur);
473
                        finished++;
474
                        if (finished === total_count) {
475
                            const tmp_blocks = blocks.subarray(m_offset, m_offset + len);
476
                            this.handle_read(offset, len, tmp_blocks);
477
                            fn(tmp_blocks);
478
                        }
479
                    }.bind(this),
480
                });
481
            }
482
        } else {
483
            const part_filename = this.basename + "-" + offset + "-" + (offset + len) + this.extension;
484

485
            v86util.load_file(part_filename, {
486
                done: function done(buffer) {
487
                    dbg_assert(buffer.byteLength === len);
488
                    var block = new Uint8Array(buffer);
489
                    this.handle_read(offset, len, block);
490
                    fn(block);
491
                }.bind(this),
492
            });
493
        }
494
    };
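    // Part file naming used above (file names are illustrative): for the
    // constructor argument "disk.img" and no fixed chunk size, get(0, 256)
    // fetches "disk-0-256.img" (i.e. basename-offset-end.extension).  With a
    // fixed chunk size, whole chunks aligned to that size are fetched instead,
    // named basename-<chunk offset>.extension, e.g. "disk-1048576.img".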
495

496
    AsyncXHRPartfileBuffer.prototype.set = AsyncXHRBuffer.prototype.set;
497
    AsyncXHRPartfileBuffer.prototype.handle_read = AsyncXHRBuffer.prototype.handle_read;
498
    AsyncXHRPartfileBuffer.prototype.get_written_blocks = AsyncXHRBuffer.prototype.get_written_blocks;
499
    AsyncXHRPartfileBuffer.prototype.get_state = AsyncXHRBuffer.prototype.get_state;
500
    AsyncXHRPartfileBuffer.prototype.set_state = AsyncXHRBuffer.prototype.set_state;
501

502
    /**
503
     * Synchronous access to File, loading blocks from the input type=file
504
     * The whole file is loaded into memory during initialisation
505
     *
506
     * @constructor
507
     */
508
    function SyncFileBuffer(file) {
509
        this.file = file;
510
        this.byteLength = file.size;
511

512
        if (file.size > (1 << 30)) {
513
            console.warn("SyncFileBuffer: Allocating buffer of " + (file.size >> 20) + " MB ...");
514
        }
515

516
        this.buffer = new ArrayBuffer(file.size);
517
        this.onload = undefined;
518
        this.onprogress = undefined;
519
    }
520

521
    SyncFileBuffer.prototype.load = function() {
522
        this.load_next(0);
523
    };
524

525
    /**
526
     * @param {number} start
527
     */
528
    SyncFileBuffer.prototype.load_next = function(start) {
529
        /** @const */
530
        var PART_SIZE = 4 << 20;
531

532
        var filereader = new FileReader();
533

534
        filereader.onload = function(e) {
535
            var buffer = new Uint8Array(e.target.result);
536
            new Uint8Array(this.buffer, start).set(buffer);
537
            this.load_next(start + PART_SIZE);
538
        }.bind(this);
539

540
        if (this.onprogress) {
541
            this.onprogress({
542
                loaded: start,
543
                total: this.byteLength,
544
                lengthComputable: true,
545
            });
546
        }
547

548
        if (start < this.byteLength) {
549
            var end = Math.min(start + PART_SIZE, this.byteLength);
550
            var slice = this.file.slice(start, end);
551
            filereader.readAsArrayBuffer(slice);
552
        } else {
553
            this.file = undefined;
554
            this.onload && this.onload({
555
                buffer: this.buffer
556
            });
557
        }
558
    };
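    // Usage sketch: `file` would typically come from an <input type=file>
    // element; the whole file is copied into this.buffer in 4 MB slices.
    //
    //     var buffer = new SyncFileBuffer(file_input.files[0]);
    //     buffer.onprogress = function(e) { /* e.loaded out of e.total bytes */ };
    //     buffer.onload = function(result) {
    //         // result.buffer is the fully loaded ArrayBuffer
    //     };
    //     buffer.load();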
559

560
    /**
561
     * @param {number} start
562
     * @param {number} len
563
     * @param {function(!Uint8Array)} fn
564
     */
565
    SyncFileBuffer.prototype.get = function(start, len, fn) {
566
        console.assert(start + len <= this.byteLength);
567
        fn(new Uint8Array(this.buffer, start, len));
568
    };
569

570
    /**
571
     * @param {number} offset
572
     * @param {!Uint8Array} slice
573
     * @param {function()} fn
574
     */
575
    SyncFileBuffer.prototype.set = function(offset, slice, fn) {
576
        console.assert(offset + slice.byteLength <= this.byteLength);
577

578
        new Uint8Array(this.buffer, offset, slice.byteLength).set(slice);
579
        fn();
580
    };
581

582
    SyncFileBuffer.prototype.get_buffer = function(fn) {
583
        fn(this.buffer);
584
    };
585

586
    SyncFileBuffer.prototype.get_state = function() {
587
        const state = [];
588
        state[0] = this.byteLength;
589
        state[1] = new Uint8Array(this.buffer);
590
        return state;
591
    };
592

593
    SyncFileBuffer.prototype.set_state = function(state) {
594
        this.byteLength = state[0];
595
        this.buffer = state[1].slice().buffer;
596
    };
597

598
    /**
599
     * Asynchronous access to File, loading blocks from the input type=file
600
     *
601
     * @constructor
602
     */
603
    function AsyncFileBuffer(file) {
604
        this.file = file;
605
        this.byteLength = file.size;
606

607
        /** @const */
608
        this.block_size = 256;
609
        this.loaded_blocks = Object.create(null);
610

611
        this.onload = undefined;
612
        this.onprogress = undefined;
613
    }
614

615
    AsyncFileBuffer.prototype.load = function() {
616
        this.onload && this.onload(Object.create(null));
617
    };
618

619
    /**
620
     * @param {number} offset
621
     * @param {number} len
622
     * @param {function(!Uint8Array)} fn
623
     */
624
    AsyncFileBuffer.prototype.get = function(offset, len, fn) {
625
        console.assert(offset % this.block_size === 0);
626
        console.assert(len % this.block_size === 0);
627
        console.assert(len);
628

629
        var block = this.get_from_cache(offset, len, fn);
630
        if (block) {
631
            fn(block);
632
            return;
633
        }
634

635
        var fr = new FileReader();
636

637
        fr.onload = function(e) {
638
            var buffer = e.target.result;
639
            var block = new Uint8Array(buffer);
640

641
            this.handle_read(offset, len, block);
642
            fn(block);
643
        }.bind(this);
644

645
        fr.readAsArrayBuffer(this.file.slice(offset, offset + len));
646
    };
647
    AsyncFileBuffer.prototype.get_from_cache = AsyncXHRBuffer.prototype.get_from_cache;
648
    AsyncFileBuffer.prototype.set = AsyncXHRBuffer.prototype.set;
649
    AsyncFileBuffer.prototype.handle_read = AsyncXHRBuffer.prototype.handle_read;
650
    AsyncFileBuffer.prototype.get_state = AsyncXHRBuffer.prototype.get_state;
651

652
    AsyncFileBuffer.prototype.get_buffer = function(fn) {
653
        // We must load all parts, unlikely a good idea for big files
654
        fn();
655
    };
656

657
    AsyncFileBuffer.prototype.get_as_file = function(name) {
658
        var parts = [];
659
        var existing_blocks = Object.keys(this.loaded_blocks)
660
            .map(Number)
661
            .sort(function(x, y) {
662
                return x - y;
663
            });
664

665
        var current_offset = 0;
666

667
        for (var i = 0; i < existing_blocks.length; i++) {
668
            var block_index = existing_blocks[i];
669
            var block = this.loaded_blocks[block_index];
670
            var start = block_index * this.block_size;
671
            console.assert(start >= current_offset);
672

673
            if (start !== current_offset) {
674
                parts.push(this.file.slice(current_offset, start));
675
                current_offset = start;
676
            }
677

678
            parts.push(block);
679
            current_offset += block.length;
680
        }
681

682
        if (current_offset !== this.file.size) {
683
            parts.push(this.file.slice(current_offset));
684
        }
685

686
        var file = new File(parts, name);
687
        console.assert(file.size === this.file.size);
688

689
        return file;
690
    };
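    // Sketch of exporting a modified image back to the user: the original File
    // is stitched together with the written 256-byte blocks, so the result has
    // the same size as the source but contains all local modifications.
    //
    //     var modified = async_file_buffer.get_as_file("disk-modified.img");
    //     // e.g. offer it for download via URL.createObjectURL(modified)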

})();
