wok-current view syslinux/stuff/extra/loadhigh.u @ rev 23329

updated perl-file-desktopentry (0.04 -> 0.22)
author Hans-Günter Theisgen
date Mon Mar 30 17:44:58 2020 +0100 (2020-03-30)
parents d94dea3c101e
children
line source
--- core/fs/loadhigh.c
+++ core/fs/loadhigh.c
@@ -36,6 +36,11 @@
 #include <minmax.h>
 #include "core.h"
 #include "fs.h"
+#define LZLOAD ".c32 modules can be compressed with lz4 or lzma"
+#ifdef LZLOAD
+#include "../unlz4.c"
+#include "../unlzma.c"
+#endif
 
 #define MAX_CHUNK (1 << 20) /* 1 MB */
 
@@ -51,6 +56,9 @@
     uint32_t sector_mask;
     size_t pad;
     uint32_t retflags = 0;
+#ifdef LZLOAD
+    char *unpacked = (char *) regs->edi.l;
+#endif
 
     bytes = regs->eax.l;
     zero_mask = regs->edx.w[0];
@@ -101,6 +109,27 @@
             break;
         }
     }
+
+#ifdef LZLOAD
+    switch (* (short *) unpacked) {
+        char *packed;
+        size_t packedsz, unpackedsz;
+    case 0x005D:
+        packedsz = buf - unpacked;
+        unpackedsz = * (unsigned long *) (unpacked + 5);
+        if (unpackedsz > 1000000) break; /* no size? */
+        packed = unpacked + unpackedsz - packedsz + 1024;
+        if (packed < unpacked + 1024) packed = unpacked + 1024;
+        memmove(packed, unpacked, packedsz);
+        unlzma(packed, unpacked, packed + packedsz /* heap */);
+        buf = packed;
+        break;
+    case 0x2204:
+    case 0x2102:
+        buf = (char *) unlz4((unsigned char *) unpacked, (unsigned char *) buf);
+        break;
+    }
+#endif
 
     pad = (size_t)buf & zero_mask;
     if (pad)
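
The switch added above dispatches on the first two bytes of the module that was just read into memory at `unpacked`: 0x005D is the start of an LZMA "alone" header (properties byte 0x5D followed by the low byte of the dictionary size, which is 0x00 for the usual power-of-two sizes), while 0x2204 and 0x2102 are the first two bytes of the little-endian LZ4 frame magic 0x184D2204 and legacy magic 0x184C2102. For LZMA the packed image is first memmove'd so that it ends about 1 KB past where the unpacked module will end, letting unlzma() write its output back to `unpacked` without trampling input it has not read yet; unlz4() performs the equivalent shuffle itself. What follows is only a minimal, standalone sketch of the detection step, not part of the patch; it assumes a little-endian CPU (as on the x86 targets Syslinux runs on), and the helper names are invented for the illustration.

#include <stdio.h>

/* Sketch of the magic-byte dispatch used above (assumes little endian).
 * Not part of the patch; names are illustrative. */
enum packing { RAW, LZMA_ALONE, LZ4_FRAME, LZ4_LEGACY };

static enum packing detect(const unsigned char *module)
{
    unsigned short magic = module[0] | (module[1] << 8);

    switch (magic) {
    case 0x005D:        /* 0x5D properties byte + 0x00 of the dict size */
        return LZMA_ALONE;
    case 0x2204:        /* low 16 bits of 0x184D2204 (LZ4 frame format) */
        return LZ4_FRAME;
    case 0x2102:        /* low 16 bits of 0x184C2102 (LZ4 legacy format) */
        return LZ4_LEGACY;
    default:
        return RAW;     /* plain .c32, load as-is */
    }
}

int main(void)
{
    unsigned char lzma_hdr[2] = { 0x5D, 0x00 };
    printf("%d\n", (int) detect(lzma_hdr));   /* prints 1 (LZMA_ALONE) */
    return 0;
}
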
--- /dev/null
+++ core/unlzma.c
@@ -0,0 +1,385 @@
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned uint32_t;
+typedef unsigned long long uint64_t;
+typedef unsigned size_t;
+#define SWAP_LE32(x) (x)
+#define SWAP_LE64(x) (x)
+/* vi: set sw=4 ts=4: */
+/*
+ * Small lzma deflate implementation.
+ * Copyright (C) 2006 Aurelien Jacobs <aurel@gnuage.org>
+ *
+ * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
+ * Copyright (C) 1999-2005 Igor Pavlov
+ *
+ * Licensed under GPLv2 or later, see file LICENSE in this source tree.
+ */
+
+#include <string.h>
+
+#define PACKED __attribute__ ((packed))
+#define ALWAYS_INLINE inline
+#define speed_inline
+#define size_inline ALWAYS_INLINE
+
+
+typedef struct {
+    uint8_t *ptr;
+
+    uint32_t code;
+    uint32_t range;
+    uint32_t bound;
+} rc_t;
+
+#define RC_TOP_BITS 24
+#define RC_MOVE_BITS 5
+#define RC_MODEL_TOTAL_BITS 11
+
+/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
+static void rc_do_normalize(rc_t *rc)
+{
+    rc->range <<= 8;
+    rc->code = (rc->code << 8) | *rc->ptr++;
+}
+
+static ALWAYS_INLINE void rc_normalize(rc_t *rc)
+{
+    if (rc->range < (1 << RC_TOP_BITS)) {
+        rc_do_normalize(rc);
+    }
+}
+
+/* Called once */
+static void rc_init(rc_t *rc) /*, int buffer_size) */
+{
+    int i;
+
+    rc->range = 0;
+    for (i = 0; i < 5; i++) {
+        rc_do_normalize(rc);
+    }
+    rc->range = 0xffffffff;
+}
+
+/* rc_is_bit_1 is called 9 times */
+static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
+{
+    rc_normalize(rc);
+    rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
+    if (rc->code < rc->bound) {
+        rc->range = rc->bound;
+        *p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
+        return 0;
+    }
+    rc->range -= rc->bound;
+    rc->code -= rc->bound;
+    *p -= *p >> RC_MOVE_BITS;
+    return 1;
+}
+
+/* Called 4 times in unlzma loop */
+static ALWAYS_INLINE int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
+{
+    int ret = rc_is_bit_1(rc, p);
+    *symbol = *symbol * 2 + ret;
+    return ret;
+}
+
+/* Called once */
+static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
+{
+    rc_normalize(rc);
+    rc->range >>= 1;
+    if (rc->code >= rc->range) {
+        rc->code -= rc->range;
+        return 1;
+    }
+    return 0;
+}
+
+/* Called twice */
+static speed_inline void
+rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
+{
+    int i = num_levels;
+
+    *symbol = 1;
+    while (i--)
+        rc_get_bit(rc, p + *symbol, symbol);
+    *symbol -= 1 << num_levels;
+}
+
+
+typedef struct {
+    uint8_t pos;
+    uint32_t dict_size;
+    uint64_t dst_size;
+} PACKED lzma_header_t;
+
+
+/* #defines will force compiler to compute/optimize each one with each usage.
+ * Have heart and use enum instead. */
+enum {
+    LZMA_BASE_SIZE = 1846,
+    LZMA_LIT_SIZE = 768,
+
+    LZMA_NUM_POS_BITS_MAX = 4,
+
+    LZMA_LEN_NUM_LOW_BITS = 3,
+    LZMA_LEN_NUM_MID_BITS = 3,
+    LZMA_LEN_NUM_HIGH_BITS = 8,
+
+    LZMA_LEN_CHOICE = 0,
+    LZMA_LEN_CHOICE_2 = (LZMA_LEN_CHOICE + 1),
+    LZMA_LEN_LOW = (LZMA_LEN_CHOICE_2 + 1),
+    LZMA_LEN_MID = (LZMA_LEN_LOW \
+                    + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
+    LZMA_LEN_HIGH = (LZMA_LEN_MID \
+                     + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
+    LZMA_NUM_LEN_PROBS = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),
+
+    LZMA_NUM_STATES = 12,
+    LZMA_NUM_LIT_STATES = 7,
+
+    LZMA_START_POS_MODEL_INDEX = 4,
+    LZMA_END_POS_MODEL_INDEX = 14,
+    LZMA_NUM_FULL_DISTANCES = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),
+
+    LZMA_NUM_POS_SLOT_BITS = 6,
+    LZMA_NUM_LEN_TO_POS_STATES = 4,
+
+    LZMA_NUM_ALIGN_BITS = 4,
+
+    LZMA_MATCH_MIN_LEN = 2,
+
+    LZMA_IS_MATCH = 0,
+    LZMA_IS_REP = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
+    LZMA_IS_REP_G0 = (LZMA_IS_REP + LZMA_NUM_STATES),
+    LZMA_IS_REP_G1 = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
+    LZMA_IS_REP_G2 = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
+    LZMA_IS_REP_0_LONG = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
+    LZMA_POS_SLOT = (LZMA_IS_REP_0_LONG \
+                     + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
+    LZMA_SPEC_POS = (LZMA_POS_SLOT \
+                     + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
+    LZMA_ALIGN = (LZMA_SPEC_POS \
+                  + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
+    LZMA_LEN_CODER = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
+    LZMA_REP_LEN_CODER = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
+    LZMA_LITERAL = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
+};
+
+
+void unlzma(char *from, char *to, char *heap)
+{
+    lzma_header_t header;
+    int lc, pb, lp;
+    uint32_t pos_state_mask;
+    uint32_t literal_pos_mask;
+    uint16_t *p;
+    rc_t *rc = (rc_t *) heap;
+    int i;
+    uint8_t *buffer = (void *) to;
+    uint8_t previous_byte = 0;
+    size_t buffer_pos = 0;
+    int len = 0;
+    int state = 0;
+    uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;
+
+    memcpy(&header, from, sizeof(header));
+    from += sizeof(header);
+    heap += sizeof(*rc);
+    rc->ptr = (void *) from;
+
+    i = header.pos / 9;
+    lc = header.pos % 9;
+    pb = i / 5;
+    lp = i % 5;
+    pos_state_mask = (1 << pb) - 1;
+    literal_pos_mask = (1 << lp) - 1;
+
+    /* Example values from linux-3.3.4.tar.lzma:
+     * dict_size: 64M, dst_size: 2^64-1
+     */
+    header.dict_size = SWAP_LE32(header.dict_size);
+    header.dst_size = SWAP_LE64(header.dst_size);
+
+    //if (header.dict_size == 0)
+    //    header.dict_size++;
+
+    rc_init(rc);
+
+    {
+        int num_probs;
+
+        num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
+        //p = xmalloc(num_probs * sizeof(*p));
+        p = (void *) heap;
+        num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
+        for (i = 0; i < num_probs; i++)
+            p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;
+    }
+
+
+    while (buffer_pos < header.dst_size) {
+        int pos_state = buffer_pos & pos_state_mask;
+        uint16_t *prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;
+
+        if (!rc_is_bit_1(rc, prob)) {
+            static const char next_state[LZMA_NUM_STATES] =
+                { 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
+            int mi = 1;
+
+            prob = (p + LZMA_LITERAL
+                    + (LZMA_LIT_SIZE * (((buffer_pos & literal_pos_mask) << lc)
+                                        + (previous_byte >> (8 - lc))
+                                       )
+                      )
+            );
+
+            if (state >= LZMA_NUM_LIT_STATES) {
+                int match_byte;
+                uint32_t pos = buffer_pos - rep0;
+
+                while (pos >= header.dict_size)
+                    pos += header.dict_size;
+                match_byte = buffer[pos];
+                do {
+                    int bit;
+
+                    match_byte <<= 1;
+                    bit = match_byte & 0x100;
+                    bit ^= (rc_get_bit(rc, prob + 0x100 + bit + mi, &mi) << 8); /* 0x100 or 0 */
+                    if (bit)
+                        break;
+                } while (mi < 0x100);
+            }
+            while (mi < 0x100) {
+                rc_get_bit(rc, prob + mi, &mi);
+            }
+
+            state = next_state[state];
+
+            previous_byte = (uint8_t) mi;
+            len = 1;
+            goto one_byte2;
+        } else {
+            int num_bits;
+            int offset;
+            uint16_t *prob2;
+#define prob_len prob2
+
+            prob2 = p + LZMA_IS_REP + state;
+            if (!rc_is_bit_1(rc, prob2)) {
+                rep3 = rep2;
+                rep2 = rep1;
+                rep1 = rep0;
+                state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
+                prob2 = p + LZMA_LEN_CODER;
+            } else {
+                prob2 += LZMA_IS_REP_G0 - LZMA_IS_REP;
+                if (!rc_is_bit_1(rc, prob2)) {
+                    prob2 = (p + LZMA_IS_REP_0_LONG
+                             + (state << LZMA_NUM_POS_BITS_MAX)
+                             + pos_state
+                    );
+                    if (!rc_is_bit_1(rc, prob2)) {
+                        state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
+                        len = 1;
+                        goto string;
+                    }
+                } else {
+                    uint32_t distance;
+
+                    prob2 += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
+                    distance = rep1;
+                    if (rc_is_bit_1(rc, prob2)) {
+                        prob2 += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
+                        distance = rep2;
+                        if (rc_is_bit_1(rc, prob2)) {
+                            distance = rep3;
+                            rep3 = rep2;
+                        }
+                        rep2 = rep1;
+                    }
+                    rep1 = rep0;
+                    rep0 = distance;
+                }
+                state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
+                prob2 = p + LZMA_REP_LEN_CODER;
+            }
+
+            prob_len = prob2 + LZMA_LEN_CHOICE;
+            num_bits = LZMA_LEN_NUM_LOW_BITS;
+            if (!rc_is_bit_1(rc, prob_len)) {
+                prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
+                            + (pos_state << LZMA_LEN_NUM_LOW_BITS);
+                offset = 0;
+            } else {
+                prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
+                if (!rc_is_bit_1(rc, prob_len)) {
+                    prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
+                                + (pos_state << LZMA_LEN_NUM_MID_BITS);
+                    offset = 1 << LZMA_LEN_NUM_LOW_BITS;
+                    num_bits += LZMA_LEN_NUM_MID_BITS - LZMA_LEN_NUM_LOW_BITS;
+                } else {
+                    prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
+                    offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
+                              + (1 << LZMA_LEN_NUM_MID_BITS));
+                    num_bits += LZMA_LEN_NUM_HIGH_BITS - LZMA_LEN_NUM_LOW_BITS;
+                }
+            }
+            rc_bit_tree_decode(rc, prob_len, num_bits, &len);
+            len += offset;
+
+            if (state < 4) {
+                int pos_slot;
+                uint16_t *prob3;
+
+                state += LZMA_NUM_LIT_STATES;
+                prob3 = p + LZMA_POS_SLOT +
+                        ((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
+                          LZMA_NUM_LEN_TO_POS_STATES - 1)
+                         << LZMA_NUM_POS_SLOT_BITS);
+                rc_bit_tree_decode(rc, prob3,
+                                   LZMA_NUM_POS_SLOT_BITS, &pos_slot);
+                rep0 = pos_slot;
+                if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
+                    int i2, mi2, num_bits2 = (pos_slot >> 1) - 1;
+                    rep0 = 2 | (pos_slot & 1);
+                    if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
+                        rep0 <<= num_bits2;
+                        prob3 = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
+                    } else {
+                        for (; num_bits2 != LZMA_NUM_ALIGN_BITS; num_bits2--)
+                            rep0 = (rep0 << 1) | rc_direct_bit(rc);
+                        rep0 <<= LZMA_NUM_ALIGN_BITS;
+                        prob3 = p + LZMA_ALIGN;
+                    }
+                    i2 = 1;
+                    mi2 = 1;
+                    while (num_bits2--) {
+                        if (rc_get_bit(rc, prob3 + mi2, &mi2))
+                            rep0 |= i2;
+                        i2 <<= 1;
+                    }
+                }
+                if (++rep0 == 0)
+                    break;
+            }
+
+            len += LZMA_MATCH_MIN_LEN;
+ string:
+            do {
+                uint32_t pos = buffer_pos - rep0;
+                while (pos >= header.dict_size)
+                    pos += header.dict_size;
+                previous_byte = buffer[pos];
+ one_byte2:
+                buffer[buffer_pos++] = previous_byte;
+                len--;
+            } while (len != 0 && buffer_pos < header.dst_size);
+        }
+    }
+}
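
unlzma() above is the small busybox/LZMA SDK decoder with the allocator removed: the caller's heap pointer supplies both the range-coder state (rc_t) and the probability array, and the typedefs at the top stand in for the usual system headers. The 13-byte "alone" header it parses is one properties byte (header.pos), a 32-bit dictionary size and a 64-bit uncompressed size; the properties byte packs lc, lp and pb exactly as the arithmetic at the top of the function undoes it. A small worked example of that unpacking, using the common default properties byte 0x5D (illustrative only, not part of the patch):

#include <stdio.h>
#include <stdint.h>

/* Sketch: how the first byte of an LZMA "alone" header is interpreted by
 * unlzma() above. 0x5D is the usual default, i.e. lc=3, lp=0, pb=2. */
int main(void)
{
    uint8_t props = 0x5D;          /* header.pos in the decoder */
    int i  = props / 9;            /* 10 */
    int lc = props % 9;            /* 3: literal context bits  */
    int pb = i / 5;                /* 2: position bits         */
    int lp = i % 5;                /* 0: literal position bits */

    printf("lc=%d lp=%d pb=%d\n", lc, lp, pb);
    printf("pos_state_mask=0x%x literal_pos_mask=0x%x\n",
           (1 << pb) - 1, (1 << lp) - 1);
    return 0;
}
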
--- /dev/null
+++ core/unlz4.c
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2015, pascal.bellard@slitaz.org
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <string.h>
+
+#define LZ4_MAGIC 0x184D2204 /* Spec 1.5.0 */
+#define LZ4_LEGACY 0x184C2102
+#define LZ4_SKIP(n) ((((n) - 0x184D2A50) >> 4) == 0)
+
+static unsigned lz4cnt(unsigned char **p, unsigned n)
+{
+    int i;
+
+    if (n == 0xF) do {
+        i = *(*p)++;
+        n += i;
+    } while (i == 0xFF);
+    return n;
+}
+
+unsigned char *unlz4(unsigned char *from, unsigned char *end)
+{
+    unsigned char *p, *end_chunk, *to, flags, mask;
+    long magic;
+    unsigned i, n, size;
+
+    for (p = from, flags = size = 0; p < end;) {
+        while (1) {
+            magic = * (long *) p;
+            p += sizeof(long);
+            if (magic == LZ4_LEGACY) continue;
+            if (magic != LZ4_MAGIC) break;
+            flags = *p;
+            if (flags & 8) {
+                size = * (unsigned *) (p + 2);
+                goto sizefound;
+            }
+            p += 3; /* skip FLG BD HC */
+        }
+        if (LZ4_SKIP(magic)) {
+            p += 4 + * (long *) p;
+            continue;
+        }
+        mask = 4; /* Content checksum */
+        if (magic) {
+            if (magic > 0)
+                for (end_chunk = p + magic; p < end_chunk;) {
+                    unsigned char token = *p++;
+
+                    n = lz4cnt(&p, token >> 4);
+                    size += n;
+                    p += n;
+                    if (p >= end_chunk) break;
+                    p += sizeof(unsigned short);
+                    size += 4 + lz4cnt(&p, token & 0xF);
+                }
+            else {
+                magic &= 0x7FffFFff;
+                p += magic;
+                size += magic;
+            }
+            mask = 0x10; /* Block checksum */
+        }
+        if (flags & mask) p += 4; /* skip block checksum */
+    }
+sizefound:
+    size += 16 - (p - from);
+    memmove(from + size, from, p - from);
+    for (to = from, p = from += size, end += size, flags = 0; p < end;) {
+        while (1) {
+            magic = * (long *) p;
+            p += sizeof(long);
+            if (magic == LZ4_LEGACY) continue;
+            if (magic != LZ4_MAGIC) break;
+            flags = *p;
+            if (flags & 8) p += 8; /* skip size */
+            p += 3; /* skip FLG BD HC */
+        }
+        if (LZ4_SKIP(magic)) {
+            p += 4 + * (long *) p;
+            continue;
+        }
+        mask = 4; /* Content checksum */
+        if (magic) {
+            if (magic > 0)
+                for (end_chunk = p + magic; p < end_chunk;) {
+                    unsigned char *dico;
+                    unsigned char token = *p++;
+
+                    n = lz4cnt(&p, token >> 4);
+                    for (i = 0; i < n; i++)
+                        *to++ = *p++;
+                    if (p >= end_chunk) break;
+                    dico = to - (* (unsigned short *) p);
+                    p += sizeof(unsigned short);
+                    n = 4 + lz4cnt(&p, token & 0xF);
+                    for (i = 0; i < n; i++)
+                        *to++ = *dico++;
+                }
+            else for (end_chunk = p + (magic & 0x7FffFFff);
+                      p < end_chunk;) {
+                *to++ = *p++;
+            }
+            mask = 0x10; /* Block checksum */
+        }
+        if (flags & mask) p += 4; /* Skip checksum */
+    }
+    return to;
+}
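
unlz4() makes two passes over the same buffer: the first loop only walks the LZ4 frame, legacy and skippable-frame headers to obtain the decompressed size (read from the frame header when the content-size flag is set, otherwise computed by walking the block tokens), then the packed stream is moved up so that it ends just past where the output will end, and the second loop decodes it back to the start of the buffer in place. Within a block, each sequence starts with a token whose high nibble is the literal-run length and whose low nibble is the match length minus 4, a value of 0xF in either nibble meaning "read further length bytes" (the job of lz4cnt()); the match is then copied from a 2-byte little-endian back offset. A self-contained sketch of that sequence decoding on a hand-made block follows; the block bytes and helper names are invented for the illustration and are not part of the patch.

#include <stdio.h>

/* Sketch: decode one LZ4 block with the same token/offset scheme as the
 * inner loop of unlz4() above. The block is hand-made, not from a real file. */
static unsigned cnt(const unsigned char **p, unsigned n)
{
    int i;
    if (n == 0xF) do { i = *(*p)++; n += i; } while (i == 0xFF);
    return n;
}

int main(void)
{
    /* token 0x44: 4 literals, match length 4+4, offset 4; then 1 literal */
    const unsigned char block[] = { 0x44, 'a', 'b', 'c', 'd', 0x04, 0x00,
                                    0x10, 'e' };
    const unsigned char *p = block, *end = block + sizeof(block);
    char out[64], *to = out;
    unsigned i, n;

    while (p < end) {
        unsigned char token = *p++;
        n = cnt(&p, token >> 4);                      /* literal run */
        for (i = 0; i < n; i++) *to++ = *p++;
        if (p >= end) break;                          /* last sequence: literals only */
        const char *dico = to - (p[0] | (p[1] << 8)); /* back reference */
        p += 2;
        n = 4 + cnt(&p, token & 0xF);                 /* match length */
        for (i = 0; i < n; i++) *to++ = *dico++;      /* overlapping copy is fine */
    }
    *to = '\0';
    printf("%s\n", out);   /* prints abcdabcdabcde */
    return 0;
}
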