Lines matching refs:op (references to the output write pointer op in LZ4_compress_generic_validated() and LZ4_decompress_generic())
1182 BYTE *op = (BYTE *)dest; in LZ4_compress_generic_validated() local
1183 BYTE *const olimit = op + maxOutputSize; in LZ4_compress_generic_validated()
1352 token = op++; in LZ4_compress_generic_validated()
1354 (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength / 255) > olimit))) in LZ4_compress_generic_validated()
1359 …(unlikely(op + (litLength + 240) / 255 /* litlen */ + litLength /* literals */ + 2 /* offset */ + … in LZ4_compress_generic_validated()
1361 op--; in LZ4_compress_generic_validated()
1369 *op++ = 255; in LZ4_compress_generic_validated()
1370 *op++ = (BYTE)len; in LZ4_compress_generic_validated()
1376 LZ4_wildCopy8(op, anchor, op + litLength); in LZ4_compress_generic_validated()
1377 op += litLength; in LZ4_compress_generic_validated()
1392 …(op + 2 /* offset */ + 1 /* token */ + MFLIMIT - MINMATCH /* min last literals so last match is <=… in LZ4_compress_generic_validated()
1395 op = token; in LZ4_compress_generic_validated()
1404 LZ4_writeLE16(op, (U16)offset); in LZ4_compress_generic_validated()
1405 op += 2; in LZ4_compress_generic_validated()
1411 LZ4_writeLE16(op, (U16)(ip - match)); in LZ4_compress_generic_validated()
1412 op += 2; in LZ4_compress_generic_validated()
1443 (unlikely(op + (1 + LASTLITERALS) + (matchCode + 240) / 255 > olimit))) in LZ4_compress_generic_validated()
1448 …15 /* in token */ - 1 /* to avoid needing a zero byte */ + ((U32)(olimit - op) - 1 - LASTLITERALS)… in LZ4_compress_generic_validated()
1478 LZ4_write32(op, 0xFFFFFFFF); in LZ4_compress_generic_validated()
1481 op += 4; in LZ4_compress_generic_validated()
1482 LZ4_write32(op, 0xFFFFFFFF); in LZ4_compress_generic_validated()
1485 op += matchCode / 255; in LZ4_compress_generic_validated()
1486 *op++ = (BYTE)(matchCode % 255); in LZ4_compress_generic_validated()
1492 assert(!(outputDirective == fillOutput && op + 1 + LASTLITERALS > olimit)); in LZ4_compress_generic_validated()
1511 token = op++; in LZ4_compress_generic_validated()
1560 token = op++; in LZ4_compress_generic_validated()
1579 (op + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > olimit)) in LZ4_compress_generic_validated()
1584 assert(olimit >= op); in LZ4_compress_generic_validated()
1585 lastRun = (size_t)(olimit - op) - 1 /*token*/; in LZ4_compress_generic_validated()
1598 *op++ = RUN_MASK << ML_BITS; in LZ4_compress_generic_validated()
1600 *op++ = 255; in LZ4_compress_generic_validated()
1601 *op++ = (BYTE)accumulator; in LZ4_compress_generic_validated()
1605 *op++ = (BYTE)(lastRun << ML_BITS); in LZ4_compress_generic_validated()
1607 LZ4_memcpy(op, anchor, lastRun); in LZ4_compress_generic_validated()
1609 op += lastRun; in LZ4_compress_generic_validated()
1616 result = (int)(((char *)op) - dest); in LZ4_compress_generic_validated()
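
The compressor lines above all revolve around LZ4's variable-length length encoding: a 4-bit field in the token holds lengths up to RUN_MASK (15), and anything longer stores RUN_MASK there, then a run of 255-valued bytes, then a remainder byte (see lines 1352, 1369-1370 and 1598-1605 in the listing). The standalone sketch below only illustrates that scheme; the helper name emit_literal_length and the buffer handling are illustrative and are not taken from the listed source.

    /*
     * Minimal sketch of LZ4's run-length encoding as it appears in the
     * listed compressor lines: lengths below RUN_MASK (15) fit in the
     * upper 4 bits of the token; longer runs store RUN_MASK there, then
     * a sequence of 255-valued bytes, then a remainder byte.
     */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ML_BITS  4
    #define RUN_MASK 15U

    /* Emits the token byte plus any length-extension bytes for a literal
     * run; returns the number of bytes written.  Match lengths use the
     * same scheme in the low 4 bits of the token. */
    static size_t emit_literal_length(uint8_t *dst, size_t litLength)
    {
        uint8_t *op = dst;
        uint8_t *token = op++;                 /* reserve token, as in line 1352 */
        if (litLength >= RUN_MASK) {
            size_t len = litLength - RUN_MASK;
            *token = (uint8_t)(RUN_MASK << ML_BITS);
            for (; len >= 255; len -= 255)
                *op++ = 255;                   /* cf. lines 1369 / 1600 */
            *op++ = (uint8_t)len;              /* cf. lines 1370 / 1601 */
        } else {
            *token = (uint8_t)(litLength << ML_BITS);
        }
        return (size_t)(op - dst);
    }

    int main(void)
    {
        uint8_t buf[16];
        size_t n = emit_literal_length(buf, 300);   /* 300 = 15 + 255 + 30 */
        printf("%zu bytes: %02x %02x %02x\n", n, buf[0], buf[1], buf[2]);
        return 0;
    }

Running this prints "3 bytes: f0 ff 1e": the token nibble saturates at 15, one 255-byte absorbs the next 255 units, and 30 remains.
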
2240 BYTE *op = (BYTE *)dst; in LZ4_decompress_generic() local
2241 BYTE *const oend = op + outputSize; in LZ4_decompress_generic()
2261 assert(lowPrefix <= op); in LZ4_decompress_generic()
2280 if ((oend - op) < FASTLOOP_SAFE_DISTANCE) in LZ4_decompress_generic()
2290 assert(oend - op >= FASTLOOP_SAFE_DISTANCE); in LZ4_decompress_generic()
2309 if ((safeDecode) && unlikely((uptrval)(op) + length < (uptrval)(op))) in LZ4_decompress_generic()
2319 cpy = op + length; in LZ4_decompress_generic()
2327 LZ4_wildCopy32(op, ip, cpy); in LZ4_decompress_generic()
2335 … LZ4_wildCopy8(op, ip, cpy); /* LZ4_decompress_fast() cannot copy more than 8 bytes at a time : in LZ4_decompress_generic()
2339 op = cpy; in LZ4_decompress_generic()
2343 cpy = op + length; in LZ4_decompress_generic()
2353 LZ4_memcpy(op, ip, 16); in LZ4_decompress_generic()
2359 LZ4_memcpy(op, ip, 8); in LZ4_decompress_generic()
2362 LZ4_memcpy(op + 8, ip + 8, 8); in LZ4_decompress_generic()
2366 op = cpy; in LZ4_decompress_generic()
2372 match = op - offset; in LZ4_decompress_generic()
2373 assert(match <= op); in LZ4_decompress_generic()
2390 if ((safeDecode) && unlikely((uptrval)(op) + length < (uptrval)op)) in LZ4_decompress_generic()
2395 if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) in LZ4_decompress_generic()
2403 if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) in LZ4_decompress_generic()
2414 assert(match <= op); in LZ4_decompress_generic()
2415 assert(op + 18 <= oend); in LZ4_decompress_generic()
2417 LZ4_memcpy(op, match, 8); in LZ4_decompress_generic()
2418 LZ4_memcpy(op + 8, match + 8, 8); in LZ4_decompress_generic()
2419 LZ4_memcpy(op + 16, match + 16, 2); in LZ4_decompress_generic()
2420 op += length; in LZ4_decompress_generic()
2433 if (unlikely(op + length > oend - LASTLITERALS)) in LZ4_decompress_generic()
2438 length = MIN(length, (size_t)(oend - op)); in LZ4_decompress_generic()
2449 memmove(op, dictEnd - (lowPrefix - match), length); in LZ4_decompress_generic()
2450 op += length; in LZ4_decompress_generic()
2457 LZ4_memcpy(op, dictEnd - copySize, copySize); in LZ4_decompress_generic()
2458 op += copySize; in LZ4_decompress_generic()
2459 if (restSize > (size_t)(op - lowPrefix)) in LZ4_decompress_generic()
2461 BYTE *const endOfMatch = op + restSize; in LZ4_decompress_generic()
2463 while (op < endOfMatch) in LZ4_decompress_generic()
2465 *op++ = *copyFrom++; in LZ4_decompress_generic()
2470 LZ4_memcpy(op, lowPrefix, restSize); in LZ4_decompress_generic()
2471 op += restSize; in LZ4_decompress_generic()
2478 cpy = op + length; in LZ4_decompress_generic()
2480 assert((op <= oend) && (oend - op >= 32)); in LZ4_decompress_generic()
2483 LZ4_memcpy_using_offset(op, match, cpy, offset); in LZ4_decompress_generic()
2487 LZ4_wildCopy32(op, match, cpy); in LZ4_decompress_generic()
2490 op = cpy; /* wildcopy correction */ in LZ4_decompress_generic()
2514 && likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend))) in LZ4_decompress_generic()
2517 LZ4_memcpy(op, ip, endOnInput ? 16 : 8); in LZ4_decompress_generic()
2518 op += length; in LZ4_decompress_generic()
2526 match = op - offset; in LZ4_decompress_generic()
2527 assert(match <= op); /* check overflow */ in LZ4_decompress_generic()
2533 LZ4_memcpy(op + 0, match + 0, 8); in LZ4_decompress_generic()
2534 LZ4_memcpy(op + 8, match + 8, 8); in LZ4_decompress_generic()
2535 LZ4_memcpy(op + 16, match + 16, 2); in LZ4_decompress_generic()
2536 op += length + MINMATCH; in LZ4_decompress_generic()
2555 if ((safeDecode) && unlikely((uptrval)(op) + length < (uptrval)(op))) in LZ4_decompress_generic()
2566 cpy = op + length; in LZ4_decompress_generic()
2586 … DEBUGLOG(7, "partialDecoding: remaining space in dstBuffer : %i", (int)(oend - op)); in LZ4_decompress_generic()
2594 cpy = op + length; in LZ4_decompress_generic()
2602 assert(op <= oend); in LZ4_decompress_generic()
2603 length = (size_t)(oend - op); in LZ4_decompress_generic()
2626 …memmove(op, ip, length); /* supports overlapping memory regions; only matters for in-place decompr… in LZ4_decompress_generic()
2628 op += length; in LZ4_decompress_generic()
2641 LZ4_wildCopy8(op, ip, cpy); /* may overwrite up to WILDCOPYLENGTH beyond cpy */ in LZ4_decompress_generic()
2643 op = cpy; in LZ4_decompress_generic()
2649 match = op - offset; in LZ4_decompress_generic()
2661 if ((safeDecode) && unlikely((uptrval)(op) + length < (uptrval)op)) in LZ4_decompress_generic()
2674 if (unlikely(op + length > oend - LASTLITERALS)) in LZ4_decompress_generic()
2677 length = MIN(length, (size_t)(oend - op)); in LZ4_decompress_generic()
2685 memmove(op, dictEnd - (lowPrefix - match), length); in LZ4_decompress_generic()
2686 op += length; in LZ4_decompress_generic()
2693 LZ4_memcpy(op, dictEnd - copySize, copySize); in LZ4_decompress_generic()
2694 op += copySize; in LZ4_decompress_generic()
2695 if (restSize > (size_t)(op - lowPrefix)) in LZ4_decompress_generic()
2697 BYTE *const endOfMatch = op + restSize; in LZ4_decompress_generic()
2699 while (op < endOfMatch) in LZ4_decompress_generic()
2700 *op++ = *copyFrom++; in LZ4_decompress_generic()
2704 LZ4_memcpy(op, lowPrefix, restSize); in LZ4_decompress_generic()
2705 op += restSize; in LZ4_decompress_generic()
2713 cpy = op + length; in LZ4_decompress_generic()
2716 assert(op <= oend); in LZ4_decompress_generic()
2719 size_t const mlen = MIN(length, (size_t)(oend - op)); in LZ4_decompress_generic()
2721 BYTE *const copyEnd = op + mlen; in LZ4_decompress_generic()
2722 if (matchEnd > op) in LZ4_decompress_generic()
2724 while (op < copyEnd) in LZ4_decompress_generic()
2726 *op++ = *match++; in LZ4_decompress_generic()
2731 LZ4_memcpy(op, match, mlen); in LZ4_decompress_generic()
2733 op = copyEnd; in LZ4_decompress_generic()
2734 if (op == oend) in LZ4_decompress_generic()
2743 LZ4_write32(op, 0); /* silence msan warning when offset==0 */ in LZ4_decompress_generic()
2744 op[0] = match[0]; in LZ4_decompress_generic()
2745 op[1] = match[1]; in LZ4_decompress_generic()
2746 op[2] = match[2]; in LZ4_decompress_generic()
2747 op[3] = match[3]; in LZ4_decompress_generic()
2749 LZ4_memcpy(op + 4, match, 4); in LZ4_decompress_generic()
2754 LZ4_memcpy(op, match, 8); in LZ4_decompress_generic()
2757 op += 8; in LZ4_decompress_generic()
2766 if (op < oCopyLimit) in LZ4_decompress_generic()
2768 LZ4_wildCopy8(op, match, oCopyLimit); in LZ4_decompress_generic()
2769 match += oCopyLimit - op; in LZ4_decompress_generic()
2770 op = oCopyLimit; in LZ4_decompress_generic()
2772 while (op < cpy) in LZ4_decompress_generic()
2774 *op++ = *match++; in LZ4_decompress_generic()
2779 LZ4_memcpy(op, match, 8); in LZ4_decompress_generic()
2782 LZ4_wildCopy8(op + 8, match + 8, cpy); in LZ4_decompress_generic()
2785 op = cpy; /* wildcopy correction */ in LZ4_decompress_generic()
2791 DEBUGLOG(5, "decoded %i bytes", (int)(((char *)op) - dst)); in LZ4_decompress_generic()
2792 return (int)(((char *)op) - dst); /* Nb of output bytes decoded */ in LZ4_decompress_generic()
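
The decompressor lines above rebuild each match by copying from earlier output at op - offset (lines 2372 / 2649). When offset is smaller than the copy width, source and destination overlap, which is why the listed code falls back to byte-wise copies and offset-adjusted 4/8-byte copies around lines 2724-2726, 2744-2757 and 2772-2774. The sketch below shows the same idea in a deliberately simplified form, using a plain byte loop for short offsets instead of the table-driven pointer adjustment the full source uses; copy_match and the demo buffer are illustrative only.

    /*
     * Overlap-safe match copy, simplified.  With offset < 8 the source
     * region (op - offset) overlaps the bytes being written, so a wide
     * memcpy would read data that has not been produced yet; the byte
     * loop reproduces the repeating pattern instead, exactly like the
     * while (op < cpy) *op++ = *match++; loops in the listing.
     */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    static void copy_match(uint8_t *op, size_t offset, size_t length)
    {
        const uint8_t *match = op - offset;    /* cf. lines 2372 / 2649 */
        if (offset >= 8) {                     /* chunks never overlap: bulk copy */
            while (length >= 8) {
                memcpy(op, match, 8);
                op += 8; match += 8; length -= 8;
            }
        }
        while (length--)                       /* short offset or tail: byte by byte */
            *op++ = *match++;
    }

    int main(void)
    {
        uint8_t buf[32] = "abc";               /* pretend "abc" was already decoded */
        copy_match(buf + 3, 3, 9);             /* match with offset 3, length 9 */
        buf[12] = '\0';
        printf("%s\n", (char *)buf);           /* prints abcabcabcabc */
        return 0;
    }

The demo extends a 3-byte prefix into "abcabcabcabc": each byte written becomes a valid source for a later byte, which is the property the real fast paths must preserve when they widen the copies.
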