Mirror of https://github.com/lz4/lz4.git
commit 51be2943b3 (parent 97d60acd89)

    docs: fix some typo

    Signed-off-by: Qi Wang <wangqi@linux.alibaba.com>
@@ -48,7 +48,7 @@
  * Special Note About Decompression:
  * Using the LZ4_decompress_safe() function protects against malicious (user) input. If you are using data from a
  * trusted source, or if your program is the producer (P) as well as its consumer (C) in a PC or MPMC setup, you can
- * safely use the LZ4_decompress_fast function
+ * safely use the LZ4_decompress_fast function.
  */

 /* Since lz4 compiles with c99 and not gnu/std99 we need to enable POSIX linking for time.h structs and functions. */
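As context for the comment patched above: the safety distinction is that LZ4_decompress_safe() validates the compressed stream against the destination capacity, while LZ4_decompress_fast() trusts its input. Below is a minimal sketch of the safe path; it is not part of this commit, and the buffer names and sample text are illustrative, assuming only that lz4.h is on the include path.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    const char* const text = "LZ4 is a fast compression algorithm. LZ4 is a fast compression algorithm.";
    const int text_size = (int)strlen(text) + 1;   /* include the NUL terminator */

    /* Compress first so there is something to decompress. */
    const int bound = LZ4_compressBound(text_size);
    char* const compressed = (char*)malloc((size_t)bound);
    if (compressed == NULL) return 1;
    const int csize = LZ4_compress_default(text, compressed, text_size, bound);
    if (csize <= 0) return 1;

    /* LZ4_decompress_safe() checks the compressed stream against the
     * destination capacity, so malformed or malicious input cannot
     * overrun the buffer; it returns a negative value on error. */
    char* const restored = (char*)malloc((size_t)text_size);
    if (restored == NULL) return 1;
    const int dsize = LZ4_decompress_safe(compressed, restored, csize, text_size);
    if (dsize < 0) {
        fprintf(stderr, "corrupt or malicious input (code %d)\n", dsize);
        return 1;
    }
    printf("restored %d bytes: %s\n", dsize, restored);

    free(compressed);
    free(restored);
    return 0;
}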
@@ -47,7 +47,7 @@ int main(void) {
   char* compressed_data = (char*)malloc((size_t)max_dst_size);
   if (compressed_data == NULL)
     run_screaming("Failed to allocate memory for *compressed_data.", 1);
-  // That's all the information and preparation LZ4 needs to compress *src into *compressed_data.
+  // That's all the information and preparation LZ4 needs to compress *src into* compressed_data.
   // Invoke LZ4_compress_default now with our size values and pointers to our memory locations.
   // Save the return value for error checking.
   const int compressed_data_size = LZ4_compress_default(src, compressed_data, src_size, max_dst_size);
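For readers without the surrounding example file: the hunk above sits inside a one-shot compression flow. The following standalone sketch reproduces that flow with plain error handling in place of the example's run_screaming() helper; it is not part of this commit, and the sample string is illustrative.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    const char* const src = "Repeated text compresses well. Repeated text compresses well.";
    const int src_size = (int)strlen(src) + 1;

    /* LZ4_compressBound() gives the worst-case compressed size for
     * src_size bytes, so a single allocation is always large enough. */
    const int max_dst_size = LZ4_compressBound(src_size);
    char* const compressed_data = (char*)malloc((size_t)max_dst_size);
    if (compressed_data == NULL) {
        fprintf(stderr, "Failed to allocate memory for *compressed_data.\n");
        return 1;
    }

    /* A return value of 0 or less means compression failed. */
    const int compressed_data_size =
        LZ4_compress_default(src, compressed_data, src_size, max_dst_size);
    if (compressed_data_size <= 0) {
        fprintf(stderr, "Compression failed.\n");
        free(compressed_data);
        return 1;
    }
    printf("Compressed %d bytes into %d bytes.\n", src_size, compressed_data_size);

    free(compressed_data);
    return 0;
}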
@@ -22,7 +22,7 @@ But if you want to write advanced application, it's time to use Block or Streami

 Block API (de)compresses a single contiguous memory block.
 In other words, LZ4 library finds redundancy from a single contiguous memory block.
 Streaming API does same thing but (de)compresses multiple adjacent contiguous memory blocks.
-So LZ4 library could find more redundancy than Block API.
+So Streaming API could find more redundancy than Block API.

 The following figure shows difference between API and block sizes.
 In these figures, the original data is split into 4KiBytes contiguous chunks.
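To make the corrected sentence concrete: the Streaming API finds more redundancy because each block can reference earlier blocks as a dictionary. Below is a minimal sketch, not part of this commit, that compresses a buffer in 4 KiB slices (mirroring the chunks in the figure) with LZ4_compress_fast_continue(); the CHUNK and TOTAL sizes and the synthetic input are illustrative assumptions.

#include <stdio.h>
#include <stdlib.h>
#include "lz4.h"

#define CHUNK 4096   /* mirror the 4 KiB chunks in the figure */

int main(void)
{
    /* Build some repetitive input spanning several adjacent chunks. */
    enum { TOTAL = 4 * CHUNK };
    char* const src = (char*)malloc(TOTAL);
    if (src == NULL) return 1;
    for (int i = 0; i < TOTAL; i++) src[i] = "abcd"[i % 4];

    LZ4_stream_t* const stream = LZ4_createStream();
    const int bound = LZ4_compressBound(CHUNK);
    char* const dst = (char*)malloc((size_t)bound);
    if (stream == NULL || dst == NULL) return 1;

    /* Because every chunk stays in memory at its original address,
     * LZ4_compress_fast_continue() may reference earlier chunks as a
     * dictionary and find redundancy across block boundaries, which
     * is the advantage over the one-shot Block API described above. */
    for (int off = 0; off < TOTAL; off += CHUNK) {
        const int csize = LZ4_compress_fast_continue(
            stream, src + off, dst, CHUNK, bound, 1 /* acceleration */);
        if (csize <= 0) return 1;
        printf("chunk at %5d: %4d -> %d bytes\n", off, CHUNK, csize);
        /* A real program would write out dst/csize here. */
    }

    LZ4_freeStream(stream);
    free(dst);
    free(src);
    return 0;
}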