bzip2/
lib.rs

1//! Bzip compression for Rust
2//!
3//! This library contains bindings to libbz2 to support bzip compression and
4//! decompression for Rust. The streams offered in this library are primarily
5//! found in the `reader` and `writer` modules. Both compressors and
6//! decompressors are available in each module depending on what operation you
7//! need.
8//!
9//! Access to the raw decompression/compression stream is also provided through
10//! the `raw` module which has a much closer interface to libbz2.
11//!
12//! # Example
13//!
14//! ```
15//! use std::io::prelude::*;
16//! use bzip2::Compression;
17//! use bzip2::read::{BzEncoder, BzDecoder};
18//!
19//! // Round trip some bytes from a byte source, into a compressor, into a
20//! // decompressor, and finally into a vector.
21//! let data = "Hello, World!".as_bytes();
22//! let compressor = BzEncoder::new(data, Compression::best());
23//! let mut decompressor = BzDecoder::new(compressor);
24//!
25//! let mut contents = String::new();
26//! decompressor.read_to_string(&mut contents).unwrap();
27//! assert_eq!(contents, "Hello, World!");
28//! ```
29//!
30//! # Multistreams (e.g. Wikipedia or pbzip2)
31//!
32//! Some tools such as pbzip2 or data from sources such as Wikipedia
33//! are encoded as so called bzip2 "multistreams," meaning they
34//! contain back to back chunks of bzip'd data. `BzDecoder` does not
35//! attempt to convert anything after the first bzip chunk in the
36//! source stream. Thus, if you wish to decode all bzip chunks from
37//! the input until end of file, use `MultiBzDecoder`.
38//!
39//! *Protip*: If you use `BzDecoder` to decode data and the output is
40//! incomplete and exactly 900K bytes, you probably need a
41//! `MultiBzDecoder`.
42//!
43//! # Async I/O
44//!
45//! This crate optionally can support async I/O streams with the Tokio stack via
46//! the `tokio` feature of this crate:
47//!
48//! ```toml
49//! bzip2 = { version = "0.4", features = ["tokio"] }
50//! ```
51//!
52//! All methods are internally capable of working with streams that may return
53//! `ErrorKind::WouldBlock` when they're not ready to perform the particular
54//! operation.
55//!
56//! Note that care needs to be taken when using these objects, however. The
57//! Tokio runtime, in particular, requires that data is fully flushed before
58//! dropping streams. For compatibility with blocking streams all streams are
59//! flushed/written when they are dropped, and this is not always a suitable
60//! time to perform I/O. If I/O streams are flushed before drop, however, then
61//! these operations will be no-ops.
62
63#![deny(missing_docs)]
64#![doc(html_root_url = "https://docs.rs/bzip2/")]
65
66extern crate bzip2_sys as ffi;
67extern crate libc;
68#[cfg(test)]
69extern crate partial_io;
70#[cfg(test)]
71extern crate quickcheck;
72#[cfg(test)]
73extern crate rand;
74#[cfg(feature = "tokio")]
75#[macro_use]
76extern crate tokio_io;
77#[cfg(feature = "tokio")]
78extern crate futures;
79
80pub use mem::{Action, Compress, Decompress, Error, Status};
81
82mod mem;
83
84pub mod bufread;
85pub mod read;
86pub mod write;
87
/// When compressing data, the compression level can be specified by a value in
/// this struct. It wraps the raw numeric level (0-9) passed to libbz2.
#[derive(Copy, Clone, Debug)]
pub struct Compression(u32);
92
93impl Compression {
94    /// Create a new compression spec with a specific numeric level (0-9).
95    pub fn new(level: u32) -> Compression {
96        Compression(level)
97    }
98
99    /// Do not compress.
100    pub fn none() -> Compression {
101        Compression(0)
102    }
103
104    /// Optimize for the best speed of encoding.
105    pub fn fast() -> Compression {
106        Compression(1)
107    }
108
109    /// Optimize for the size of data being encoded.
110    pub fn best() -> Compression {
111        Compression(9)
112    }
113
114    /// Return the compression level as an integer.
115    pub fn level(&self) -> u32 {
116        self.0
117    }
118}
119
120impl Default for Compression {
121    /// Choose the default compression, a balance between speed and size.
122    fn default() -> Compression {
123        Compression(6)
124    }
125}