Skip to content

Commit

Permalink
Merge pull request #56 from flxo/pr-redis
Browse files Browse the repository at this point in the history
Add a Redis cache variant
  • Loading branch information
alexcrichton authored Mar 27, 2017
2 parents a0b7cc1 + 2349791 commit 7ae4c25
Show file tree
Hide file tree
Showing 11 changed files with 225 additions and 29 deletions.
11 changes: 11 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 9 additions & 5 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,28 +11,29 @@ repository = "https://github.com/mozilla/sccache/"
app_dirs = "1.1.1"
bincode = { git = 'https://github.com/TyOverby/bincode' }
byteorder = "1.0"
chrono = "0.2.25"
chrono = { version = "0.2.25", optional = true }
clap = "2.3.0"
env_logger = "0.3.3"
error-chain = { version = "0.7.2", default-features = false }
fern = "0.3.5"
filetime = "0.1"
futures = "0.1.11"
futures-cpupool = "0.1"
hyper = { git = "https://github.com/hyperium/hyper" }
hyper-tls = { git = "https://github.com/hyperium/hyper-tls" }
hyper = { git = "https://github.com/hyperium/hyper", optional = true }
hyper-tls = { git = "https://github.com/hyperium/hyper-tls", optional = true }
libc = "0.2.10"
local-encoding = "0.2.0"
log = "0.3.6"
lru-disk-cache = { path = "lru-disk-cache" }
number_prefix = "0.2.5"
redis = { version = "0.8.0", optional = true }
regex = "0.1.65"
retry = "0.4.0"
rust-crypto = "0.2.36"
rust-crypto = { version = "0.2.36", optional = true }
rustc-serialize = "0.3"
serde = "0.9"
serde_derive = "0.9"
serde_json = "0.9.0"
serde_json = { version = "0.9.0", optional = true }
sha1 = "0.2.0"
tempdir = "0.3.4"
time = "0.1.35"
Expand All @@ -58,6 +59,9 @@ mio-named-pipes = "0.1"

[features]
default = []
all = ["redis", "s3"]
s3 = ["chrono", "hyper", "hyper-tls", "rust-crypto", "serde_json", "simple-s3"]
simple-s3 = []
# Enable features that require unstable features of Nightly Rust.
unstable = []

Expand Down
8 changes: 5 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ Requirements

sccache is a [Rust](https://www.rust-lang.org/) program. Building it requires `cargo` (and thus `rustc`). sccache currently requires **Rust 1.16**.

We recommend you install Rust via [Rustup](https://rustup.rs/). The generated binaries can be built so that they are very portable, see [scripts/build-release.sh](scripts/build-release.sh).
We recommend you install Rust via [Rustup](https://rustup.rs/). The generated binaries can be built so that they are very portable, see [scripts/build-release.sh](scripts/build-release.sh). By default `sccache` supports only a local disk cache. To build `sccache` with support for the `S3` and/or `Redis` cache backends, add `--features=all`, or enable a specific backend by passing `s3` and/or `redis`. Refer to the [Cargo Documentation](http://doc.crates.io/manifest.html#the-features-section) for details.

## Build

> $ cargo build [--release]
> $ cargo build [--features=all|redis|s3] [--release]
## Installation

Expand Down Expand Up @@ -49,7 +49,9 @@ sccache defaults to using local disk storage. You can set the `SCCACHE_DIR` envi

If you want to use S3 storage for the sccache cache, you need to set the `SCCACHE_BUCKET` environment variable to the name of the S3 bucket to use.

The environment variables are only taken into account when the server starts, so only on the first run.
Set `SCCACHE_REDIS` to a [Redis](https://redis.io/) URL in the format `redis://[:<passwd>@]<hostname>[:port][/<db>]` to store the cache in a Redis instance.

*Important:* The environment variables are only taken into account when the server starts, so only on the first run.

Debugging
---------
Expand Down
4 changes: 3 additions & 1 deletion appveyor.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,15 @@ environment:
# Stable 64-bit MSVC
- channel: stable
target: x86_64-pc-windows-msvc
FEATURES: --features=all
# Beta 64-bit MSVC
- channel: beta
target: x86_64-pc-windows-msvc
FEATURES: --features=all
# Nightly 64-bit MSVC
- channel: nightly
target: x86_64-pc-windows-msvc
FEATURES: --features=unstable
FEATURES: --features="all unstable"
CARGO_TEST_EXTRA: --all

### GNU Toolchains ###
Expand Down
5 changes: 3 additions & 2 deletions scripts/build-release.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,15 @@ case $system in
MINGW*|MSYS_NT*)
system=Windows
rm -rf target/release
rustup run nightly cargo build --release --target=x86_64-pc-windows-msvc --features=unstable && rustup run nightly cargo test --release
rustup run nightly cargo build --release --target=x86_64-pc-windows-msvc --features="all unstable" && rustup run nightly cargo test --release
cp target/release/sccache.exe "$stagedir"
compress=bz2
;;
Linux)
# Build using rust-musl-builder
rm -rf target/x86_64-unknown-linux-musl/release
docker run --rm -it -v "$(pwd)":/home/rust/src -v ~/.cargo/git:/home/rust/.cargo/git -v ~/.cargo/registry:/home/rust/.cargo/registry luser/rust-musl-builder sh -c "cargo build --release && cargo test --release"
docker run --rm -it -v "$(pwd)":/home/rust/src -v ~/.cargo/git:/home/rust/.cargo/git -v
~/.cargo/registry:/home/rust/.cargo/registry luser/rust-musl-builder sh -c "cargo build --release --features=all && cargo test --features=all --release"
cp target/x86_64-unknown-linux-musl/release/sccache "$stagedir"
strip "$stagedir/sccache"
compress=xz
Expand Down
53 changes: 37 additions & 16 deletions src/cache/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@ use app_dirs::{
app_dir,
};
use cache::disk::DiskCache;
#[cfg(feature = "redis")]
use cache::redis::RedisCache;
#[cfg(feature = "s3")]
use cache::s3::S3Cache;
use futures_cpupool::CpuPool;
use regex::Regex;
Expand Down Expand Up @@ -186,25 +189,43 @@ fn parse_size(val: &str) -> Option<usize> {
}

/// Get a suitable `Storage` implementation from the environment.
pub fn storage_from_environment(pool: &CpuPool, handle: &Handle) -> Arc<Storage> {
if let Ok(bucket) = env::var("SCCACHE_BUCKET") {
let endpoint = match env::var("SCCACHE_ENDPOINT") {
Ok(endpoint) => format!("{}/{}", endpoint, bucket),
_ => match env::var("SCCACHE_REGION") {
Ok(ref region) if region != "us-east-1" =>
format!("{}.s3-{}.amazonaws.com", bucket, region),
_ => format!("{}.s3.amazonaws.com", bucket),
},
};
debug!("Trying S3Cache({})", endpoint);
match S3Cache::new(&bucket, &endpoint, handle) {
Ok(s) => {
trace!("Using S3Cache");
return Arc::new(s);
pub fn storage_from_environment(pool: &CpuPool, _handle: &Handle) -> Arc<Storage> {
if cfg!(feature = "s3") {
if let Ok(bucket) = env::var("SCCACHE_BUCKET") {
let endpoint = match env::var("SCCACHE_ENDPOINT") {
Ok(endpoint) => format!("{}/{}", endpoint, bucket),
_ => match env::var("SCCACHE_REGION") {
Ok(ref region) if region != "us-east-1" =>
format!("{}.s3-{}.amazonaws.com", bucket, region),
_ => format!("{}.s3.amazonaws.com", bucket),
},
};
debug!("Trying S3Cache({})", endpoint);
#[cfg(feature = "s3")]
match S3Cache::new(&bucket, &endpoint, _handle) {
Ok(s) => {
trace!("Using S3Cache");
return Arc::new(s);
}
Err(e) => warn!("Failed to create S3Cache: {:?}", e),
}
Err(e) => warn!("Failed to create S3Cache: {:?}", e),
}
}

if cfg!(feature = "redis") {
if let Ok(url) = env::var("SCCACHE_REDIS") {
debug!("Trying Redis({})", url);
#[cfg(feature = "redis")]
match RedisCache::new(&url, pool) {
Ok(s) => {
trace!("Using Redis: {}", url);
return Arc::new(s);
}
Err(e) => warn!("Failed to create RedisCache: {:?}", e),
}
}
}

let d = env::var_os("SCCACHE_DIR")
.map(|p| PathBuf::from(p))
.or_else(|| app_dir(AppDataType::UserCache, &APP_INFO, "").ok())
Expand Down
3 changes: 3 additions & 0 deletions src/cache/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@

pub mod cache;
pub mod disk;
#[cfg(feature = "redis")]
pub mod redis;
#[cfg(feature = "s3")]
pub mod s3;

pub use cache::cache::*;
133 changes: 133 additions & 0 deletions src/cache/redis.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
// Copyright 2016 Mozilla Foundation
// Copyright 2016 Felix Obenhuber <felix@obenhuber.de>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use cache::{
Cache,
CacheRead,
CacheWrite,
Storage,
};
use errors::*;
use futures::Future;
use futures_cpupool::CpuPool;
use redis::{
cmd,
Client,
Commands,
Connection,
InfoDict,
};
use std::collections::HashMap;
use std::io::Cursor;
use std::time::{
Duration,
Instant,
};

/// A cache that stores entries in a Redis instance.
#[derive(Clone)]
pub struct RedisCache {
    // Connection URL, kept only for reporting via `location()`.
    url: String,
    // Redis client handle; a fresh connection is opened per operation.
    client: Client,
    // Thread pool on which the blocking Redis I/O is executed.
    pool: CpuPool,
}

impl RedisCache {
    /// Create a `RedisCache` talking to the Redis server at `url`.
    ///
    /// Fails if `url` cannot be parsed into a valid Redis connection
    /// specification.
    pub fn new(url: &str, pool: &CpuPool) -> Result<RedisCache> {
        let client = Client::open(url)?;
        Ok(RedisCache {
            url: url.to_owned(),
            client: client,
            pool: pool.clone(),
        })
    }

    /// Open a fresh connection with 10 second read and write timeouts.
    fn connect(&self) -> Result<Connection> {
        let connection = self.client.get_connection()?;
        connection.set_read_timeout(Some(Duration::from_millis(10_000)))?;
        connection.set_write_timeout(Some(Duration::from_millis(10_000)))?;
        Ok(connection)
    }
}

impl Storage for RedisCache {
    /// Fetch `key` from Redis on the thread pool.
    ///
    /// An empty payload is treated as a cache miss; anything else is
    /// decoded into a `CacheRead`.
    fn get(&self, key: &str) -> SFuture<Cache> {
        let key = key.to_owned();
        let cache = self.clone();
        self.pool.spawn_fn(move || {
            let conn = cache.connect()?;
            let data = conn.get::<&str, Vec<u8>>(&key)?;
            match data.len() {
                0 => Ok(Cache::Miss),
                _ => CacheRead::from(Cursor::new(data)).map(Cache::Hit),
            }
        }).boxed()
    }

    /// Begin a cache write. Redis needs no per-entry preparation, so this
    /// just hands back an empty `CacheWrite`.
    fn start_put(&self, _key: &str) -> Result<CacheWrite> {
        Ok(CacheWrite::new())
    }

    /// Open a connection and store the finished entry under `key`,
    /// returning how long the store took.
    fn finish_put(&self, key: &str, entry: CacheWrite) -> SFuture<Duration> {
        let key = key.to_owned();
        let cache = self.clone();
        self.pool.spawn_fn(move || {
            let timer = Instant::now();
            let conn = cache.connect()?;
            let blob = entry.finish()?;
            conn.set::<&str, Vec<u8>, ()>(&key, blob)?;
            Ok(timer.elapsed())
        }).boxed()
    }

    /// Human-readable description of the cache location.
    fn location(&self) -> String {
        format!("Redis: {}", self.url)
    }

    /// Current cache size, read from the `used_memory` field of the
    /// Redis `INFO` command. `None` if the query fails.
    fn current_size(&self) -> Option<usize> {
        let conn = match self.connect() {
            Ok(c) => c,
            Err(_) => return None,
        };
        let info: InfoDict = match cmd("INFO").query(&conn) {
            Ok(i) => i,
            Err(_) => return None,
        };
        info.get("used_memory")
    }

    /// Maximum cache size, read via `CONFIG GET maxmemory`. A configured
    /// limit of 0 means "unlimited" on the server, so it maps to `None`,
    /// as does any failure to query.
    fn max_size(&self) -> Option<usize> {
        let conn = match self.connect() {
            Ok(c) => c,
            Err(_) => return None,
        };
        let config: HashMap<String, usize> =
            match cmd("CONFIG").arg("GET").arg("maxmemory").query(&conn) {
                Ok(h) => h,
                Err(_) => return None,
            };
        match config.get("maxmemory") {
            Some(&limit) if limit != 0 => Some(limit),
            _ => None,
        }
    }
}
5 changes: 5 additions & 0 deletions src/cmdline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,11 @@ pub fn get_app<'a, 'b>() -> App<'a, 'b> {
App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.setting(AppSettings::TrailingVarArg)
.after_help(concat!(
"Enabled features:\n",
" S3: ", cfg!(feature = "s3"), "\n",
" Redis: ", cfg!(feature = "redis"), "\n")
)
.args_from_usage(
"-s --show-stats 'show cache statistics'
-z, --zero-stats 'zero statistics counters'
Expand Down
10 changes: 8 additions & 2 deletions src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,20 +19,26 @@ use std::io;
use bincode;
use futures::Future;
use futures::future;
#[cfg(feature = "hyper")]
use hyper;
use lru_disk_cache;
#[cfg(feature = "serde_json")]
use serde_json;
#[cfg(feature = "redis")]
use redis;

error_chain! {
foreign_links {
Hyper(hyper::Error) #[cfg(feature = "hyper")];
Io(io::Error);
Hyper(hyper::Error);
Lru(lru_disk_cache::Error);
Json(serde_json::Error);
Json(serde_json::Error) #[cfg(feature = "serde_json")];
Bincode(bincode::Error);
Redis(redis::RedisError) #[cfg(feature = "redis")];
}

errors {
#[cfg(feature = "hyper")]
BadHTTPStatus(status: hyper::status::StatusCode) {
description("failed to get a successful HTTP status")
display("didn't get a successful HTTP status, got `{}`", status)
Expand Down
Loading

0 comments on commit 7ae4c25

Please sign in to comment.