Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 12 additions & 9 deletions implants/lib/pb/src/xchacha.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,6 @@ fn get_key(pub_key: [u8; 32]) -> Result<[u8; 32]> {
Ok(res)
}

/// Remove the cached private key associated with `pub_key` from the key
/// history, returning it if it was present.
///
/// Keys are evicted once consumed so a decrypted session's key material
/// does not linger in the cache.
fn del_key(pub_key: [u8; 32]) -> Option<[u8; 32]> {
    // Acquire the history lock, then pop (remove + return) the entry.
    let mut history = key_history().lock().unwrap();
    history.pop(&pub_key)
}

// ------------

#[derive(Debug, Clone, Default)]
Expand Down Expand Up @@ -151,7 +147,6 @@ where

// ---
//
const DEFAULT_CODEC_BUFFER_SIZE: usize = 8 * 1024;
const PUBKEY_LEN: usize = 32;
const NONCE_LEN: usize = 24;

Expand All @@ -169,8 +164,8 @@ where
fn decode(&mut self, buf: &mut DecodeBuf<'_>) -> Result<Option<Self::Item>, Self::Error> {
// public key + xchacha nonce + ciphertext
let mut reader = buf.reader();
let mut bytes_in = vec![0; DEFAULT_CODEC_BUFFER_SIZE];
let bytes_read = match reader.read(&mut bytes_in) {
let mut bytes_in = Vec::new();
let bytes_read = match reader.read_to_end(&mut bytes_in) {
Ok(n) => n,
Err(err) => {
#[cfg(debug_assertions)]
Expand All @@ -182,6 +177,16 @@ where
}
};

log::debug!("Bytes read from server: {}", bytes_read);

if bytes_read == 0 {
let item = Message::decode(bytes_in.get(0..bytes_read).unwrap())
.map(Option::Some)
.map_err(from_decode_error)?;

return Ok(item);
}

if bytes_read < PUBKEY_LEN + NONCE_LEN {
let err =
anyhow::anyhow!("Message from server is too small to contain public key and nonce");
Expand Down Expand Up @@ -227,8 +232,6 @@ where
};

let client_private_bytes = get_key(client_public_bytes).map_err(from_anyhow_error)?;
// Shouldn't need private key again once the message has been decrypted
del_key(client_public_bytes);

let cipher = chacha20poly1305::XChaCha20Poly1305::new(GenericArray::from_slice(
&client_private_bytes,
Expand Down
33 changes: 12 additions & 21 deletions tavern/internal/cryptocodec/cryptocodec.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,43 +10,37 @@ import (
"log/slog"
"runtime"
"strconv"
"sync"

"github.com/cloudflare/circl/dh/x25519"
lru "github.com/hashicorp/golang-lru/v2"
"golang.org/x/crypto/chacha20poly1305"
"google.golang.org/grpc/encoding"
"google.golang.org/grpc/mem"
)

// TODO: Switch to a gomap and mutex.
var session_pub_keys = NewSyncMap()

// This size limits the number of concurrent connections each server can handle.
// I can't imagine a single server handling more than 10k connections at once but just in case.
const LRUCACHE_SIZE = 10480
type SyncMap struct {
Mutex sync.RWMutex // Read Write Mutex to allow for multiple readers
Map map[int][]byte // Example data map
Map *lru.Cache[int, []byte] // Example data map
}

func NewSyncMap() *SyncMap {
return &SyncMap{Mutex: sync.RWMutex{}, Map: make(map[int][]byte)}
l, err := lru.New[int, []byte](LRUCACHE_SIZE)
if err != nil {
slog.Error("Failed to create LRU cache")
}
return &SyncMap{Map: l}
}

func (s *SyncMap) Load(key int) ([]byte, bool) {
defer s.Mutex.Unlock()
s.Mutex.Lock()
res, ok := s.Map[key]
return res, ok
return s.Map.Get(key)
}

func (s *SyncMap) Store(key int, value []byte) {
defer s.Mutex.Unlock()
s.Mutex.Lock()
s.Map[key] = value
}

func (s *SyncMap) Delete(key int) {
defer s.Mutex.Unlock()
s.Mutex.Lock()
delete(s.Map, key)
s.Map.Add(key, value)
}

// TODO: Should we make this a random long byte array in case it gets used anywhere to avoid encrypting data with a weak key? - Sliver handles errors in this way.
Expand Down Expand Up @@ -208,9 +202,6 @@ func (csvc *CryptoSvc) Encrypt(in_arr []byte) []byte {
return FAILURE_BYTES
}

// We should only need to use these once so delete it after use
session_pub_keys.Delete(id)

// Generate shared secret
shared_key := csvc.generate_shared_key(client_pub_key_bytes)
aead, err := chacha20poly1305.NewX(shared_key)
Expand Down
Loading