/*
- * Copyright (C) 2005-2012 Andre Noll <maan@systemlinux.org>
+ * Copyright (C) 2005 Andre Noll <maan@tuebingen.mpg.de>
*
* Licensed under the GPL v2. For licencing details see COPYING.
*/
#include <openssl/pem.h>
#include <openssl/sha.h>
#include <openssl/bn.h>
+#include <openssl/aes.h>
#include "para.h"
#include "error.h"
*result = rsa;
return 1;
fail:
- if (rsa)
- RSA_free(rsa);
+ RSA_free(rsa);
return ret;
}
}
struct stream_cipher {
-	RC4_KEY key;
+	/* Selects which member of the context union below is valid. */
+	bool use_aes;
+	union {
+		/* RC4 key schedule, stored inline (use_aes == false). */
+		RC4_KEY rc4_key;
+		/* Allocated in sc_new(), released in sc_free() (use_aes == true). */
+		EVP_CIPHER_CTX *aes;
+	} context;
};
-struct stream_cipher *sc_new(const unsigned char *data, int len)
+/*
+ * Allocate and initialize a stream cipher structure.
+ *
+ * With use_aes false, data/len is used as the RC4 key. With use_aes true,
+ * data must provide at least two AES-CTR blocks: the first serves as the
+ * AES-128 key, the second as the initial counter block (IV).
+ *
+ * The caller owns the returned structure and must pass it to sc_free().
+ */
+struct stream_cipher *sc_new(const unsigned char *data, int len,
+		bool use_aes)
{
	struct stream_cipher *sc = para_malloc(sizeof(*sc));
-	RC4_set_key(&sc->key, len, data);
+	int ret;
+
+	sc->use_aes = use_aes;
+	if (!use_aes) {
+		RC4_set_key(&sc->context.rc4_key, len, data);
+		return sc;
+	}
+	/* One block for the key, one for the initial counter. */
+	assert(len >= 2 * AES_CRT128_BLOCK_SIZE);
+	sc->context.aes = EVP_CIPHER_CTX_new();
+	/* Check results, consistent with the asserts in aes_ctr128_crypt(). */
+	assert(sc->context.aes);
+	ret = EVP_EncryptInit_ex(sc->context.aes, EVP_aes_128_ctr(), NULL, data,
+		data + AES_CRT128_BLOCK_SIZE);
+	assert(ret != 0);
	return sc;
}
void sc_free(struct stream_cipher *sc)
{
+	if (!sc)
+		return;
+	/*
+	 * Only the AES variant owns heap memory. For RC4 the union holds the
+	 * key schedule inline, so reinterpreting those bytes as an
+	 * EVP_CIPHER_CTX pointer and freeing them would be undefined behavior.
+	 */
+	if (sc->use_aes)
+		EVP_CIPHER_CTX_free(sc->context.aes);
	free(sc);
}
*/
#define RC4_ALIGN 8
-int sc_send_bin_buffer(struct stream_cipher_context *scc, char *buf,
-		size_t len)
+/*
+ * RC4-process the src buffer into a newly allocated dst buffer.
+ *
+ * The destination is over-allocated to the next multiple of RC4_ALIGN plus
+ * one byte for the terminating NUL; the caller must free dst->iov_base.
+ */
+static void rc4_crypt(RC4_KEY *key, struct iovec *src, struct iovec *dst)
{
-	int ret;
-	unsigned char *tmp;
-	static unsigned char remainder[RC4_ALIGN];
-	size_t l1 = ROUND_DOWN(len, RC4_ALIGN), l2 = ROUND_UP(len, RC4_ALIGN);
-
-	assert(len);
-	tmp = para_malloc(l2);
-	RC4(&scc->send->key, l1, (const unsigned char *)buf, tmp);
+	size_t len = src->iov_len, l1, l2;
+
+	assert(len > 0);
+	/* Guard against overflow in the ROUND_UP() computation below. */
+	assert(len < ((typeof(src->iov_len))-1) / 2);
+	l1 = ROUND_DOWN(len, RC4_ALIGN);
+	l2 = ROUND_UP(len, RC4_ALIGN);
+
+	*dst = (typeof(*dst)) {
+		/* Add one for the terminating zero byte. */
+		.iov_base = para_malloc(l2 + 1),
+		.iov_len = len
+	};
+	/* Bulk-process the RC4_ALIGN-aligned prefix in one call. */
+	RC4(key, l1, src->iov_base, dst->iov_base);
	if (len > l1) {
-		memcpy(remainder, buf + l1, len - l1);
-		RC4(&scc->send->key, len - l1, remainder, tmp + l1);
+		/* Trailing partial chunk goes through a zeroed stack scratch buffer. */
+		unsigned char remainder[RC4_ALIGN] = "";
+		memcpy(remainder, src->iov_base + l1, len - l1);
+		RC4(key, len - l1, remainder, dst->iov_base + l1);
	}
-	ret = write_all(scc->fd, (char *)tmp, &len);
-	free(tmp);
-	return ret;
+	((char *)dst->iov_base)[len] = '\0';
}
-int sc_recv_bin_buffer(struct stream_cipher_context *scc, char *buf,
-		size_t size)
+/*
+ * AES-CTR-process the src buffer into a newly allocated dst buffer.
+ *
+ * CTR mode is a stream cipher, so the output length equals the input
+ * length. The result is NUL-terminated; the caller must free
+ * dst->iov_base.
+ */
+static void aes_ctr128_crypt(EVP_CIPHER_CTX *ctx, struct iovec *src,
+		struct iovec *dst)
{
-	unsigned char *tmp = para_malloc(ROUND_UP(size, RC4_ALIGN));
-	ssize_t ret = recv(scc->fd, tmp, size, 0);
-
-	if (ret > 0)
-		RC4(&scc->recv->key, ret, tmp, (unsigned char *)buf);
-	else if (ret < 0)
-		ret = -ERRNO_TO_PARA_ERROR(errno);
-	free(tmp);
-	return ret;
+	int ret, inlen = src->iov_len, outlen, tmplen;
+
+	*dst = (typeof(*dst)) {
+		/* Add one for the terminating zero byte. */
+		.iov_base = para_malloc(inlen + 1),
+		.iov_len = inlen
+	};
+	ret = EVP_EncryptUpdate(ctx, dst->iov_base, &outlen, src->iov_base, inlen);
+	assert(ret != 0);
+	/*
+	 * NOTE(review): EVP_EncryptFinal_ex() is called once per buffer even
+	 * though the context is reused for subsequent sc_crypt() calls. For
+	 * CTR it emits no output, but confirm that the target OpenSSL version
+	 * permits EVP_EncryptUpdate() after finalization.
+	 */
+	ret = EVP_EncryptFinal_ex(ctx, dst->iov_base + outlen, &tmplen);
+	assert(ret != 0);
+	outlen += tmplen;
+	((char *)dst->iov_base)[outlen] = '\0';
+	dst->iov_len = outlen;
+}
+
+/*
+ * Encrypt or decrypt a buffer.
+ *
+ * Runs whichever cipher was selected at sc_new() time on src and stores
+ * the result in a newly allocated buffer described by dst. The caller
+ * must free dst->iov_base.
+ */
+void sc_crypt(struct stream_cipher *sc, struct iovec *src, struct iovec *dst)
+{
+	/*
+	 * C11 6.8.6.4 forbids "return <void expression>" in a function
+	 * returning void, so dispatch with plain if/else instead of
+	 * return statements.
+	 */
+	if (sc->use_aes)
+		aes_ctr128_crypt(sc->context.aes, src, dst);
+	else
+		rc4_crypt(&sc->context.rc4_key, src, dst);
}
void hash_function(const char *data, unsigned long len, unsigned char *hash)