diff --git a/.buildkite/eslint_flow_jest.yml b/.buildkite/eslint_flow_jest.yml
index f7af06d69..4a868d977 100644
--- a/.buildkite/eslint_flow_jest.yml
+++ b/.buildkite/eslint_flow_jest.yml
@@ -1,17 +1,18 @@
 steps:
   - label: ':eslint: :jest: ESLint & Flow & Jest'
     command:
       - '(pkill flow || true)'
       - 'curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y'
       - '. /root/.cargo/env'
+      - 'apt update && apt install -y cmake'
       - 'yarn cleaninstall --frozen-lockfile --skip-optional --network-timeout 180000'
       - 'yarn eslint --max-warnings=0 && yarn workspace lib flow && yarn workspace web flow && yarn workspace landing flow && yarn workspace native flow && yarn workspace keyserver flow && yarn workspace desktop flow'
       - 'yarn workspace lib test && yarn workspace keyserver test'
     plugins:
       - docker#v3.13.0:
           image: 'node:16.13-bullseye'
           always-pull: true
           workdir: /comm
           propagate-environment: true
     agents:
       - 'autoscaling=true'
diff --git a/.buildkite/jsi_codegen.yml b/.buildkite/jsi_codegen.yml
index f104d092c..be1a066ee 100644
--- a/.buildkite/jsi_codegen.yml
+++ b/.buildkite/jsi_codegen.yml
@@ -1,14 +1,17 @@
 steps:
   - label: 'JSI Codegen'
     command:
       - '(pkill flow || true)'
+      - 'apt update && apt install -y cmake'
+      - 'curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y'
+      - '. /root/.cargo/env'
       - 'yarn cleaninstall --frozen-lockfile --skip-optional --network-timeout 180000'
       - 'cd native && yarn codegen-jsi && git diff --exit-code'
     plugins:
       - docker#v3.13.0:
           image: 'node:16.13-bullseye'
           always-pull: true
           workdir: /comm
           propagate-environment: true
     agents:
       - 'autoscaling=true'
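Both CI jobs now install cmake (and the JSI codegen job additionally installs a Rust toolchain) before `yarn cleaninstall`, because the rust-node-addon postinstall step builds the addon and, in CI, installs protobuf via `scripts/install_protobuf.sh`. That script is not part of this diff; the block below is only a rough sketch of what a cmake-based protobuf install of this kind typically looks like (version, download URL, and install prefix are assumptions, not the repo's actual script):

```bash
#!/usr/bin/env bash
# Hypothetical sketch of scripts/install_protobuf.sh -- NOT the actual script.
set -euo pipefail

# Assumed version; the real script may pin a different one
PROTO_TAG="21.12"
PROTO_VERSION="3.21.12"

cd "$(mktemp -d)"
curl -sSfL -o protobuf.tar.gz \
  "https://github.com/protocolbuffers/protobuf/releases/download/v${PROTO_TAG}/protobuf-cpp-${PROTO_VERSION}.tar.gz"
tar -xzf protobuf.tar.gz
cd "protobuf-${PROTO_VERSION}"

# protobuf's C++ build is driven by CMake, which is why cmake is installed
# both here in CI and in the keyserver Dockerfile below
cmake . -DCMAKE_BUILD_TYPE=Release -Dprotobuf_BUILD_TESTS=OFF
cmake --build . --parallel "$(nproc)"
cmake --install .
```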
diff --git a/keyserver/Dockerfile b/keyserver/Dockerfile
index 9d4305700..dd67bf1c1 100644
--- a/keyserver/Dockerfile
+++ b/keyserver/Dockerfile
@@ -1,168 +1,178 @@
 FROM node:16.18.0-bullseye
 
 #-------------------------------------------------------------------------------
 # STEP 0: SET UP USER
 # Set up Linux user and group for the container
 #-------------------------------------------------------------------------------
 
 # We use bind mounts for our backups folder, which means Docker on Linux will
 # blindly match the UID/GID for the backups folder on the container with the
 # host. In order to make sure the container is able to create backups with the
 # right UID/GID, we need to do two things:
 # 1. Make sure that the user that runs the Docker container on the host has
 #    permissions to write to the backups folder on the host. We rely on the host
 #    to configure this properly
 # 2. Make sure we're running this container with the same UID/GID that the host
 #    is using, so the UID/GID show up correctly on both sides of the bind mount
 # To handle 2 correctly, we have the host pass the UID/GID with which they're
 # running the container. Our approach is based on this one:
 # https://github.com/mhart/alpine-node/issues/48#issuecomment-430902787
 
 ARG HOST_UID
 ARG HOST_GID
 
 ARG COMM_ALCHEMY_KEY
 
 USER root
 RUN \
   if [ -z "`getent group $HOST_GID`" ]; then \
     addgroup --system --gid $HOST_GID comm; \
   else \
     groupmod --new-name comm `getent group $HOST_GID | cut -d: -f1`; \
   fi && \
   if [ -z "`getent passwd $HOST_UID`" ]; then \
     adduser --system --uid $HOST_UID --ingroup comm --shell /bin/bash comm; \
   else \
     usermod --login comm --gid $HOST_GID --home /home/comm --move-home \
       `getent passwd $HOST_UID | cut -d: -f1`; \
   fi
 
 #-------------------------------------------------------------------------------
 # STEP 1: INSTALL PREREQS
 # Install prereqs first so we don't have to reinstall them if anything changes
 #-------------------------------------------------------------------------------
 
 # We need to add the MariaDB repo to apt in order to install mariadb-client
 RUN wget https://downloads.mariadb.com/MariaDB/mariadb_repo_setup \
   && chmod +x mariadb_repo_setup \
   && ./mariadb_repo_setup \
   && rm mariadb_repo_setup
 
 # We need rsync in the prod-build yarn script
 # We need mariadb-client so we can use mysqldump for backups
+# We need cmake to install protobuf (prereq for rust-node-addon)
 RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \
   rsync \
   mariadb-client \
+  cmake \
   && rm -rf /var/lib/apt/lists/*
 
+# Install protobuf manually to ensure that we have the correct version
+COPY scripts/install_protobuf.sh scripts/
+RUN cd scripts && ./install_protobuf.sh
+
 #-------------------------------------------------------------------------------
 # STEP 2: DEVOLVE PRIVILEGES
 # Create another user to run the rest of the commands
 #-------------------------------------------------------------------------------
 
 USER comm
 WORKDIR /home/comm/app
 
 #-------------------------------------------------------------------------------
 # STEP 3: SET UP MYSQL BACKUPS
 # Prepare the system to properly handle mysqldump backups
 #-------------------------------------------------------------------------------
 
 # Prepare the directory that will hold the backups
 RUN mkdir /home/comm/backups
 
 #-------------------------------------------------------------------------------
 # STEP 4: SET UP CARGO (RUST PACKAGE MANAGER)
 # We use Cargo to build pre-compiled Node.js addons in Rust
 #-------------------------------------------------------------------------------
 
 # Install Rust and add Cargo's bin directory to the $PATH environment variable
 RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
 ENV PATH /home/comm/.cargo/bin:$PATH
 
 #-------------------------------------------------------------------------------
 # STEP 5: SET UP NVM
 # We use nvm to make sure we're running the right Node version
 #-------------------------------------------------------------------------------
 
 # First we install nvm
 ENV NVM_DIR /home/comm/.nvm
 RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh \
   | bash
 
 # Then we use nvm to install the right version of Node. We call this early so
 # Docker build caching saves us from re-downloading Node when any file changes
 COPY --chown=comm keyserver/.nvmrc keyserver/
 COPY --chown=comm keyserver/bash/source-nvm.sh keyserver/bash/
 RUN cd keyserver && . bash/source-nvm.sh
 
 #-------------------------------------------------------------------------------
 # STEP 6: YARN CLEANINSTALL
 # We run yarn cleaninstall before copying most of the files in for build caching
 #-------------------------------------------------------------------------------
 
-# Copy in package.json and yarn.lock files
+# Copy in package.json files, yarn.lock files, and required scripts
 COPY --chown=comm package.json yarn.lock ./
 COPY --chown=comm keyserver/package.json keyserver/.flowconfig keyserver/
 COPY --chown=comm lib/package.json lib/.flowconfig lib/
 COPY --chown=comm web/package.json web/.flowconfig web/
 COPY --chown=comm native/package.json native/.flowconfig native/postinstall.sh native/
 COPY --chown=comm landing/package.json landing/.flowconfig landing/
 COPY --chown=comm desktop/package.json desktop/
 COPY --chown=comm keyserver/addons/rust-node-addon/package.json \
+  keyserver/addons/rust-node-addon/install_ci_deps.sh \
   keyserver/addons/rust-node-addon/
 COPY --chown=comm native/expo-modules/android-lifecycle/package.json \
   native/expo-modules/android-lifecycle/
 
 # Create empty Rust library and copy in Cargo.toml file
 RUN cargo init keyserver/addons/rust-node-addon --lib
 COPY --chown=comm keyserver/addons/rust-node-addon/Cargo.toml \
   keyserver/addons/rust-node-addon/
 
+# Copy in comm-opaque library, a dependency of rust-node-addon
+COPY --chown=comm shared/comm-opaque shared/comm-opaque/
+
 # Copy in files needed for patch-package
 COPY --chown=comm patches patches/
 
 # Actually run yarn
 RUN yarn cleaninstall
 
 #-------------------------------------------------------------------------------
 # STEP 7: WEBPACK BUILD
 # We do this first so Docker doesn't rebuild when only keyserver files change
 #-------------------------------------------------------------------------------
 
 COPY --chown=comm lib lib/
 COPY --chown=comm landing landing/
 RUN yarn workspace landing prod
 
 COPY --chown=comm web web/
 RUN yarn workspace web prod
 
 #-------------------------------------------------------------------------------
 # STEP 8: COPY IN SOURCE FILES
 # We run this later so the above layers are cached if only source files change
 #-------------------------------------------------------------------------------
 
 COPY --chown=comm . .
 
 #-------------------------------------------------------------------------------
 # STEP 9: BUILD NODE ADDON
 # Now that source files have been copied in, build rust-node-addon
 #-------------------------------------------------------------------------------
 
 RUN yarn workspace rust-node-addon build
 
 #-------------------------------------------------------------------------------
 # STEP 10: RUN BUILD SCRIPTS
 # We need to populate keyserver/dist, among other things
 #-------------------------------------------------------------------------------
 
 # Babel transpilation of keyserver src
 RUN yarn workspace keyserver prod-build
 
 #-------------------------------------------------------------------------------
 # STEP 11: RUN THE SERVER
 # Actually run the Node.js keyserver using nvm
 #-------------------------------------------------------------------------------
 
 EXPOSE 3000
 WORKDIR /home/comm/app/keyserver
 CMD bash/run-prod.sh
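The UID/GID plumbing in STEP 0 only works if the host passes its own IDs in as build args. As an illustrative invocation (not a script provided by this diff), a host-side build from the repository root might look like:

```bash
# Hypothetical invocation -- the repo's actual build/run scripts may differ.
# Run from the repository root so paths like keyserver/ and lib/ resolve.
docker build \
  --build-arg HOST_UID="$(id -u)" \
  --build-arg HOST_GID="$(id -g)" \
  --build-arg COMM_ALCHEMY_KEY="$COMM_ALCHEMY_KEY" \
  -f keyserver/Dockerfile \
  -t comm-keyserver .
```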
diff --git a/keyserver/addons/rust-node-addon/Cargo.toml b/keyserver/addons/rust-node-addon/Cargo.toml
index ed8192965..1229057dd 100644
--- a/keyserver/addons/rust-node-addon/Cargo.toml
+++ b/keyserver/addons/rust-node-addon/Cargo.toml
@@ -1,19 +1,32 @@
 [package]
 edition = "2021"
 name = "rust-node-addon"
 version = "0.1.0"
 license = "BSD-3-Clause"
 
 [lib]
 crate-type = ["cdylib"]
 
 [dependencies]
 # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
-napi = { version = "2.10.1", default-features = false, features = ["napi4"] }
+napi = { version = "2.10.1", default-features = false, features = [
+  "napi4",
+  "tokio_rt",
+] }
 napi-derive = { version = "2.9.1", default-features = false }
+opaque-ke = "1.2"
+rand = "0.8"
+tonic = "0.8"
+tokio = { version = "1.0", features = ["macros", "rt-multi-thread"] }
+tokio-stream = "0.1"
+tracing = "0.1"
+prost = "0.11"
+comm-opaque = {path = "../../../shared/comm-opaque"}
+lazy_static = "1.4"
 
 [build-dependencies]
 napi-build = "2.0.1"
+tonic-build = "0.8"
 
 [profile.release]
 lto = true
diff --git a/keyserver/addons/rust-node-addon/build.rs b/keyserver/addons/rust-node-addon/build.rs
index 1f866b6a3..04f021580 100644
--- a/keyserver/addons/rust-node-addon/build.rs
+++ b/keyserver/addons/rust-node-addon/build.rs
@@ -1,5 +1,7 @@
 extern crate napi_build;
 
 fn main() {
   napi_build::setup();
+  tonic_build::compile_protos("../../../shared/protos/identity.proto")
+    .unwrap_or_else(|e| panic!("Failed to compile protos {:?}", e));
 }
diff --git a/keyserver/addons/rust-node-addon/index.js b/keyserver/addons/rust-node-addon/index.js
index 172ad002c..8c50f7728 100644
--- a/keyserver/addons/rust-node-addon/index.js
+++ b/keyserver/addons/rust-node-addon/index.js
@@ -1,35 +1,41 @@
 // @flow
 
 const { platform, arch } = process;
 
 type RustAPI = {
-  +sum: (a: number, b: number) => number,
+  +registerUser: (
+    userId: string,
+    deviceId: string,
+    username: string,
+    password: string,
+    userPublicKey: string,
+  ) => Promise<string>,
 };
 
 async function getRustAPI(): Promise<RustAPI> {
   let nativeBinding = null;
   if (platform === 'darwin' && arch === 'x64') {
     // $FlowFixMe
     nativeBinding = await import('./napi/rust-node-addon.darwin-x64.node');
   } else if (platform === 'darwin' && arch === 'arm64') {
     // $FlowFixMe
     nativeBinding = await import('./napi/rust-node-addon.darwin-arm64.node');
   } else if (platform === 'linux' && arch === 'x64') {
     // $FlowFixMe
     nativeBinding = await import('./napi/rust-node-addon.linux-x64-gnu.node');
   } else if (platform === 'linux' && arch === 'arm64') {
     // $FlowFixMe
     nativeBinding = await import('./napi/rust-node-addon.linux-arm64-gnu.node');
   } else {
     throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`);
   }
 
   if (!nativeBinding) {
     throw new Error('Failed to load native binding');
   }
 
-  const { sum } = nativeBinding.default;
-  return { sum };
+  const { registerUser } = nativeBinding.default;
+  return { registerUser };
 }
 
 export { getRustAPI };
diff --git a/keyserver/addons/rust-node-addon/install_ci_deps.sh b/keyserver/addons/rust-node-addon/install_ci_deps.sh
new file mode 100755
index 000000000..3a73f1a73
--- /dev/null
+++ b/keyserver/addons/rust-node-addon/install_ci_deps.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+set -eo pipefail
+
+# We can skip this script if it's not part of a CI workflow
+if [[ -z "$BUILDKITE" ]] && [[ -z "$CI" ]];
+then
+  echo "Not in a CI workflow, exiting" >&2
+  exit
+fi
+
+# Install protobuf if it's not already installed
+if ! command -v protoc >/dev/null;
+then
+  echo "Installing protobuf"
+  SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd -P)
+  bash "${SCRIPT_DIR}/../../../scripts/install_protobuf.sh"
+fi
diff --git a/keyserver/addons/rust-node-addon/package.json b/keyserver/addons/rust-node-addon/package.json
index 691a8940f..f77117ded 100644
--- a/keyserver/addons/rust-node-addon/package.json
+++ b/keyserver/addons/rust-node-addon/package.json
@@ -1,39 +1,40 @@
 {
   "workspaces": {
     "nohoist": [
       "@napi-rs/cli"
     ]
   },
   "private": true,
   "name": "rust-node-addon",
   "version": "0.0.1",
   "main": "index.js",
   "type": "module",
   "napi": {
     "name": "rust-node-addon",
     "triples": {
       "defaults": false,
       "additional": [
         "x86_64-apple-darwin",
         "aarch64-apple-darwin",
         "x86_64-unknown-linux-gnu",
         "aarch64-unknown-linux-gnu"
       ]
     }
   },
   "license": "BSD-3-Clause",
   "devDependencies": {
     "@napi-rs/cli": "^2.13.0"
   },
   "engines": {
     "node": ">= 16"
   },
   "scripts": {
     "artifacts": "napi artifacts",
-    "build": "napi build --platform napi --release",
+    "build": "yarn install-ci-deps && napi build --platform napi --release",
     "build:debug": "napi build --platform napi",
     "version": "napi version",
     "postinstall": "yarn build",
-    "clean": "rm -rf target/ && rm -rf napi/ && rm -rf node_modules/"
+    "clean": "rm -rf target/ && rm -rf napi/ && rm -rf node_modules/",
+    "install-ci-deps": "bash ./install_ci_deps.sh"
   }
 }
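With the addon's exported surface switched from `sum` to `registerUser`, keyserver code reaches it through `getRustAPI()`. A minimal usage sketch follows; the caller, import specifier, and argument values are illustrative and not part of this diff:

```js
// @flow
// Hypothetical caller -- not part of this diff.
import { getRustAPI } from 'rust-node-addon';

async function registerWithIdentityService(): Promise<string> {
  const rustAPI = await getRustAPI();
  // All five arguments are strings; the resolved value is the access token
  // handed back by the identity service once PAKE registration completes.
  return await rustAPI.registerUser(
    'example-user-id',
    'example-device-id',
    'example-username',
    'example-password',
    'example-user-public-key',
  );
}
```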
diff --git a/keyserver/addons/rust-node-addon/src/identity_client.rs b/keyserver/addons/rust-node-addon/src/identity_client.rs
new file mode 100644
index 000000000..aeec87093
--- /dev/null
+++ b/keyserver/addons/rust-node-addon/src/identity_client.rs
@@ -0,0 +1,306 @@
+use lazy_static::lazy_static;
+use napi::bindgen_prelude::*;
+use opaque_ke::{
+  ClientLogin, ClientLoginFinishParameters, ClientLoginStartParameters,
+  ClientLoginStartResult, ClientRegistration,
+  ClientRegistrationFinishParameters, CredentialFinalization,
+  CredentialResponse, RegistrationResponse, RegistrationUpload,
+};
+use rand::{rngs::OsRng, CryptoRng, Rng};
+use tokio::sync::mpsc;
+use tokio_stream::wrappers::ReceiverStream;
+use tonic::Request;
+use tracing::{error, instrument};
+
+mod identity {
+  tonic::include_proto!("identity");
+}
+
+use comm_opaque::Cipher;
+use identity::identity_service_client::IdentityServiceClient;
+use identity::{
+  pake_login_response::Data::AccessToken,
+  pake_login_response::Data::PakeCredentialResponse,
+  registration_request::Data::PakeCredentialFinalization as RegistrationPakeCredentialFinalization,
+  registration_request::Data::PakeRegistrationRequestAndUserId,
+  registration_request::Data::PakeRegistrationUploadAndCredentialRequest,
+  registration_response::Data::PakeLoginResponse as RegistrationPakeLoginResponse,
+  registration_response::Data::PakeRegistrationResponse,
+  PakeLoginResponse as PakeLoginResponseStruct,
+  PakeRegistrationRequestAndUserId as PakeRegistrationRequestAndUserIdStruct,
+  PakeRegistrationUploadAndCredentialRequest as PakeRegistrationUploadAndCredentialRequestStruct,
+  RegistrationRequest, RegistrationResponse as RegistrationResponseMessage,
+};
+use std::env::var;
+
+lazy_static! {
+  static ref IDENTITY_SERVICE_SOCKET_ADDR: String =
+    var("COMM_IDENTITY_SERVICE_SOCKET_ADDR")
+      .unwrap_or("https://[::1]:50051".to_string());
+}
+
+#[napi]
+#[instrument(skip_all)]
+pub async fn register_user(
+  user_id: String,
+  device_id: String,
+  username: String,
+  password: String,
+  user_public_key: String,
+) -> Result<String> {
+  let mut identity_client =
+    IdentityServiceClient::connect(IDENTITY_SERVICE_SOCKET_ADDR.as_str())
+      .await
+      .map_err(|_| Error::from_status(Status::GenericFailure))?;
+
+  // Create a RegistrationRequest channel and use ReceiverStream to turn the
+  // MPSC receiver into a Stream for outbound messages
+  let (tx, rx) = mpsc::channel(1);
+  let stream = ReceiverStream::new(rx);
+  let request = Request::new(stream);
+
+  // `response` is the Stream for inbound messages
+  let mut response = identity_client
+    .register_user(request)
+    .await
+    .map_err(|_| Error::from_status(Status::GenericFailure))?
+    .into_inner();
+
+  // Start PAKE registration on client and send initial registration request
+  // to Identity service
+  let mut client_rng = OsRng;
+  let (registration_request, client_registration) = pake_registration_start(
+    &mut client_rng,
+    user_id,
+    &password,
+    device_id,
+    username,
+    user_public_key,
+  )?;
+  send_to_mpsc(tx.clone(), registration_request).await?;
+
+  // Handle responses from Identity service sequentially, making sure we get
+  // messages in the correct order
+
+  // Finish PAKE registration and begin PAKE login; send the final
+  // registration request and initial login request together to reduce the
+  // number of trips
+  let message = response
+    .message()
+    .await
+    .map_err(|_| Error::from_status(Status::GenericFailure))?;
+  let client_login = handle_registration_response(
+    message,
+    &mut client_rng,
+    client_registration,
+    &password,
+    tx.clone(),
+  )
+  .await?;
+
+  // Finish PAKE login; send final login request to Identity service
+  let message = response
+    .message()
+    .await
+    .map_err(|_| Error::from_status(Status::GenericFailure))?;
+  handle_registration_credential_response(message, client_login, tx)
+    .await
+    .map_err(|_| Error::from_status(Status::GenericFailure))?;
+
+  // Return access token
+  let message = response
+    .message()
+    .await
+    .map_err(|_| Error::from_status(Status::GenericFailure))?;
+  handle_registration_token_response(message)
+}
+
+fn handle_unexpected_response(
+  message: Option<RegistrationResponseMessage>,
+) -> Error {
+  error!("Received an unexpected message: {:?}", message);
+  Error::from_status(Status::GenericFailure)
+}
+
+async fn send_to_mpsc<T>(tx: mpsc::Sender<T>, request: T) -> Result<()> {
+  if let Err(e) = tx.send(request).await {
+    error!("Response was dropped: {}", e);
+    return Err(Error::from_status(Status::GenericFailure));
+  }
+  Ok(())
+}
+
+fn pake_login_start(
+  rng: &mut (impl Rng + CryptoRng),
+  password: &str,
+) -> Result<ClientLoginStartResult<Cipher>> {
+  ClientLogin::<Cipher>::start(
+    rng,
+    password.as_bytes(),
+    ClientLoginStartParameters::default(),
+  )
+  .map_err(|e| {
+    error!("Failed to start PAKE login: {}", e);
+    Error::from_status(Status::GenericFailure)
+  })
+}
+
+fn pake_login_finish(
+  credential_response_bytes: &[u8],
+  client_login: ClientLogin<Cipher>,
+) -> Result<CredentialFinalization<Cipher>> {
+  client_login
+    .finish(
+      CredentialResponse::deserialize(credential_response_bytes).map_err(
+        |e| {
+          error!("Could not deserialize credential response bytes: {}", e);
+          Error::from_status(Status::GenericFailure)
+        },
+      )?,
+      ClientLoginFinishParameters::default(),
+    )
+    .map_err(|e| {
+      error!("Failed to finish PAKE login: {}", e);
+      Error::from_status(Status::GenericFailure)
+    })
+    .map(|res| res.message)
+}
+
+fn pake_registration_start(
+  rng: &mut (impl Rng + CryptoRng),
+  user_id: String,
+  password: &str,
+  device_id: String,
+  username: String,
+  user_public_key: String,
+) -> Result<(RegistrationRequest, ClientRegistration<Cipher>)> {
+  let client_registration_start_result =
+    ClientRegistration::<Cipher>::start(rng, password.as_bytes()).map_err(
+      |e| {
+        error!("Failed to start PAKE registration: {}", e);
+        Error::from_status(Status::GenericFailure)
+      },
+    )?;
+  let pake_registration_request =
+    client_registration_start_result.message.serialize();
+  Ok((
+    RegistrationRequest {
+      data: Some(PakeRegistrationRequestAndUserId(
+        PakeRegistrationRequestAndUserIdStruct {
+          user_id,
+          device_id,
+          pake_registration_request,
+          username,
+          user_public_key,
+        },
+      )),
+    },
+    client_registration_start_result.state,
+  ))
+}
+
+async fn handle_registration_response(
+  message: Option<RegistrationResponseMessage>,
+  client_rng: &mut (impl Rng + CryptoRng),
+  client_registration: ClientRegistration<Cipher>,
+  password: &str,
+  tx: mpsc::Sender<RegistrationRequest>,
+) -> Result<ClientLogin<Cipher>> {
+  if let Some(RegistrationResponseMessage {
+    data: Some(PakeRegistrationResponse(registration_response_bytes)),
+    ..
+  }) = message
+  {
+    let pake_registration_upload = pake_registration_finish(
+      client_rng,
+      &registration_response_bytes,
+      client_registration,
+    )?
+    .serialize();
+    let client_login_start_result = pake_login_start(client_rng, password)?;
+
+    // `registration_request` is a gRPC message containing serialized bytes to
+    // complete PAKE registration and begin PAKE login
+    let registration_request = RegistrationRequest {
+      data: Some(PakeRegistrationUploadAndCredentialRequest(
+        PakeRegistrationUploadAndCredentialRequestStruct {
+          pake_registration_upload,
+          pake_credential_request: client_login_start_result
+            .message
+            .serialize()
+            .map_err(|e| {
+              error!("Could not serialize credential request: {}", e);
+              Error::from_status(Status::GenericFailure)
+            })?,
+        },
+      )),
+    };
+    send_to_mpsc(tx, registration_request).await?;
+    Ok(client_login_start_result.state)
+  } else {
+    Err(handle_unexpected_response(message))
+  }
+}
+
+async fn handle_registration_credential_response(
+  message: Option<RegistrationResponseMessage>,
+  client_login: ClientLogin<Cipher>,
+  tx: mpsc::Sender<RegistrationRequest>,
+) -> Result<()> {
+  if let Some(RegistrationResponseMessage {
+    data:
+      Some(RegistrationPakeLoginResponse(PakeLoginResponseStruct {
+        data: Some(PakeCredentialResponse(credential_response_bytes)),
+      })),
+  }) = message
+  {
+    let registration_request = RegistrationRequest {
+      data: Some(RegistrationPakeCredentialFinalization(
+        pake_login_finish(&credential_response_bytes, client_login)?
+          .serialize()
+          .map_err(|e| {
+            error!("Could not serialize credential request: {}", e);
+            Error::from_status(Status::GenericFailure)
+          })?,
+      )),
+    };
+    send_to_mpsc(tx, registration_request).await
+  } else {
+    Err(handle_unexpected_response(message))
+  }
+}
+
+fn handle_registration_token_response(
+  message: Option<RegistrationResponseMessage>,
+) -> Result<String> {
+  if let Some(RegistrationResponseMessage {
+    data:
+      Some(RegistrationPakeLoginResponse(PakeLoginResponseStruct {
+        data: Some(AccessToken(access_token)),
+      })),
+  }) = message
+  {
+    Ok(access_token)
+  } else {
+    Err(handle_unexpected_response(message))
+  }
+}
+
+fn pake_registration_finish(
+  rng: &mut (impl Rng + CryptoRng),
+  registration_response_bytes: &[u8],
+  client_registration: ClientRegistration<Cipher>,
+) -> Result<RegistrationUpload<Cipher>> {
+  client_registration
+    .finish(
+      rng,
+      RegistrationResponse::deserialize(registration_response_bytes).map_err(
+        |e| {
+          error!("Could not deserialize registration response bytes: {}", e);
+          Error::from_status(Status::GenericFailure)
+        },
+      )?,
+      ClientRegistrationFinishParameters::default(),
+    )
+    .map_err(|e| {
+      error!("Failed to finish PAKE registration: {}", e);
+      Error::from_status(Status::GenericFailure)
+    })
+    .map(|res| res.message)
+}
diff --git a/keyserver/addons/rust-node-addon/src/lib.rs b/keyserver/addons/rust-node-addon/src/lib.rs
index b479ed71e..872a3a424 100644
--- a/keyserver/addons/rust-node-addon/src/lib.rs
+++ b/keyserver/addons/rust-node-addon/src/lib.rs
@@ -1,9 +1,4 @@
-#![deny(clippy::all)]
+pub mod identity_client;
 
 #[macro_use]
 extern crate napi_derive;
-
-#[napi]
-pub fn sum(a: i32, b: i32) -> i32 {
-  a + b
-}
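identity_client.rs builds its OPAQUE flows on `comm_opaque::Cipher`, the type exported by the shared/comm-opaque crate that the Dockerfile now copies in but that isn't shown in this diff. Purely as a hedged sketch of what such a type usually is, an opaque-ke 1.2 cipher suite is declared roughly like this (the real comm-opaque may choose different primitives):

```rust
// Hypothetical sketch of shared/comm-opaque's exported Cipher type -- NOT the
// actual crate. Assumes curve25519-dalek and sha2 as dependencies.
use opaque_ke::ciphersuite::CipherSuite;

pub struct Cipher;

impl CipherSuite for Cipher {
  type Group = curve25519_dalek::ristretto::RistrettoPoint;
  type KeyExchange = opaque_ke::key_exchange::tripledh::TripleDH;
  type Hash = sha2::Sha512;
  type SlowHash = opaque_ke::slow_hash::NoOpHash;
}
```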