Merge pull request #400 from NEU-DSG/create-post-confirmation-lambda-event-handler

Create-post-confirmation-lambda-event-handler
GracefulLemming committed Feb 9, 2024
2 parents fcf4d58 + 533f394 commit f2d424b
Showing 20 changed files with 2,492 additions and 930 deletions.
2,952 changes: 2,132 additions & 820 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,2 +1,2 @@
[workspace]
members = ["types", "graphql", "migration"]
members = ["types", "graphql", "migration", "admin-event-handlers"]
1 change: 1 addition & 0 deletions admin-event-handlers/.gitignore
@@ -0,0 +1 @@
/target
35 changes: 35 additions & 0 deletions admin-event-handlers/Cargo.toml
@@ -0,0 +1,35 @@
[package]
name = "admin-event-handlers"
version = "0.1.0"
authors = ["Naomi Trevino <n.trevino@northeastern.edu>"]
edition = "2021"

# Function Binary definitions
# each lambda function should have its own unique binary
[[bin]]
name = "auth-post-confirmation"
path = "src/auth/post-confirmation/main.rs"

[dependencies]
aws_lambda_events = { version = "0.12.0", default-features = false, features = ["cognito"] }

lambda_runtime = "0.9.1"
tokio = { version = "1", features = ["macros"] }
tracing = { version = "0.1", features = ["log"] }
tracing-subscriber = { version = "0.3", default-features = false, features = ["env-filter", "fmt"] }
anyhow = "1.0"
itertools = "0.10"
futures = "0.3"
dotenv = "0.15"
regex = "1.5"
rayon = "1.4"
lazy_static = "1.4"
base64 = "0.13"
log = "0.4"
pretty_env_logger = "0.4"
serde = {version = "^1.0", features = ["derive"]}
serde_json = "^1.0"
reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false }
aws-config = "^1"
aws-sdk-cognitoidentityprovider = "^1"
dailp = {path = "../types"}
36 changes: 36 additions & 0 deletions admin-event-handlers/src/auth/post-confirmation/cognito_idp_operations.rs
@@ -0,0 +1,36 @@
use aws_config::SdkConfig;
use aws_sdk_cognitoidentityprovider::Client;
use dailp::auth::UserGroup;

/// A client for conducting Cognito operations.
pub struct CognitoClient {
client: Client,
pool_id: String,
}

impl CognitoClient {
/// Create a new Cognito IDP Client with the provided configuration.
pub async fn new(config: &SdkConfig, pool_id: String) -> Result<Self, anyhow::Error> {
Ok(Self {
client: Client::new(config),
pool_id,
})
}
/// Attempts to add a user to a group.
/// Fails if AdminAddUserToGroup fails.
pub async fn add_user_to_group(
self,
email: String,
group: UserGroup,
) -> Result<(), anyhow::Error> {
self.client
.admin_add_user_to_group()
.user_pool_id(self.pool_id)
.username(email)
.group_name(group.to_string())
.send()
.await
.map_err(anyhow::Error::new)
.map(|_| ())
}
}
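
For orientation, here is a minimal sketch of how this client might be exercised outside the lambda handler. It assumes the snippet lives in this same module; the pool id and email are placeholders, and the "Editors" role name is only assumed to match what UserGroup's FromStr accepts (it mirrors the naming the sheet parser below produces).

// Hypothetical standalone usage of CognitoClient; the pool id, email, and role name are placeholders.
use std::str::FromStr;

async fn promote_example_user() -> Result<(), anyhow::Error> {
    // Load the default AWS configuration, as the lambda entry point does.
    let config = aws_config::defaults(aws_config::BehaviorVersion::latest())
        .load()
        .await;
    let client = CognitoClient::new(&config, "us-east-1_EXAMPLE".to_string()).await?;
    // add_user_to_group takes `self` by value, so the client is consumed by this call.
    client
        .add_user_to_group(
            "reader@example.com".to_string(),
            UserGroup::from_str("Editors")?,
        )
        .await
}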
44 changes: 44 additions & 0 deletions admin-event-handlers/src/auth/post-confirmation/google_sheets_operations.rs
@@ -0,0 +1,44 @@
use anyhow::Result;
use dailp::{auth::UserGroup, SheetResult};
use serde::{Deserialize, Serialize};
use std::str::FromStr;

/// Represents one user's predetermined role.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UserPermission {
/// The user's role
pub role: UserGroup,
/// The user's email address
pub email: String,
}

pub struct SheetInterpretation {
pub sheet: SheetResult,
}

impl SheetInterpretation {
/// Reads each line of the spreadsheet and encodes any roles defined in it.
pub fn into_permission_list(self) -> Result<Vec<UserPermission>> {
let mut sections: Vec<UserPermission> = Vec::new();
// First row is headers: "Full Name", "Alt name", "DOB", "Role", "email"
for row in self.sheet.values.into_iter().skip(1) {
if row.len() > 4 && !row[3].is_empty() && !row[4].is_empty() {
let role = UserGroup::from_str(&format!("{}s", uppercase_first_letter(&row[3])))?;
sections.push(UserPermission {
role,
email: row[4].clone(),
})
}
}
Ok(sections)
}
}

/// Capitalizes the first letter of a string.
fn uppercase_first_letter(s: &str) -> String {
let mut c = s.chars();
match c.next() {
None => String::new(),
Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
}
}
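
To make the row layout concrete, here is a hedged sketch of the normalization step with fabricated values. It assumes it sits in this same module, so UserPermission, UserGroup, the existing FromStr import, and uppercase_first_letter are all in scope.

// Sketch only: a made-up row in the "Full Name, Alt name, DOB, Role, email" layout.
fn example_row_to_permission() -> anyhow::Result<UserPermission> {
    let row = vec![
        "Jane Doe".to_string(),
        String::new(),
        "1990-01-01".to_string(),
        "editor".to_string(),
        "jane@example.com".to_string(),
    ];
    // "editor" -> "Editor" -> "Editors", mirroring into_permission_list above.
    let role = UserGroup::from_str(&format!("{}s", uppercase_first_letter(&row[3])))?;
    Ok(UserPermission {
        role,
        email: row[4].clone(),
    })
}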
89 changes: 89 additions & 0 deletions admin-event-handlers/src/auth/post-confirmation/main.rs
@@ -0,0 +1,89 @@
//! A lambda event handler that adds a user to a group on confirmation if their email appears in a predefined list.

mod cognito_idp_operations;
mod google_sheets_operations;

use aws_config::{meta::region::RegionProviderChain, BehaviorVersion, Region};
use aws_lambda_events::cognito::{
CognitoEventUserPoolsPostConfirmationRequest as CognitoPostConfirmationRequest,
CognitoEventUserPoolsPostConfirmationResponse as CognitoPostConfirmationResponse,
};
use cognito_idp_operations::CognitoClient;
use google_sheets_operations::SheetInterpretation;
use itertools::Itertools;
use lambda_runtime::{service_fn, Error, LambdaEvent};

/// This is the main body for the lambda function.
/// First, it gets the email attribute of the user whose confirmation triggered this invocation.
/// Then, it retrieves the emails and roles of predetermined Editors and Contributors.
/// Finally, it adds the triggering user to the appropriate user pool group.
/// If the user is not predetermined to be an Editor or Contributor, the final step is skipped.
///
/// # Errors:
/// This function errors under any of the following conditions:
/// 1. User attributes either did not exist or did not come with the request.
/// 2. User causing this invocation does not have an attribute named "email" or the attribute exists but has no value.
/// 3. Google Sheets API did not return any data.
/// 4. Multiple users on the permissions list use the same email.
/// 5. This program is unable to access environment variables.
/// 6. AddUserToGroup fails.
async fn function_handler(
event: LambdaEvent<CognitoPostConfirmationRequest>,
) -> Result<CognitoPostConfirmationResponse, Error> {
let user_attributes = event.payload.user_attributes;
if user_attributes.is_empty() {
return Err("No email attribute found in event body.".into());
}
let user_email_or_none = user_attributes.get("email");
if user_email_or_none.is_none() {
return Err("Email attribute does not exist or is empty.".into());
}
let user_email = user_email_or_none.unwrap().clone();
let user_permission_or_none = SheetInterpretation {
sheet: dailp::SheetResult::from_sheet("1ATTekY411Jz63k6VMDn3ISFu8_f75LYFErCGY-pxVkQ", None)
.await?,
}
.into_permission_list()?
.into_iter()
.filter(move |a| a.email == user_email)
.at_most_one()?;
if user_permission_or_none.is_none() {
// We don't want to error each time a user invoking this function is not in the list.
// Instead, we log that the user is not in the list, then exit successfully.
println!("User does not have preset permissions.");
return Ok(CognitoPostConfirmationResponse {});
}
let user_permission = user_permission_or_none.unwrap();

let region = std::env::var("DAILP_AWS_REGION")?;
let region_provider = RegionProviderChain::first_try(Region::new(region))
.or_default_provider()
.or_else(Region::new("us-east-1"));

let config = aws_config::defaults(BehaviorVersion::latest())
.region(region_provider)
.load()
.await;
let pool_id_or_err = std::env::var("DAILP_USER_POOL");
if pool_id_or_err.is_err() {
return Err("Unable to access environment variable DAILP_USER_POOL.".into());
}
let cognito_action = CognitoClient::new(&config, pool_id_or_err?)
.await?
.add_user_to_group(user_permission.email, user_permission.role)
.await;

if cognito_action.is_err() {
return Err("Failed to add user to group".into());
}

Ok(CognitoPostConfirmationResponse {})
}

#[tokio::main]
async fn main() -> Result<(), Error> {
dotenv::dotenv().ok();
pretty_env_logger::init();

lambda_runtime::run(service_fn(function_handler)).await
}
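
One behavior worth calling out is the duplicate-email guard: itertools' at_most_one returns Ok(None) for zero matches, Ok(Some(_)) for exactly one, and Err(_) for more than one, which is what error condition 4 in the doc comment refers to. A self-contained sketch with placeholder emails:

// Sketch of the at_most_one guard; the emails here are placeholders, not real data.
use itertools::Itertools;

fn main() {
    let permissions = vec!["a@example.com", "b@example.com", "a@example.com"];
    let matched = permissions
        .into_iter()
        .filter(|email| *email == "a@example.com")
        .at_most_one();
    // Two entries share the email, so this is Err(_) and the handler would fail.
    assert!(matched.is_err());

    // Zero matches is fine: Ok(None), which the handler logs and treats as success.
    let none_matched = Vec::<&str>::new().into_iter().at_most_one();
    assert!(matches!(none_matched, Ok(None)));
}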
1 change: 1 addition & 0 deletions flake.nix
@@ -40,6 +40,7 @@
(filter.inDirectory "types")
(filter.inDirectory "graphql")
(filter.inDirectory "migration")
(filter.inDirectory "admin-event-handlers")
./Cargo.toml
./Cargo.lock
./rust-toolchain.toml
3 changes: 1 addition & 2 deletions migration/src/connections.rs
@@ -1,5 +1,4 @@
use crate::spreadsheets::SheetResult;
use dailp::{Database, LexicalConnection};
use dailp::{Database, LexicalConnection, SheetResult};

pub async fn migrate_connections(db: &Database) -> anyhow::Result<()> {
use itertools::Itertools as _;
3 changes: 1 addition & 2 deletions migration/src/contributors.rs
@@ -1,6 +1,5 @@
use crate::spreadsheets::SheetResult;
use anyhow::Result;
use dailp::{ContributorDetails, Database};
use dailp::{ContributorDetails, Database, SheetResult};

pub async fn migrate_all(db: &Database) -> Result<()> {
let sheet =
3 changes: 1 addition & 2 deletions migration/src/early_vocab.rs
@@ -74,8 +74,7 @@ async fn parse_early_vocab(
has_segmentation: bool,
num_links: usize,
) -> Result<Vec<dailp::LexicalConnection>> {
use crate::spreadsheets::SheetResult;
use dailp::{Date, DocumentMetadata, WordSegment};
use dailp::{Date, DocumentMetadata, SheetResult, WordSegment};

let sheet = SheetResult::from_sheet(sheet_id, None).await?;
let meta = SheetResult::from_sheet(sheet_id, Some(crate::METADATA_SHEET_NAME)).await?;
19 changes: 11 additions & 8 deletions migration/src/edited_collection.rs
@@ -1,17 +1,20 @@
use crate::spreadsheets::SheetResult;
use crate::spreadsheets::SheetInterpretation;
use anyhow::Result;
use dailp::raw::CollectionChapter;
use dailp::raw::EditedCollection;
use dailp::Database;
use dailp::SheetResult;

pub async fn migrate_edited_collection(db: &Database) -> anyhow::Result<()> {
let res = SheetResult::from_sheet("12R07Ks8A5g2jffqoILJw7nRn6GehwNwdwosxaJBwNNM", None)
.await?
.into_collection_index(
&"Cherokees Writing the Keetoowah Way".to_string(),
&4579,
&"cwkw".to_string(),
)?;
let res = SheetInterpretation {
sheet: SheetResult::from_sheet("12R07Ks8A5g2jffqoILJw7nRn6GehwNwdwosxaJBwNNM", None)
.await?,
}
.into_collection_index(
&"Cherokees Writing the Keetoowah Way".to_string(),
&4579,
&"cwkw".to_string(),
)?;
let collection = res;

// Insert collection into database
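
The refactor above (repeated in lexical.rs below) follows one pattern: SheetResult now lives in the shared dailp crate as plain data, and the migration crate wraps it in SheetInterpretation, which owns the parsing methods. A condensed, self-contained sketch of that shape with stand-in types (not the real dailp API):

// Stand-in types to illustrate the wrapper pattern; not the real dailp::SheetResult
// or the migration crate's SheetInterpretation.
struct SheetResult {
    values: Vec<Vec<String>>,
}

struct SheetInterpretation {
    sheet: SheetResult,
}

impl SheetInterpretation {
    // Interpretation logic lives on the wrapper, keeping the shared data type free of it.
    fn into_header_row(self) -> Option<Vec<String>> {
        self.sheet.values.into_iter().next()
    }
}

fn main() {
    let sheet = SheetResult {
        values: vec![vec!["Full Name".to_string(), "Role".to_string()]],
    };
    println!("{:?}", SheetInterpretation { sheet }.into_header_row());
}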
52 changes: 32 additions & 20 deletions migration/src/lexical.rs
@@ -1,8 +1,8 @@
use crate::spreadsheets::{LexicalEntryWithForms, SheetResult};
use crate::spreadsheets::{LexicalEntryWithForms, SheetInterpretation};
use anyhow::Result;
use dailp::{
convert_udb, seg_verb_surface_forms, AnnotatedForm, Contributor, Database, Date, DocumentId,
DocumentMetadata, LexicalConnection, MorphemeId, PositionInDocument, WordSegment,
DocumentMetadata, LexicalConnection, MorphemeId, PositionInDocument, SheetResult, WordSegment,
};
use itertools::Itertools;

@@ -55,24 +55,36 @@ pub async fn migrate_dictionaries(db: &Database) -> Result<()> {
3,
3,
);
let root_nouns = SheetResult::from_sheet("1XuQIKzhGf_mGCH4-bHNBAaQqTAJDNtPbNHjQDhszVRo", None)
.await?
.into_nouns(df1975_id, 1975, 1, false)?;
let irreg_nouns = SheetResult::from_sheet("1urfgtarnSypCgb5lSOhQGhhDcg1ozQ1r4jtCJ8Bu-vw", None)
.await?
.into_nouns(df1975_id, 1975, 1, false)?;
let ptcp_nouns = SheetResult::from_sheet("1JRmOx5_LlnoLQhzhyb3NmA4FAfMM2XRoT9ntyWtPEnk", None)
.await?
.into_nouns(df1975_id, 1975, 0, false)?;
let inf_nouns = SheetResult::from_sheet("1feuNOuzm0-TpotKyjebKwuXV4MYv-jnU5zLamczqu5U", None)
.await?
.into_nouns(df1975_id, 1975, 0, true)?;
let body_parts = SheetResult::from_sheet("1xdnJuTsLBwxbCz9ffJmQNeX-xNYSmntoiRTu9Uwgu5I", None)
.await?
.into_nouns(df1975_id, 1975, 1, false)?;
let root_adjs = SheetResult::from_sheet("1R5EhHRq-hlMcYKLzwY2bLAvC-LEeVklHJEHgL6dt5L4", None)
.await?
.into_adjs(df1975_id, 1975)?;
let root_nouns = SheetInterpretation {
sheet: SheetResult::from_sheet("1XuQIKzhGf_mGCH4-bHNBAaQqTAJDNtPbNHjQDhszVRo", None)
.await?,
}
.into_nouns(df1975_id, 1975, 1, false)?;
let irreg_nouns = SheetInterpretation {
sheet: SheetResult::from_sheet("1urfgtarnSypCgb5lSOhQGhhDcg1ozQ1r4jtCJ8Bu-vw", None)
.await?,
}
.into_nouns(df1975_id, 1975, 1, false)?;
let ptcp_nouns = SheetInterpretation {
sheet: SheetResult::from_sheet("1JRmOx5_LlnoLQhzhyb3NmA4FAfMM2XRoT9ntyWtPEnk", None)
.await?,
}
.into_nouns(df1975_id, 1975, 0, false)?;
let inf_nouns = SheetInterpretation {
sheet: SheetResult::from_sheet("1feuNOuzm0-TpotKyjebKwuXV4MYv-jnU5zLamczqu5U", None)
.await?,
}
.into_nouns(df1975_id, 1975, 0, true)?;
let body_parts = SheetInterpretation {
sheet: SheetResult::from_sheet("1xdnJuTsLBwxbCz9ffJmQNeX-xNYSmntoiRTu9Uwgu5I", None)
.await?,
}
.into_nouns(df1975_id, 1975, 1, false)?;
let root_adjs = SheetInterpretation {
sheet: SheetResult::from_sheet("1R5EhHRq-hlMcYKLzwY2bLAvC-LEeVklHJEHgL6dt5L4", None)
.await?,
}
.into_adjs(df1975_id, 1975)?;
let df2003 = parse_new_df1975(
SheetResult::from_sheet("18cKXgsfmVhRZ2ud8Cd7YDSHexs1ODHo6fkTPrmnwI1g", None).await?,
df2003_id,