Rewrite testing framework to use recordings only
ehuss committed Feb 5, 2023
1 parent 41be270 · commit 1617f0d
Showing 98 changed files with 26,454 additions and 6,193 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

1 change: 1 addition & 0 deletions Cargo.toml
@@ -44,6 +44,7 @@ rand = "0.8.5"
ignore = "0.4.18"
postgres-types = { version = "0.2.4", features = ["derive"] }
cron = { version = "0.12.0" }
bytes = "1.1.0"

[dependencies.serde]
version = "1"
100 changes: 46 additions & 54 deletions src/github.rs
@@ -1,5 +1,7 @@
use crate::test_record;
use anyhow::Context;
use async_trait::async_trait;
use bytes::Bytes;
use chrono::{DateTime, FixedOffset, Utc};
use futures::{future::BoxFuture, FutureExt};
use hyper::header::HeaderValue;
@@ -21,22 +23,32 @@ pub struct User {
}

impl GithubClient {
async fn _send_req(&self, req: RequestBuilder) -> anyhow::Result<(Response, String)> {
async fn send_req(&self, req: RequestBuilder) -> anyhow::Result<(Bytes, String)> {
const MAX_ATTEMPTS: usize = 2;
log::debug!("_send_req with {:?}", req);
log::debug!("send_req with {:?}", req);
let req_dbg = format!("{:?}", req);
let req = req
.build()
.with_context(|| format!("building reqwest {}", req_dbg))?;

let test_capture_info = test_record::capture_request(&req);

let mut resp = self.client.execute(req.try_clone().unwrap()).await?;
if let Some(sleep) = Self::needs_retry(&resp).await {
resp = self.retry(req, sleep, MAX_ATTEMPTS).await?;
}
let status = resp.status();
let maybe_err = resp.error_for_status_ref().err();
let body = resp
.bytes()
.await
.with_context(|| format!("failed to read response body {req_dbg}"))?;
test_record::record_request(test_capture_info, status, &body);
if let Some(e) = maybe_err {
return Err(e.into());
}

resp.error_for_status_ref()?;

Ok((resp, req_dbg))
Ok((body, req_dbg))
}

async fn needs_retry(resp: &Response) -> Option<Duration> {
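The rewritten send_req above is where the HTTP recording hooks in: the outgoing request is snapshotted before it is sent, the full response body is buffered into Bytes, and the pair is handed to the new test_record module before any error is surfaced. That module is among the 98 changed files but is not rendered in this excerpt; inferred purely from the call sites in this diff, its request-recording surface might look roughly like the sketch below (every name, type, and field here is an assumption, not the actual src/test_record.rs).

use reqwest::{Request, StatusCode};

/// Hypothetical handle for a captured request; None means recording is off.
pub struct RequestInfo {
    method: String,
    url: String,
}

/// Snapshot the outgoing request so it can later be matched to its response.
pub fn capture_request(req: &Request) -> Option<RequestInfo> {
    Some(RequestInfo {
        method: req.method().to_string(),
        url: req.url().to_string(),
    })
}

/// Pair the captured request with the response status and buffered body and
/// persist them (for example as a JSON file) for later replay in tests.
pub fn record_request(info: Option<RequestInfo>, status: StatusCode, body: &[u8]) {
    if let Some(info) = info {
        // The real module would write this to its recording store; the sketch
        // only shows the shape of the data flowing through the new hooks.
        let _ = (info, status, body);
    }
}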
@@ -116,6 +128,7 @@ impl GithubClient {
.unwrap(),
)
.await?;
rate_resp.error_for_status_ref()?;
let rate_limit_response = rate_resp.json::<RateLimitResponse>().await?;

// Check url for search path because github has different rate limits for the search api
Expand Down Expand Up @@ -151,27 +164,12 @@ impl GithubClient {
.boxed()
}

async fn send_req(&self, req: RequestBuilder) -> anyhow::Result<Vec<u8>> {
let (mut resp, req_dbg) = self._send_req(req).await?;

let mut body = Vec::new();
while let Some(chunk) = resp.chunk().await.transpose() {
let chunk = chunk
.context("reading stream failed")
.map_err(anyhow::Error::from)
.context(req_dbg.clone())?;
body.extend_from_slice(&chunk);
}

Ok(body)
}

pub async fn json<T>(&self, req: RequestBuilder) -> anyhow::Result<T>
where
T: serde::de::DeserializeOwned,
{
let (resp, req_dbg) = self._send_req(req).await?;
Ok(resp.json().await.context(req_dbg)?)
let (body, _req_dbg) = self.send_req(req).await?;
Ok(serde_json::from_slice(&body)?)
}
}
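With the old _send_req/send_req split collapsed into the single send_req above, json() is now the one place where typed responses are decoded from the buffered body, and callers such as the GraphQL query further down in this file route through it directly. A hypothetical call site might look like the sketch below (the helper name and endpoint are illustrative, not taken from this diff; it relies on the anyhow::Context import already at the top of this file and on User deriving serde::Deserialize).

// Illustrative only: fetch and decode a typed value through the new json() path.
async fn current_user(client: &GithubClient) -> anyhow::Result<User> {
    client
        .json(client.get("https://api.github.com/user"))
        .await
        .context("failed to fetch the authenticated user")
}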

@@ -414,8 +412,8 @@ impl IssueRepository {
async fn has_label(&self, client: &GithubClient, label: &str) -> anyhow::Result<bool> {
#[allow(clippy::redundant_pattern_matching)]
let url = format!("{}/labels/{}", self.url(client), label);
match client._send_req(client.get(&url)).await {
Ok((_, _)) => Ok(true),
match client.send_req(client.get(&url)).await {
Ok(_) => Ok(true),
Err(e) => {
if e.downcast_ref::<reqwest::Error>()
.map_or(false, |e| e.status() == Some(StatusCode::NOT_FOUND))
@@ -495,7 +493,7 @@ impl Issue {
body: &'a str,
}
client
._send_req(client.patch(&edit_url).json(&ChangedIssue { body }))
.send_req(client.patch(&edit_url).json(&ChangedIssue { body }))
.await
.context("failed to edit issue body")?;
Ok(())
@@ -513,7 +511,7 @@ impl Issue {
body: &'a str,
}
client
._send_req(
.send_req(
client
.patch(&comment_url)
.json(&NewComment { body: new_body }),
@@ -534,7 +532,7 @@ impl Issue {
.expect("expected api host");
let comments_url = format!("{}{comments_path}", client.api_url);
client
._send_req(client.post(&comments_url).json(&PostComment { body }))
.send_req(client.post(&comments_url).json(&PostComment { body }))
.await
.context("failed to post comment")?;
Ok(())
@@ -560,7 +558,7 @@ impl Issue {
}

client
._send_req(client.delete(&url))
.send_req(client.delete(&url))
.await
.context("failed to delete label")?;

@@ -617,7 +615,7 @@ impl Issue {
}

client
._send_req(client.post(&url).json(&LabelsReq {
.send_req(client.post(&url).json(&LabelsReq {
labels: known_labels,
}))
.await
@@ -668,7 +666,7 @@ impl Issue {
assignees: &'a [&'a str],
}
client
._send_req(client.delete(&url).json(&AssigneeReq {
.send_req(client.delete(&url).json(&AssigneeReq {
assignees: &assignees[..],
}))
.await
@@ -761,7 +759,7 @@ impl Issue {
}
let url = format!("{}/issues/{}", self.repository().url(client), self.number);
client
._send_req(client.patch(&url).json(&SetMilestone {
.send_req(client.patch(&url).json(&SetMilestone {
milestone: milestone_no,
}))
.await
@@ -776,7 +774,7 @@ impl Issue {
state: &'a str,
}
client
._send_req(
.send_req(
client
.patch(&edit_url)
.json(&CloseIssue { state: "closed" }),
@@ -801,8 +799,9 @@ impl Issue {
after
));
req = req.header("Accept", "application/vnd.github.v3.diff");
let diff = client.send_req(req).await?;
Ok(Some(String::from(String::from_utf8_lossy(&diff))))
let (diff, _) = client.send_req(req).await?;
let body = String::from_utf8_lossy(&diff).to_string();
Ok(Some(body))
}

/// Returns the commits from this pull request (no commits are returned if this `Issue` is not
@@ -1246,7 +1245,7 @@ impl Repository {
) -> anyhow::Result<()> {
let url = format!("{}/git/refs/{}", self.url(client), refname);
client
._send_req(client.patch(&url).json(&serde_json::json!({
.send_req(client.patch(&url).json(&serde_json::json!({
"sha": sha,
"force": true,
})))
@@ -1505,7 +1504,7 @@ impl Repository {
pub async fn merge_upstream(&self, client: &GithubClient, branch: &str) -> anyhow::Result<()> {
let url = format!("{}/merge-upstream", self.url(client));
client
._send_req(client.post(&url).json(&serde_json::json!({
.send_req(client.post(&url).json(&serde_json::json!({
"branch": branch,
})))
.await
@@ -1811,27 +1810,23 @@ impl GithubClient {
repo: &str,
branch: &str,
path: &str,
) -> anyhow::Result<Option<Vec<u8>>> {
) -> anyhow::Result<Option<Bytes>> {
let url = format!("{}/{repo}/{branch}/{path}", self.raw_url);
let req = self.get(&url);
let req_dbg = format!("{:?}", req);
let req = req
.build()
.with_context(|| format!("failed to build request {:?}", req_dbg))?;
let mut resp = self.client.execute(req).await.context(req_dbg.clone())?;
let test_capture_info = test_record::capture_request(&req);
let resp = self.client.execute(req).await.context(req_dbg.clone())?;
let status = resp.status();
let body = resp
.bytes()
.await
.with_context(|| format!("failed to read response body {req_dbg}"))?;
test_record::record_request(test_capture_info, status, &body);
match status {
StatusCode::OK => {
let mut buf = Vec::with_capacity(resp.content_length().unwrap_or(4) as usize);
while let Some(chunk) = resp.chunk().await.transpose() {
let chunk = chunk
.context("reading stream failed")
.map_err(anyhow::Error::from)
.context(req_dbg.clone())?;
buf.extend_from_slice(&chunk);
}
Ok(Some(buf))
}
StatusCode::OK => Ok(Some(body)),
StatusCode::NOT_FOUND => Ok(None),
status => anyhow::bail!("failed to GET {}: {}", url, status),
}
@@ -2036,11 +2031,8 @@ impl IssuesQuery for LeastRecentlyReviewedPullRequests {
let req = client.post(&format!("{}/graphql", client.api_url));
let req = req.json(&query);

let (resp, req_dbg) = client._send_req(req).await?;
let data = resp
.json::<cynic::GraphQlResponse<queries::LeastRecentlyReviewedPullRequests>>()
.await
.context(req_dbg)?;
let data: cynic::GraphQlResponse<queries::LeastRecentlyReviewedPullRequests> =
client.json(req).await?;
if let Some(errors) = data.errors {
anyhow::bail!("There were graphql errors. {:?}", errors);
}
2 changes: 1 addition & 1 deletion src/handlers/github_releases.rs
@@ -132,7 +132,7 @@ async fn load_changelog(
.await?
.ok_or_else(|| anyhow::Error::msg("missing file"))?;

Ok(String::from_utf8(resp)?)
Ok(String::from_utf8(resp.to_vec())?)
}

async fn load_paginated<T, R, F>(ctx: &Context, url: &str, key: F) -> anyhow::Result<HashMap<R, T>>
1 change: 1 addition & 0 deletions src/lib.rs
@@ -26,6 +26,7 @@ pub mod payload;
pub mod rfcbot;
pub mod team;
mod team_data;
pub mod test_record;
pub mod triage;
pub mod zulip;

68 changes: 2 additions & 66 deletions src/main.rs
@@ -5,7 +5,6 @@ use futures::future::FutureExt;
use futures::StreamExt;
use hyper::{header, Body, Request, Response, Server, StatusCode};
use route_recognizer::Router;
use std::path::PathBuf;
use std::{env, net::SocketAddr, sync::Arc};
use tokio::{task, time};
use tower::{Service, ServiceExt};
@@ -219,7 +218,7 @@ async fn serve_req(
.unwrap());
}
};
maybe_record_test(&event, &payload);
triagebot::test_record::record_event(&event, &payload);

match triagebot::webhook(event, payload, &ctx).await {
Ok(true) => Ok(Response::new(Body::from("processed request"))),
@@ -234,70 +233,6 @@
}
}

/// Webhook recording to help with writing server_test integration tests.
fn maybe_record_test(event: &EventName, payload: &str) {
if std::env::var_os("TRIAGEBOT_TEST_RECORD").is_none() {
return;
}
let sequence_path = |name| {
let path = PathBuf::from(format!("{name}.json"));
if !path.exists() {
return path;
}
let mut index = 1;
loop {
let path = PathBuf::from(format!("{name}.{index}.json"));
if !path.exists() {
return path;
}
index += 1;
}
};
let payload_json: serde_json::Value = serde_json::from_str(payload).expect("valid json");
let name = match event {
EventName::PullRequest => {
let action = payload_json["action"].as_str().unwrap();
let number = payload_json["number"].as_u64().unwrap();
format!("pr{number}_{action}")
}
EventName::PullRequestReview => {
let action = payload_json["action"].as_str().unwrap();
let number = payload_json["pull_request"]["number"].as_u64().unwrap();
format!("pr{number}_review_{action}")
}
EventName::PullRequestReviewComment => {
let action = payload_json["action"].as_str().unwrap();
let number = payload_json["pull_request"]["number"].as_u64().unwrap();
format!("pr{number}_review_comment_{action}")
}
EventName::IssueComment => {
let action = payload_json["action"].as_str().unwrap();
let number = payload_json["issue"]["number"].as_u64().unwrap();
format!("issue{number}_comment_{action}")
}
EventName::Issue => {
let action = payload_json["action"].as_str().unwrap();
let number = payload_json["issue"]["number"].as_u64().unwrap();
format!("issue{number}_{action}")
}
EventName::Push => {
let after = payload_json["after"].as_str().unwrap();
format!("push_{after}")
}
EventName::Create => {
let ref_type = payload_json["ref_type"].as_str().unwrap();
let git_ref = payload_json["ref"].as_str().unwrap();
format!("create_{ref_type}_{git_ref}")
}
EventName::Other => {
return;
}
};
let path = sequence_path(name);
std::fs::write(&path, payload).unwrap();
log::info!("recorded JSON payload to {:?}", path);
}

async fn run_server(addr: SocketAddr) -> anyhow::Result<()> {
let pool = db::ClientPool::new();
db::run_migrations(&*pool.get().await)
@@ -426,6 +361,7 @@ async fn main() {
.with_ansi(std::env::var_os("DISABLE_COLOR").is_none())
.try_init()
.unwrap();
triagebot::test_record::init().unwrap();

let port = env::var("PORT")
.ok()
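On the webhook side, the inline maybe_record_test helper above is deleted and serve_req now hands every event straight to triagebot::test_record::record_event, with recording switched on once at startup by test_record::init(); the event-to-filename mapping the deleted helper implemented presumably moves into the new module. A plausible shape for init, mirroring the TRIAGEBOT_TEST_RECORD environment-variable gate the old helper used, is sketched below (the variable's meaning and the directory handling are assumptions; the actual module may differ).

/// Hypothetical sketch: enable recording only when the operator opts in,
/// as the removed maybe_record_test did via TRIAGEBOT_TEST_RECORD.
pub fn init() -> anyhow::Result<()> {
    if let Some(dir) = std::env::var_os("TRIAGEBOT_TEST_RECORD") {
        // Prepare a directory for the recorded JSON files (assumption: the
        // variable's value names the output directory).
        std::fs::create_dir_all(&dir)?;
        log::info!("test recording enabled, writing to {dir:?}");
    }
    Ok(())
}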