editoast: replace serde_json::Value by appropriate structs
hamz2a committed Dec 28, 2023
1 parent 6c12f67 commit d101589
Showing 15 changed files with 278 additions and 76 deletions.
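
The pattern applied throughout this commit is to stop carrying raw `serde_json::Value` and instead deserialize into the concrete shape the code expects (for example `HashMap<String, String>` for `power_restrictions`, or dedicated structs and enums elsewhere). The sketch below is a standalone illustration of that idea, assuming only `serde` and `serde_json`; the struct and field names merely mirror the diff and are not repository code.

// Standalone illustration of the commit's core pattern (not repository code):
// deserialize into the concrete shape instead of holding a raw serde_json::Value.
use std::collections::HashMap;

use serde::Deserialize;

#[derive(Deserialize)]
struct LoosePayload {
    // Before: any JSON shape is accepted here and must be re-validated later.
    power_restrictions: Option<serde_json::Value>,
}

#[derive(Deserialize)]
struct TypedPayload {
    // After: only a mapping of power restriction code to power class deserializes.
    power_restrictions: Option<HashMap<String, String>>,
}

fn main() {
    let good = r#"{ "power_restrictions": { "C2": "2", "C5": "5" } }"#;
    let bad = r#"{ "power_restrictions": [1, 2, 3] }"#;

    // The loose form accepts both payloads; the bad one only blows up when inspected later.
    let loose: LoosePayload = serde_json::from_str(bad).expect("any JSON shape is accepted");
    assert!(loose.power_restrictions.unwrap().is_array());

    // The typed form rejects the malformed payload at the boundary and yields a real map.
    assert!(serde_json::from_str::<TypedPayload>(bad).is_err());
    let typed: TypedPayload = serde_json::from_str(good).expect("well-formed mapping");
    assert_eq!(typed.power_restrictions.unwrap().get("C5"), Some(&"5".to_string()));
}

With the typed form, a malformed payload is rejected at the deserialization boundary instead of surfacing later as an unexpected JSON shape.
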
2 changes: 2 additions & 0 deletions editoast/openapi.yaml
@@ -4758,6 +4758,8 @@ paths:
        example: track_sections
        type: string
      promoteId:
        additionalProperties:
          type: string
        type: object
      scheme:
        example: xyz
6 changes: 3 additions & 3 deletions editoast/src/models/rolling_stock/light_rolling_stock.rs
@@ -14,7 +14,7 @@ use diesel_async::RunQueryDsl;
use diesel_json::Json as DieselJson;
use editoast_derive::Model;
use serde::Serialize;
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use utoipa::ToSchema;

#[derive(Debug, Model, Queryable, QueryableByName, Serialize, ToSchema)]
@@ -43,8 +43,8 @@ pub struct LightRollingStockModel {
rolling_resistance: DieselJson<RollingResistance>,
#[schema(value_type = LoadingGaugeType)]
loading_gauge: String,
#[schema(value_type = HashMap<String, String>)]
power_restrictions: Option<JsonValue>,
#[schema(value_type = Option<HashMap<String, String>>)]
power_restrictions: Option<DieselJson<HashMap<String, String>>>,
#[schema(value_type = Vec<EnergySource>)]
energy_sources: DieselJson<Vec<EnergySource>>,
locked: bool,
6 changes: 3 additions & 3 deletions editoast/src/models/rolling_stock/mod.rs
@@ -27,7 +27,7 @@ use diesel_async::{AsyncPgConnection as PgConnection, RunQueryDsl};
use diesel_json::Json as DieselJson;
use editoast_derive::Model;
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use validator::{Validate, ValidationError};

crate::schemas! {
@@ -109,9 +109,9 @@ pub struct RollingStockModel {
#[diesel(deserialize_as = String)]
#[schema(value_type = LoadingGaugeType)]
pub loading_gauge: Option<String>,
#[diesel(deserialize_as = Option<JsonValue>)]
#[diesel(deserialize_as = Option<DieselJson<HashMap<String, String>>>)]
#[schema(value_type = Option<HashMap<String, String>>)]
pub power_restrictions: Option<Option<JsonValue>>,
pub power_restrictions: Option<Option<DieselJson<HashMap<String, String>>>>,
#[diesel(deserialize_as = DieselJson<Vec<EnergySource>>)]
#[schema(value_type = EnergySource)]
pub energy_sources: Option<DieselJson<Vec<EnergySource>>>,
2 changes: 1 addition & 1 deletion editoast/src/schema/geo_json.rs
@@ -3,7 +3,7 @@ use mvt::GeomType;
use serde::{Deserialize, Serialize};

/// GeoJson representation
#[derive(Clone, Serialize, Deserialize, Debug)]
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
#[serde(tag = "type")]
pub enum GeoJson {
Point { coordinates: (f64, f64) },
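
The `PartialEq` derive added above lets decoded `GeoJson` values be compared directly in tests (see the new assertion in `objects.rs` further down). Below is a minimal, self-contained sketch of the tagged-enum round trip, assuming only `serde` and `serde_json`; the single-variant enum is an illustrative stand-in, not the repository's full `GeoJson` type.

use serde::{Deserialize, Serialize};

#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
#[serde(tag = "type")]
enum GeoJson {
    Point { coordinates: (f64, f64) },
}

fn main() {
    let raw = r#"{ "type": "Point", "coordinates": [2.32, 48.84] }"#;
    let decoded: GeoJson = serde_json::from_str(raw).expect("valid GeoJSON point");
    // Equality on the decoded value itself, not on a serde_json::Value tree.
    assert_eq!(decoded, GeoJson::Point { coordinates: (2.32, 48.84) });
}
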
3 changes: 1 addition & 2 deletions editoast/src/schema/rolling_stock/light_rolling_stock.rs
@@ -1,7 +1,6 @@
use diesel::sql_types::{Array, BigInt, Bool, Double, Jsonb, Nullable, Text};
use diesel_json::Json as DieselJson;
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use utoipa::ToSchema;

@@ -61,7 +60,7 @@ pub struct LightRollingStock {
pub metadata: DieselJson<RollingStockMetadata>,
#[diesel(sql_type = Nullable<Jsonb>)]
#[schema(value_type = HashMap<String, String>)]
pub power_restrictions: Option<JsonValue>,
pub power_restrictions: Option<DieselJson<HashMap<String, String>>>,
#[diesel(sql_type = Jsonb)]
#[schema(value_type = Vec<EnergySource>)]
pub energy_sources: DieselJson<Vec<EnergySource>>,
4 changes: 2 additions & 2 deletions editoast/src/schema/rolling_stock/mod.rs
@@ -1,8 +1,8 @@
pub mod light_rolling_stock;
pub mod rolling_stock_livery;

use diesel_json::Json as DieselJson;
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use strum_macros::{Display, EnumString};
use utoipa::ToSchema;
@@ -53,7 +53,7 @@ pub struct RollingStockCommon {
pub loading_gauge: String,
/// Mapping of power restriction code to power class
#[schema(value_type = HashMap<String, String>)]
pub power_restrictions: Option<JsonValue>,
pub power_restrictions: Option<DieselJson<HashMap<String, String>>>,
#[serde(default)]
pub energy_sources: Vec<EnergySource>,
/// The time the train takes before actually using electrical power (in seconds). Is null if the train is not electric.
2 changes: 1 addition & 1 deletion editoast/src/tests/example_rolling_stock_1.json
@@ -981,7 +981,7 @@
"unit": ""
},
"energy_sources": [],
"power_restrictions": {"C2":"2"},
"power_restrictions": {"C2":"2", "C5":"5"},
"electrical_power_startup_time": 5.0,
"raise_pantograph_time": 15.0
}
10 changes: 5 additions & 5 deletions editoast/src/views/infra/auto_fixes.rs
@@ -284,11 +284,11 @@ mod test {
// Check the only initial issues are "overlapping_speed_sections" warnings
let infra_errors_before_all: PaginatedResponse<crate::views::infra::errors::InfraError> =
read_body_json(call_service(&app, errors_request(small_infra_id)).await).await;
assert!(infra_errors_before_all.results.iter().all(|e| e
.information
.get("error_type")
.unwrap()
== "overlapping_speed_sections"));

assert!(infra_errors_before_all.results.iter().all(|e| matches!(
e.information.get_sub_type(),
InfraErrorType::OverlappingSpeedSections { .. }
)));

// Remove a track
let deletion = Operation::Delete(DeleteOperation {
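
The test above now matches on a typed error variant via `get_sub_type()` instead of comparing the raw `error_type` string. The sketch below illustrates why the typed form is safer, assuming only `serde` and `serde_json`; `ErrorKind` and its variants are hypothetical stand-ins, not the repository's `InfraError`/`InfraErrorType` types.

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(tag = "error_type", rename_all = "snake_case")]
enum ErrorKind {
    OverlappingSpeedSections { speed_section_ids: Vec<String> },
    InvalidReference { reference: String },
}

fn main() {
    let raw = r#"{
        "error_type": "overlapping_speed_sections",
        "speed_section_ids": ["sec_1", "sec_2"]
    }"#;
    let parsed: ErrorKind = serde_json::from_str(raw).expect("valid error payload");

    // The discriminant is now a variant the compiler checks: a misspelled variant name
    // fails to build, whereas a misspelled string literal in a comparison against
    // information.get("error_type") only fails at runtime.
    match parsed {
        ErrorKind::OverlappingSpeedSections { speed_section_ids } => {
            assert_eq!(speed_section_ids.len(), 2);
        }
        ErrorKind::InvalidReference { reference } => panic!("unexpected variant: {reference}"),
    }
}
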
13 changes: 6 additions & 7 deletions editoast/src/views/infra/errors.rs
@@ -1,15 +1,16 @@
use crate::error::Result;
use crate::schema::InfraError as SchemaInfraError;
use crate::schema::InfraErrorType;
use crate::views::pagination::{Paginate, PaginatedResponse, PaginationQueryParam};
use crate::DbPool;
use actix_web::dev::HttpServiceFactory;
use actix_web::get;
use actix_web::web::{Data, Json as WebJson, Path, Query};
use diesel::sql_query;
use diesel::sql_types::{BigInt, Json, Text};
use diesel::sql_types::{BigInt, Jsonb, Text};
use diesel_json::Json as DieselJson;
use editoast_derive::EditoastError;
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use strum::VariantNames;
use thiserror::Error;

@@ -65,10 +66,9 @@ enum ListErrorsErrors {
}

#[derive(QueryableByName, Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct InfraError {
#[diesel(sql_type = Json)]
pub information: JsonValue,
#[diesel(sql_type = Jsonb)]
pub information: DieselJson<SchemaInfraError>,
}

#[derive(Default, Debug, Clone, PartialEq, Eq, Deserialize)]
@@ -87,8 +87,7 @@ async fn get_paginated_infra_errors(
per_page: i64,
params: &QueryParams,
) -> Result<PaginatedResponse<InfraError>> {
let mut query =
String::from("SELECT information::text FROM infra_layer_error WHERE infra_id = $1");
let mut query = String::from("SELECT information FROM infra_layer_error WHERE infra_id = $1");
if params.level == Level::Warnings {
query += " AND information->>'is_warning' = 'true'"
} else if params.level == Level::Errors {
17 changes: 11 additions & 6 deletions editoast/src/views/infra/mod.rs
@@ -35,7 +35,6 @@ use diesel::{sql_query, QueryableByName};
use diesel_async::RunQueryDsl;
use editoast_derive::EditoastError;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value as JsonValue};
use std::collections::HashMap;
use thiserror::Error;
use utoipa::IntoParams;
@@ -128,6 +127,11 @@ struct RefreshQueryParams {
infras: List<i64>,
}

#[derive(Debug, Serialize)]
struct RefreshResponse {
infra_refreshed: Vec<i64>,
}

/// Refresh infra generated data
#[post("/refresh")]
async fn refresh(
@@ -136,7 +140,7 @@ async fn refresh(
query_params: Query<RefreshQueryParams>,
infra_caches: Data<CHashMap<i64, InfraCache>>,
map_layers: Data<MapLayers>,
) -> Result<Json<JsonValue>> {
) -> Result<Json<RefreshResponse>> {
let mut conn = db_pool.get().await?;
// Use a transaction to give scope to infra list lock
let mut infras_list = vec![];
@@ -158,20 +162,20 @@
}

// Refresh each infras
let mut refreshed_infra = vec![];
let mut infra_refreshed = vec![];

for infra in infras_list {
let infra_cache = InfraCache::get_or_load(&mut conn, &infra_caches, &infra).await?;
if infra
.refresh(db_pool.clone(), query_params.force, &infra_cache)
.await?
{
refreshed_infra.push(infra.id.unwrap());
infra_refreshed.push(infra.id.unwrap());
}
}

let mut conn = redis_client.get_connection().await?;
for infra_id in refreshed_infra.iter() {
for infra_id in infra_refreshed.iter() {
map::invalidate_all(
&mut conn,
&map_layers.layers.keys().cloned().collect(),
Expand All @@ -180,7 +184,7 @@ async fn refresh(
.await?;
}

Ok(Json(json!({ "infra_refreshed": refreshed_infra })))
Ok(Json(RefreshResponse { infra_refreshed }))
}

/// Return a list of infras
@@ -531,6 +535,7 @@ pub mod tests {
use actix_web::test::{call_and_read_body_json, call_service, read_body_json, TestRequest};
use diesel_async::{AsyncConnection, AsyncPgConnection as PgConnection};
use rstest::*;
use serde_json::json;
use strum::IntoEnumIterator;

fn delete_infra_request(infra_id: i64) -> Request {
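
The `refresh` endpoint now returns a dedicated `RefreshResponse` struct rather than an ad-hoc `json!` value. The sketch below, assuming only `serde` and `serde_json`, shows that the typed struct keeps the same wire format while the field name becomes compiler-checked; it is a standalone illustration, not the handler code.

use serde::Serialize;

#[derive(Serialize)]
struct RefreshResponse {
    infra_refreshed: Vec<i64>,
}

fn main() {
    let response = RefreshResponse { infra_refreshed: vec![1, 2, 3] };
    let as_json = serde_json::to_value(&response).expect("serializable");
    // Same JSON shape as the old json!({ "infra_refreshed": ... }) response.
    assert_eq!(as_json, serde_json::json!({ "infra_refreshed": [1, 2, 3] }));
}
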
69 changes: 61 additions & 8 deletions editoast/src/views/infra/objects.rs
@@ -6,12 +6,13 @@ use actix_web::web::{Data, Json, Path};
use diesel::sql_types::{Array, BigInt, Jsonb, Nullable, Text};
use diesel::{sql_query, QueryableByName};
use diesel_async::RunQueryDsl;
use diesel_json::Json as DieselJson;
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use thiserror::Error;

use crate::error::Result;
use crate::schema::ObjectType;
use crate::schema::{GeoJson, ObjectType};
use crate::DbPool;
use editoast_derive::EditoastError;

@@ -37,17 +38,16 @@ fn has_unique_ids(obj_ids: &Vec<String>) -> bool {
obj_ids_2.len() == obj_ids.len()
}

#[derive(QueryableByName, Debug, Clone, Serialize, Deserialize)]
#[derive(QueryableByName, Debug, Clone, Serialize, Deserialize, PartialEq)]
struct ObjectQueryable {
#[diesel(sql_type = Text)]
#[serde(skip_serializing)]
obj_id: String,
#[diesel(sql_type = Jsonb)]
railjson: JsonValue,
#[diesel(sql_type = Nullable<Jsonb>)]
geographic: Option<JsonValue>,
geographic: Option<DieselJson<GeoJson>>,
#[diesel(sql_type = Nullable<Jsonb>)]
schematic: Option<JsonValue>,
schematic: Option<DieselJson<GeoJson>>,
}

/// Return the railjson list of a specific OSRD object
@@ -121,11 +121,14 @@
#[cfg(test)]
mod tests {
use actix_web::http::StatusCode;
use actix_web::test::{call_service, TestRequest};
use actix_web::test::{call_service, read_body_json, TestRequest};
use serde_json::{json, Value as JsonValue};

use crate::fixtures::tests::{empty_infra, TestFixture};
use crate::fixtures::tests::{db_pool, empty_infra, TestFixture};
use crate::models::Infra;
use crate::schema::SwitchType;
use crate::schema::operation::Operation;
use crate::schema::{Switch, SwitchType};
use crate::views::infra::objects::ObjectQueryable;
use crate::views::infra::tests::create_object_request;
use crate::views::tests::create_test_service;
use rstest::*;
@@ -155,6 +158,56 @@ mod tests {
assert_eq!(call_service(&app, req).await.status(), StatusCode::OK);
}

#[rstest]
async fn get_objects_should_return_switch() {
// GIVEN
let app = create_test_service().await;
let empty_infra = empty_infra(db_pool()).await;
let empty_infra_id = empty_infra.id();

let switch = Switch {
id: "switch_001".into(),
switch_type: "switch_type_001".into(),
..Default::default()
}
.into();

let create_operation = Operation::Create(Box::new(switch));
let request = TestRequest::post()
.uri(format!("/infra/{empty_infra_id}/").as_str())
.set_json(json!([create_operation]))
.to_request();
let response = call_service(&app, request).await;
assert_eq!(response.status(), StatusCode::OK);

let expected_switch_object = vec![ObjectQueryable {
obj_id: "switch_001".into(),
railjson: json!({
"extensions": {
"sncf": JsonValue::Null
},
"group_change_delay": 0.0,
"id": "switch_001",
"ports": {},
"switch_type": "switch_type_001"
}),
geographic: None,
schematic: None,
}];

// WHEN
let req = TestRequest::post()
.uri(format!("/infra/{empty_infra_id}/objects/Switch").as_str())
.set_json(vec!["switch_001"])
.to_request();
let response = call_service(&app, req).await;

// THEN
assert_eq!(response.status(), StatusCode::OK);
let switch_object: Vec<ObjectQueryable> = read_body_json(response).await;
assert_eq!(switch_object, expected_switch_object);
}

#[rstest]
async fn get_objects_duplicate_ids(#[future] empty_infra: TestFixture<Infra>) {
let empty_infra = empty_infra.await;
(Diffs for the remaining changed files are not shown here.)
