Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 8 additions & 5 deletions store/postgres/src/relational.rs
Original file line number Diff line number Diff line change
Expand Up @@ -483,7 +483,7 @@ impl Layout {
FindQuery::new(table.as_ref(), id, block)
.get_result::<EntityData>(conn)
.optional()?
.map(|entity_data| entity_data.deserialize_with_layout(self, None))
.map(|entity_data| entity_data.deserialize_with_layout(self, None, true))
.transpose()
}

Expand All @@ -509,10 +509,13 @@ impl Layout {
};
let mut entities_for_type: BTreeMap<EntityType, Vec<Entity>> = BTreeMap::new();
for data in query.load::<EntityData>(conn)? {
let entity_type = data.entity_type();
let entity_data: Entity = data.deserialize_with_layout(self, None, true)?;

entities_for_type
.entry(data.entity_type())
.entry(entity_type)
.or_default()
.push(data.deserialize_with_layout(self, None)?);
.push(entity_data);
}
Ok(entities_for_type)
}
Expand Down Expand Up @@ -541,7 +544,7 @@ impl Layout {

for entity_data in inserts_or_updates.into_iter() {
let entity_type = entity_data.entity_type();
let mut data: Entity = entity_data.deserialize_with_layout(self, None)?;
let mut data: Entity = entity_data.deserialize_with_layout(self, None, false)?;
let entity_id = data.id().expect("Invalid ID for entity.");
processed_entities.insert((entity_type.clone(), entity_id.clone()));

Expand Down Expand Up @@ -709,7 +712,7 @@ impl Layout {
.into_iter()
.map(|entity_data| {
entity_data
.deserialize_with_layout(self, parent_type.as_ref())
.deserialize_with_layout(self, parent_type.as_ref(), false)
.map_err(|e| e.into())
})
.collect()
Expand Down
9 changes: 7 additions & 2 deletions store/postgres/src/relational_queries.rs
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ impl ForeignKeyClauses for Column {
}
}

pub trait FromEntityData: std::fmt::Debug {
pub trait FromEntityData: std::fmt::Debug + std::default::Default {
type Value: FromColumnValue;

fn new_entity(typename: String) -> Self;
Expand Down Expand Up @@ -479,14 +479,19 @@ impl EntityData {
self,
layout: &Layout,
parent_type: Option<&ColumnType>,
remove_typename: bool,
) -> Result<T, StoreError> {
let entity_type = EntityType::new(self.entity);
let table = layout.table_for_entity(&entity_type)?;

use serde_json::Value as j;
match self.data {
j::Object(map) => {
let mut out = T::new_entity(entity_type.into_string());
let mut out = if !remove_typename {
T::new_entity(entity_type.into_string())
} else {
T::default()
};
for (key, json) in map {
// Simply ignore keys that do not have an underlying table
// column; those will be things like the block_range that
Expand Down
2 changes: 0 additions & 2 deletions store/postgres/tests/relational.rs
Original file line number Diff line number Diff line change
Expand Up @@ -182,13 +182,11 @@ lazy_static! {
bigInt: big_int.clone(),
bigIntArray: vec![big_int.clone(), (big_int + 1.into()).clone()],
color: "yellow",
__typename: "Scalar",
}
};
static ref EMPTY_NULLABLESTRINGS_ENTITY: Entity = {
entity! {
id: "one",
__typename: "NullableStrings"
}
};
static ref SCALAR: EntityType = EntityType::from("Scalar");
Expand Down
1 change: 0 additions & 1 deletion store/postgres/tests/relational_bytes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@ lazy_static! {
static ref BEEF_ENTITY: Entity = entity! {
id: scalar::Bytes::from_str("deadbeef").unwrap(),
name: "Beef",
__typename: "Thing"
};
static ref NAMESPACE: Namespace = Namespace::new("sgd0815".to_string()).unwrap();
static ref THING: EntityType = EntityType::from("Thing");
Expand Down
3 changes: 0 additions & 3 deletions store/postgres/tests/store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,6 @@ fn get_entity_1() {

let mut expected_entity = Entity::new();

expected_entity.insert("__typename".to_owned(), USER.into());
expected_entity.insert("id".to_owned(), "1".into());
expected_entity.insert("name".to_owned(), "Johnton".into());
expected_entity.insert(
Expand Down Expand Up @@ -380,7 +379,6 @@ fn get_entity_3() {

let mut expected_entity = Entity::new();

expected_entity.insert("__typename".to_owned(), USER.into());
expected_entity.insert("id".to_owned(), "3".into());
expected_entity.insert("name".to_owned(), "Shaqueeena".into());
expected_entity.insert(
Expand Down Expand Up @@ -473,7 +471,6 @@ fn update_existing() {
_ => unreachable!(),
};

new_data.insert("__typename".to_owned(), USER.into());
new_data.insert("bin_name".to_owned(), Value::Bytes(bin_name));
assert_eq!(writable.get(&entity_key).unwrap(), Some(new_data));
})
Expand Down
2 changes: 1 addition & 1 deletion tests/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ port_check = "0.1.5"
anyhow = "1.0"
futures = { version = "0.3", features = ["compat"] }
graph = { path = "../graph" }
tokio = {version = "1.16.1", features = ["rt", "macros", "process"]}
tokio = { version = "1.16.1", features = ["rt", "macros", "process"] }
graph-chain-ethereum = { path = "../chain/ethereum" }
async-stream = "0.3.3"
graph-node = { path = "../node" }
Expand Down
2 changes: 1 addition & 1 deletion tests/integration-tests/data-source-revert/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@
"@graphprotocol/graph-cli": "https://github.com/graphprotocol/graph-cli#main",
"@graphprotocol/graph-ts": "https://github.com/graphprotocol/graph-ts#main"
}
}
}
2 changes: 1 addition & 1 deletion tests/integration-tests/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
"private": true,
"workspaces": [
"api-version-v0-0-4",
"data-source-context",
"data-source-revert",
"fatal-error",
"ganache-reverts",
Expand All @@ -11,6 +10,7 @@
"overloaded-contract-functions",
"poi-for-failed-subgraph",
"remove-then-update",
"typename",
"value-roundtrip"
]
}
33 changes: 33 additions & 0 deletions tests/integration-tests/typename/abis/Contract.abi
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
[
{
"inputs": [],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "uint16",
"name": "x",
"type": "uint16"
}
],
"name": "Trigger",
"type": "event"
},
{
"inputs": [
{
"internalType": "uint16",
"name": "x",
"type": "uint16"
}
],
"name": "emitTrigger",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
}
]
13 changes: 13 additions & 0 deletions tests/integration-tests/typename/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"name": "typename",
"version": "0.1.0",
"scripts": {
"codegen": "graph codegen",
"create:test": "graph create test/typename --node $GRAPH_NODE_ADMIN_URI",
"deploy:test": "graph deploy test/typename --version-label v0.0.1 --ipfs $IPFS_URI --node $GRAPH_NODE_ADMIN_URI"
},
"devDependencies": {
"@graphprotocol/graph-cli": "https://github.com/graphprotocol/graph-cli#main",
"@graphprotocol/graph-ts": "https://github.com/graphprotocol/graph-ts#main"
}
}
3 changes: 3 additions & 0 deletions tests/integration-tests/typename/schema.graphql
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
type ExampleEntity @entity {
id: ID!
}
22 changes: 22 additions & 0 deletions tests/integration-tests/typename/src/mapping.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { ethereum, BigInt } from "@graphprotocol/graph-ts";
import { ExampleEntity } from "../generated/schema";

export function handleBlock(block: ethereum.Block): void {
// Block 0: create the entity and save it. `save()` must not inject a
// synthetic `__typename` attribute into the in-memory entity.
if (block.number == BigInt.fromI32(0)) {
let example = new ExampleEntity("1234");
example.save();
assert(example.get("__typename") == null, "__typename should be null");
// Block 2: save the same ID again, merging with the entity created at
// block 0. Note: block 1 was reorged away; we do nothing there and
// wait for the entity cache to clear.
} else if (block.number == BigInt.fromI32(2)) {
let example = new ExampleEntity("1234");
example.save();
assert(example.get("__typename") == null, "__typename should still be null");
// Block 3: load the merged entity back from the store; the load must
// NOT bring a `__typename` field along either.
} else if (block.number == BigInt.fromI32(3)) {
let example = ExampleEntity.load("1234")!;
assert(example.get("__typename") == null, "__typename should still be null");
}
}
25 changes: 25 additions & 0 deletions tests/integration-tests/typename/subgraph.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
specVersion: 0.0.4
repository: https://github.com/graphprotocol/example-subgraph
schema:
file: ./schema.graphql
features:
- nonFatalErrors
dataSources:
- kind: ethereum/contract
name: Contract
network: test
source:
address: "0xCfEB869F69431e42cdB54A4F4f105C19C080A601"
abi: Contract
mapping:
kind: ethereum/events
apiVersion: 0.0.5
language: wasm/assemblyscript
entities:
- ExampleEntity
abis:
- name: Contract
file: ./abis/Contract.abi
blockHandlers:
- handler: handleBlock
file: ./src/mapping.ts
17 changes: 16 additions & 1 deletion tests/src/fixture.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ pub mod ethereum;

use std::marker::PhantomData;
use std::process::Command;
use std::sync::Mutex;

use crate::helpers::run_cmd;
use anyhow::Error;
Expand Down Expand Up @@ -48,7 +49,13 @@ pub async fn build_subgraph(dir: &str) -> DeploymentHash {
.expect("Could not connect to IPFS, make sure it's running at port 5001");

// Make sure dependencies are present.
run_cmd(Command::new("yarn").current_dir("./integration-tests"));
run_cmd(
Command::new("yarn")
.arg("install")
.arg("--mutex")
.arg("file:.yarn-mutex")
.current_dir("./integration-tests"),
);

// Run codegen.
run_cmd(Command::new("yarn").arg("codegen").current_dir(&dir));
Expand Down Expand Up @@ -99,7 +106,15 @@ pub struct Stores {
chain_store: Arc<ChainStore>,
}

graph::prelude::lazy_static! {
/// Mutex ensuring that only one test at a time runs the
/// `stores` function.
pub static ref STORE_MUTEX: Mutex<()> = Mutex::new(());
}

pub async fn stores(store_config_path: &str) -> Stores {
let _mutex_guard = STORE_MUTEX.lock().unwrap();

let config = {
let config = read_to_string(store_config_path).await.unwrap();
let db_url = match std::env::var("THEGRAPH_STORE_POSTGRES_DIESEL_URL") {
Expand Down
75 changes: 73 additions & 2 deletions tests/tests/runner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use graph::prelude::ethabi::ethereum_types::H256;
use graph::prelude::{SubgraphAssignmentProvider, SubgraphName, SubgraphStore as _};
use slog::{debug, info};

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[tokio::test]
async fn data_source_revert() -> anyhow::Result<()> {
let subgraph_name = SubgraphName::new("data-source-revert")
.expect("Subgraph name must contain only a-z, A-Z, 0-9, '-' and '_'");
Expand Down Expand Up @@ -42,7 +42,78 @@ async fn data_source_revert() -> anyhow::Result<()> {

SubgraphAssignmentProvider::start(provider.as_ref(), ctx.deployment_locator.clone(), None)
.await
.expect("unabel to start subgraph");
.expect("unable to start subgraph");

loop {
tokio::time::sleep(Duration::from_millis(1000)).await;

let block_ptr = match store.least_block_ptr(&hash).await {
Ok(Some(ptr)) => ptr,
res => {
info!(&logger, "{:?}", res);
continue;
}
};

debug!(&logger, "subgraph block: {:?}", block_ptr);

if block_ptr == stop_block {
info!(
&logger,
"subgraph now at block {}, reached stop block {}", block_ptr.number, stop_block
);
break;
}

if !store.is_healthy(&hash).await.unwrap() {
return Err(anyhow!("subgraph failed unexpectedly"));
}
}

assert!(store.is_healthy(&hash).await.unwrap());

fixture::cleanup(&ctx.store, &subgraph_name, &hash);

Ok(())
}

#[tokio::test]
async fn typename() -> anyhow::Result<()> {
let subgraph_name = SubgraphName::new("typename")
.expect("Subgraph name must contain only a-z, A-Z, 0-9, '-' and '_'");

let hash = {
let test_dir = format!("./integration-tests/{}", subgraph_name);
fixture::build_subgraph(&test_dir).await
};

let blocks = {
let block_0 = genesis();
let block_1 = empty_block(block_0.ptr(), test_ptr(1));
let block_1_reorged_ptr = BlockPtr {
number: 1,
hash: H256::from_low_u64_be(12).into(),
};
let block_1_reorged = empty_block(block_0.ptr(), block_1_reorged_ptr);
let block_2 = empty_block(block_1_reorged.ptr(), test_ptr(2));
let block_3 = empty_block(block_2.ptr(), test_ptr(3));
vec![block_0, block_1, block_1_reorged, block_2, block_3]
};

let stop_block = blocks.last().unwrap().block.ptr();

let stores = stores("./integration-tests/config.simple.toml").await;
let chain = chain(blocks, &stores).await;
let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain).await;

let provider = ctx.provider.clone();
let store = ctx.store.clone();

let logger = ctx.logger_factory.subgraph_logger(&ctx.deployment_locator);

SubgraphAssignmentProvider::start(provider.as_ref(), ctx.deployment_locator.clone(), None)
.await
.expect("unable to start subgraph");

loop {
tokio::time::sleep(Duration::from_millis(1000)).await;
Expand Down