add hash<set> & hash<map> (#3051)
nevermore3 committed Oct 13, 2021
1 parent 63f5663 commit e1de5af
Showing 7 changed files with 71 additions and 7 deletions.
11 changes: 11 additions & 0 deletions src/common/datatypes/Map.cpp
@@ -46,3 +46,14 @@ folly::dynamic Map::getMetaData() const {
 }
 
 } // namespace nebula
+
+namespace std {
+std::size_t hash<nebula::Map>::operator()(const nebula::Map& m) const noexcept {
+  size_t seed = 0;
+  for (auto& v : m.kvs) {
+    seed ^= hash<std::string>()(v.first) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
+  }
+  return seed;
+}
+
+} // namespace std
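
The combine step above is the boost-style hash_combine: 0x9e3779b9 is a golden-ratio constant, and the shifts smear the old seed before the next element's hash is folded in. Note that only the keys (`v.first`) feed the Map hash. Below is a minimal standalone sketch of the same arithmetic; the helper name and the sample map are illustrative, not from the repository.

```cpp
#include <cstddef>
#include <functional>
#include <iostream>
#include <map>
#include <string>

// Boost-style combiner: mixes one element's hash into an accumulated seed.
// 0x9e3779b9 is derived from the golden ratio; the shifts spread the old seed's bits.
template <typename T>
void hash_combine(std::size_t& seed, const T& value) {
  seed ^= std::hash<T>()(value) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}

int main() {
  std::map<std::string, int> kvs{{"end_year", 2018}, {"start_year", 2002}};
  std::size_t seed = 0;
  // Same shape as hash<nebula::Map>::operator(): only the keys are combined.
  for (const auto& kv : kvs) {
    hash_combine(seed, kv.first);
  }
  std::cout << seed << '\n';
  return 0;
}
```

Because the seed is threaded through the loop, the result is order-sensitive; the sketch uses a std::map so equal contents always iterate, and therefore hash, the same way.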
8 changes: 8 additions & 0 deletions src/common/datatypes/Map.h
@@ -70,4 +70,12 @@ struct Map {
 inline std::ostream& operator<<(std::ostream& os, const Map& m) { return os << m.toString(); }
 
 } // namespace nebula
+
+namespace std {
+template <>
+struct hash<nebula::Map> {
+  std::size_t operator()(const nebula::Map& m) const noexcept;
+};
+
+} // namespace std
 #endif // COMMON_DATATYPES_MAP_H_
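
The header only carries the declaration of the `std::hash<nebula::Map>` specialization; the definition lives in Map.cpp above, so the hashing logic is compiled once and the header stays light. A hedged sketch of the same split for a hypothetical `Thing` type (everything below is illustrative, not nebula API), with the payoff that the type then drops straight into `std::unordered_set`:

```cpp
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_set>

// --- header (e.g. thing.h) ---------------------------------------------
struct Thing {
  std::string name;
  int rank = 0;
  bool operator==(const Thing& rhs) const { return name == rhs.name && rank == rhs.rank; }
};

namespace std {
template <>
struct hash<Thing> {
  std::size_t operator()(const Thing& t) const noexcept;  // defined in the .cpp
};
}  // namespace std

// --- source (e.g. thing.cpp) --------------------------------------------
namespace std {
std::size_t hash<Thing>::operator()(const Thing& t) const noexcept {
  std::size_t seed = 0;
  seed ^= hash<std::string>()(t.name) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
  seed ^= hash<int>()(t.rank) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
  return seed;
}
}  // namespace std

int main() {
  // With the specialization visible, Thing works in unordered containers.
  std::unordered_set<Thing> seen;
  seen.insert(Thing{"Spurs", 1});
  seen.insert(Thing{"Spurs", 1});
  std::cout << seen.size() << '\n';  // 1: the duplicate collapses
  return 0;
}
```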
11 changes: 11 additions & 0 deletions src/common/datatypes/Set.cpp
@@ -43,3 +43,14 @@ folly::dynamic Set::getMetaData() const {
 }
 
 } // namespace nebula
+
+namespace std {
+std::size_t hash<nebula::Set>::operator()(const nebula::Set& s) const noexcept {
+  size_t seed = 0;
+  for (auto& v : s.values) {
+    seed ^= hash<nebula::Value>()(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
+  }
+  return seed;
+}
+
+} // namespace std
9 changes: 8 additions & 1 deletion src/common/datatypes/Set.h
@@ -54,6 +54,13 @@ struct Set {
 };
 
 inline std::ostream& operator<<(std::ostream& os, const Set& s) { return os << s.toString(); }
-
 } // namespace nebula
+
+namespace std {
+template <>
+struct hash<nebula::Set> {
+  std::size_t operator()(const nebula::Set& s) const noexcept;
+};
+
+} // namespace std
 #endif // COMMON_DATATYPES_SET_H_
4 changes: 2 additions & 2 deletions src/common/datatypes/Value.cpp
@@ -69,10 +69,10 @@ std::size_t hash<nebula::Value>::operator()(const nebula::Value& v) const noexcept {
       return hash<nebula::Geography>()(v.getGeography());
     }
     case nebula::Value::Type::MAP: {
-      LOG(FATAL) << "Hash for MAP has not been implemented";
+      return hash<nebula::Map>()(v.getMap());
     }
     case nebula::Value::Type::SET: {
-      LOG(FATAL) << "Hash for SET has not been implemented";
+      return hash<nebula::Set>()(v.getSet());
     }
     case nebula::Value::Type::DATASET: {
       LOG(FATAL) << "Hash for DATASET has not been implemented";
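
With the two LOG(FATAL) branches gone, hashing a Value that holds a MAP or SET no longer aborts; hash<nebula::Value> simply forwards to the new specializations, so such values can live in the hash-based containers that DISTINCT and collect_set rely on, which is what the feature changes below exercise. A rough standalone analogue of that per-type dispatch (a toy variant, not the nebula Value API; std::hash already handles std::variant, the explicit visitor only mirrors the switch in Value.cpp):

```cpp
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <type_traits>
#include <unordered_set>
#include <variant>

// Toy analogue of a tagged value type: the hash dispatches on the active
// alternative, the way hash<nebula::Value> switches on Value::Type.
using ToyValue = std::variant<int64_t, std::string>;

struct ToyValueHash {
  std::size_t operator()(const ToyValue& v) const {
    return std::visit(
        [](const auto& alt) {
          using Alt = std::decay_t<decltype(alt)>;
          return std::hash<Alt>()(alt);  // per-type forwarding
        },
        v);
  }
};

int main() {
  // DISTINCT-style dedup: equal values hash equal, so duplicates collapse.
  std::unordered_set<ToyValue, ToyValueHash> distinct;
  distinct.insert(ToyValue{std::string("Spurs")});
  distinct.insert(ToyValue{std::string("Spurs")});
  distinct.insert(ToyValue{int64_t{95}});
  std::cout << distinct.size() << '\n';  // 2
  return 0;
}
```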
20 changes: 16 additions & 4 deletions tests/tck/features/go/GoYieldVertexEdge.feature
@@ -226,17 +226,29 @@ Feature: Go Yield Vertex And Edge Sentence
       """
     Then a SyntaxError should be raised at runtime: syntax error near `OVER'
 
-  @skip
-  # reason we not support hash<Map> hash<set> hash<DataSet> from now on. line 67 in Value.cpp
-  Scenario: distinct map
+  Scenario: distinct map and set
     When executing query:
       """
       GO FROM "Boris Diaw" OVER like YIELD dst(edge) as id |
       GO FROM $-.id OVER like YIELD dst(edge) as id |
       GO FROM $-.id OVER serve YIELD DISTINCT dst(edge) as dst, edge as e, properties(edge) as props
       """
     Then the result should be, in any order, with relax comparison:
-      | dst | e | props |
+      | dst | e | props |
+      | "Spurs" | [:serve "Manu Ginobili"->"Spurs" @0 {end_year: 2018, start_year: 2002}] | {end_year: 2018, start_year: 2002} |
+      | "Spurs" | [:serve "Tim Duncan"->"Spurs" @0 {end_year: 2016, start_year: 1997}] | {end_year: 2016, start_year: 1997} |
+      | "Hornets" | [:serve "Tony Parker"->"Hornets" @0 {end_year: 2019, start_year: 2018}] | {end_year: 2019, start_year: 2018} |
+      | "Spurs" | [:serve "Tony Parker"->"Spurs" @0 {end_year: 2018, start_year: 1999}] | {end_year: 2018, start_year: 1999} |
+      | "Spurs" | [:serve "LaMarcus Aldridge"->"Spurs" @0 {end_year: 2019, start_year: 2015}] | {end_year: 2019, start_year: 2015} |
+      | "Trail Blazers" | [:serve "LaMarcus Aldridge"->"Trail Blazers" @0 {end_year: 2015, start_year: 2006}] | {end_year: 2015, start_year: 2006} |
+    When executing query:
+      """
+      GO 2 STEPS FROM "Tim Duncan" OVER like YIELD dst(edge) as id |
+      YIELD DISTINCT collect($-.id) as a, collect_set($-.id) as b
+      """
+    Then the result should be, in any order, with relax comparison:
+      | a | b |
+      | ["Tim Duncan", "LaMarcus Aldridge", "Manu Ginobili", "Tim Duncan"] | {"Manu Ginobili", "LaMarcus Aldridge", "Tim Duncan"} |
 
   Scenario: distinct
     When executing query:
15 changes: 15 additions & 0 deletions tests/tck/features/match/Base.feature
@@ -211,6 +211,21 @@ Feature: Basic match
       | ("Paul Gasol" :player{age: 38, name: "Paul Gasol"}) | [:like "Paul Gasol"->"Marc Gasol" @0 {likeness: 99}] | ("Marc Gasol" :player{age: 34, name: "Marc Gasol"}) |
       | ("Yao Ming" :player{age: 38, name: "Yao Ming"}) | [:like "Yao Ming"->"Shaquile O'Neal" @0 {likeness: 90}] | ("Shaquile O'Neal" :player{age: 47, name: "Shaquile O'Neal"}) |
       | ("Yao Ming" :player{age: 38, name: "Yao Ming"}) | [:like "Yao Ming"->"Tracy McGrady" @0 {likeness: 90}] | ("Tracy McGrady" :player{age: 39, name: "Tracy McGrady"}) |
+    When executing query:
+      """
+      MATCH (v:player)-[e:like]->(v2) where id(v) == "Tim Duncan" RETURN DISTINCT properties(e) as props, e
+      """
+    Then the result should be, in any order, with relax comparison:
+      | props | e |
+      | {likeness: 95} | [:like "Tim Duncan"->"Manu Ginobili" @0 {likeness: 95}] |
+      | {likeness: 95} | [:like "Tim Duncan"->"Tony Parker" @0 {likeness: 95}] |
+    When executing query:
+      """
+      MATCH (v:player)-[e:like]->(v2) where id(v) == "Tim Duncan" RETURN DISTINCT properties(e) as props
+      """
+    Then the result should be, in any order, with relax comparison:
+      | props |
+      | {likeness: 95} |
 
   Scenario: two steps
     When executing query:
