rename R_ to dd_id_
alldefector committed Jun 15, 2016
1 parent d5b50c1 commit df296de
Showing 9 changed files with 109 additions and 108 deletions.
15 changes: 8 additions & 7 deletions src/main/scala/org/deepdive/ddlog/DeepDiveLogCompiler.scala
@@ -471,29 +471,28 @@ class QueryCompiler(cq : ConjunctiveQuery, hackFrom: List[String] = Nil, hackWhe
case (x: Atom, i) if schemaDeclarationByRelationName get x.name exists (_.isQuery) =>
// TODO maybe TableAlias can be useful here or we can completely get rid of it?
// variable id column
s"""R_${headAsBody indexOf x}.${
s"""${deepdiveVariableIdColumn}_${headAsBody indexOf x}.${
deepdiveVariableIdColumn
} AS "${x.name}.R${headAsBody indexOf x}.${deepdiveVariableIdColumn}\"""" ::
List(
} AS "${x.name}.R${headAsBody indexOf x}.${deepdiveVariableIdColumn}\"""" :: (
// project variable key columns as well (to reduce unnecssary joins)
schemaDeclarationByRelationName get x.name map (_.keyColumns map {
case term => s"""R${headAsBody indexOf x}.${term
} AS "${x.name}.R${headAsBody indexOf x}.${term}\""""
}) get
-).flatten ::: List(
+) ++ (
// project category value columns as well (to reduce unnecssary joins)
schemaDeclarationByRelationName get x.name map (_.categoricalColumns map {
case term => s"""R${headAsBody indexOf x}.${term
} AS "${x.name}.R${headAsBody indexOf x}.${term}\""""
}) get
-).flatten
+)

case _ => List.empty
}

val internalVarTables = headAsBody.zipWithIndex flatMap {
case (x: Atom, i) if schemaDeclarationByRelationName get x.name exists (_.isQuery) =>
List(s"""${deepdivePrefixForVariablesIdsTable}${x.name} AS R_${headAsBody indexOf x}""")
List(s"""${deepdivePrefixForVariablesIdsTable}${x.name} AS ${deepdiveVariableIdColumn}_${headAsBody indexOf x}""")
case _ => List.empty
}

@@ -502,7 +501,7 @@ class QueryCompiler(cq : ConjunctiveQuery, hackFrom: List[String] = Nil, hackWhe
List(
// project variable key columns as well (to reduce unnecssary joins)
schemaDeclarationByRelationName get x.name map (_.keyColumns map {
case term => s"""R${headAsBody indexOf x}.${term} = R_${headAsBody indexOf x}.${term}"""
case term => s"""R${headAsBody indexOf x}.${term} = ${deepdiveVariableIdColumn}_${headAsBody indexOf x}.${term}"""
}) get
)
case _ => List.empty
@@ -515,6 +514,8 @@ class QueryCompiler(cq : ConjunctiveQuery, hackFrom: List[String] = Nil, hackWhe
// This is achieved by puting head atoms into the body.
val fakeBody = headAsBody ++ cqBody
val fakeCQ = stmt.q.copy(bodies = List(fakeBody))
+
+// TODO XXX: Fix the `internal` hack below
val qc = new QueryCompiler(fakeCQ, internalVarTables, internalVarJoinConds)

// weight columns
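The files below are regenerated expected test outputs. For orientation, here is a minimal, hypothetical Scala sketch of the aliasing scheme the compiler change above moves to; it assumes deepdiveVariableIdColumn is "dd_id" (consistent with the expected outputs below) and is an illustration, not code from the repository:

// Hypothetical illustration of the alias rename; not part of the commit.
// Relation atoms keep their R<i> aliases, while the per-variable id tables
// (dd_variables_<relation>) are now aliased dd_id_<i> instead of R_<i>, so a
// variable-id table alias can no longer be confused with a relation alias R<i>.
object AliasSketch {
  def relationAlias(i: Int): String   = s"R$i"
  def variableIdAlias(i: Int): String = s"dd_id_$i" // previously s"R_$i"

  def main(args: Array[String]): Unit = {
    // Mirrors the generated SQL seen in the expected outputs below, e.g.
    //   FROM chunk R0, dd_variables_chunk AS dd_id_0
    //   WHERE R0.sent_id = dd_id_0.sent_id
    println(s"FROM chunk ${relationAlias(0)}, dd_variables_chunk AS ${variableIdAlias(0)}")
  }
}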
40 changes: 20 additions & 20 deletions test/expected-output-test/chunking_example/compile.expected
@@ -80,7 +80,7 @@ WHERE R1.sent_id = R0.sent_id

deepdive.inference.factors.inf_istrue_chunk {
input_query: """
-SELECT R_0.dd_id AS "chunk.R0.dd_id"
+SELECT dd_id_0.dd_id AS "chunk.R0.dd_id"
, R0.sent_id AS "chunk.R0.sent_id"
, R0.word_id AS "chunk.R0.word_id"
, R0.tag AS "chunk.R0.tag"
@@ -90,14 +90,14 @@ FROM chunk R0
, words R1
, tags R2
, word_features R3
-, dd_variables_chunk AS R_0
+, dd_variables_chunk AS dd_id_0
WHERE R1.sent_id = R0.sent_id
AND R1.word_id = R0.word_id
AND R2.tag = R0.tag
AND R3.sent_id = R0.sent_id
AND R3.word_id = R0.word_id
-AND R0.sent_id = R_0.sent_id
-AND R0.word_id = R_0.word_id
+AND R0.sent_id = dd_id_0.sent_id
+AND R0.word_id = dd_id_0.word_id
"""
function: """Multinomial(chunk.R0.dd_label)"""
weight: """?(dd_weight_column_0, dd_weight_column_1)"""
@@ -112,11 +112,11 @@ input_relations: [

deepdive.inference.factors.inf_and_chunk_chunk {
input_query: """
-SELECT R_0.dd_id AS "chunk.R0.dd_id"
+SELECT dd_id_0.dd_id AS "chunk.R0.dd_id"
, R0.sent_id AS "chunk.R0.sent_id"
, R0.word_id AS "chunk.R0.word_id"
, R0.tag AS "chunk.R0.tag"
-, R_1.dd_id AS "chunk.R1.dd_id"
+, dd_id_1.dd_id AS "chunk.R1.dd_id"
, R1.sent_id AS "chunk.R1.sent_id"
, R1.word_id AS "chunk.R1.word_id"
, R1.tag AS "chunk.R1.tag"
@@ -128,8 +128,8 @@ FROM chunk R0
, words R3
, tags R4
, tags R5
-, dd_variables_chunk AS R_0
-, dd_variables_chunk AS R_1
+, dd_variables_chunk AS dd_id_0
+, dd_variables_chunk AS dd_id_1
WHERE R1.sent_id = R0.sent_id
AND R2.sent_id = R0.sent_id
AND R2.word_id = R0.word_id
@@ -138,10 +138,10 @@ WHERE R1.sent_id = R0.sent_id
AND R4.tag = R0.tag
AND R5.tag = R1.tag
AND R1.word_id = (R0.word_id + 1)
-AND R0.sent_id = R_0.sent_id
-AND R0.word_id = R_0.word_id
-AND R1.sent_id = R_1.sent_id
-AND R1.word_id = R_1.word_id
+AND R0.sent_id = dd_id_0.sent_id
+AND R0.word_id = dd_id_0.word_id
+AND R1.sent_id = dd_id_1.sent_id
+AND R1.word_id = dd_id_1.word_id
"""
function: """Multinomial(chunk.R0.dd_label, chunk.R1.dd_label)"""
weight: """?(dd_weight_column_0, dd_weight_column_1)"""
@@ -155,11 +155,11 @@ input_relations: [

deepdive.inference.factors.inf1_and_chunk_chunk {
input_query: """
-SELECT R_0.dd_id AS "chunk.R0.dd_id"
+SELECT dd_id_0.dd_id AS "chunk.R0.dd_id"
, R0.sent_id AS "chunk.R0.sent_id"
, R0.word_id AS "chunk.R0.word_id"
, R0.tag AS "chunk.R0.tag"
-, R_1.dd_id AS "chunk.R1.dd_id"
+, dd_id_1.dd_id AS "chunk.R1.dd_id"
, R1.sent_id AS "chunk.R1.sent_id"
, R1.word_id AS "chunk.R1.word_id"
, R1.tag AS "chunk.R1.tag"
@@ -171,8 +171,8 @@ FROM chunk R0
, words R3
, tags R4
, tags R5
-, dd_variables_chunk AS R_0
-, dd_variables_chunk AS R_1
+, dd_variables_chunk AS dd_id_0
+, dd_variables_chunk AS dd_id_1
WHERE R1.sent_id = R0.sent_id
AND R2.sent_id = R0.sent_id
AND R2.word_id = R0.word_id
@@ -182,10 +182,10 @@ WHERE R1.sent_id = R0.sent_id
AND R5.tag = R1.tag
AND R2.tag IS NOT NULL
AND R0.word_id < R1.word_id
-AND R0.sent_id = R_0.sent_id
-AND R0.word_id = R_0.word_id
-AND R1.sent_id = R_1.sent_id
-AND R1.word_id = R_1.word_id
+AND R0.sent_id = dd_id_0.sent_id
+AND R0.word_id = dd_id_0.word_id
+AND R1.sent_id = dd_id_1.sent_id
+AND R1.word_id = dd_id_1.word_id
"""
function: """Multinomial(chunk.R0.dd_label, chunk.R1.dd_label)"""
weight: """?(dd_weight_column_0, dd_weight_column_1)"""
24 changes: 12 additions & 12 deletions test/expected-output-test/factor_functions/compile.expected
@@ -6,20 +6,20 @@ Q.dd_label: Boolean

deepdive.inference.factors.inf_imply_Q_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
-, R_1.dd_id AS "Q.R1.dd_id"
+, dd_id_1.dd_id AS "Q.R1.dd_id"
, R1.x AS "Q.R1.x"
, R1.x AS "dd_weight_column_0"
FROM Q R0
, Q R1
, R R2
-, dd_variables_Q AS R_0
-, dd_variables_Q AS R_1
+, dd_variables_Q AS dd_id_0
+, dd_variables_Q AS dd_id_1
WHERE R2.a = R0.x
AND R2.b = R1.x
-AND R0.x = R_0.x
-AND R1.x = R_1.x
+AND R0.x = dd_id_0.x
+AND R1.x = dd_id_1.x
"""
function: """Imply(Q.R0.dd_label, Q.R1.dd_label)"""
weight: """?(dd_weight_column_0)"""
@@ -32,20 +32,20 @@ input_relations: [

deepdive.inference.factors.inf_equal_Q_not_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
-, R_1.dd_id AS "Q.R1.dd_id"
+, dd_id_1.dd_id AS "Q.R1.dd_id"
, R1.x AS "Q.R1.x"
, R1.x AS "dd_weight_column_0"
FROM Q R0
, Q R1
, R R2
-, dd_variables_Q AS R_0
-, dd_variables_Q AS R_1
+, dd_variables_Q AS dd_id_0
+, dd_variables_Q AS dd_id_1
WHERE R2.a = R0.x
AND R2.b = R1.x
-AND R0.x = R_0.x
-AND R1.x = R_1.x
+AND R0.x = dd_id_0.x
+AND R1.x = dd_id_1.x
"""
function: """Equal(Q.R0.dd_label, !Q.R1.dd_label)"""
weight: """?(dd_weight_column_0)"""
12 changes: 6 additions & 6 deletions test/expected-output-test/logical_rules/compile.expected
@@ -36,18 +36,18 @@ materialize: false

deepdive.inference.factors.inf_imply_P_Q {
input_query: """
-SELECT R_0.dd_id AS "P.R0.dd_id"
+SELECT dd_id_0.dd_id AS "P.R0.dd_id"
, R0.x AS "P.R0.x"
-, R_1.dd_id AS "Q.R1.dd_id"
+, dd_id_1.dd_id AS "Q.R1.dd_id"
, R1.x AS "Q.R1.x"
FROM P R0
, Q R1
-, dd_variables_P AS R_0
-, dd_variables_Q AS R_1
+, dd_variables_P AS dd_id_0
+, dd_variables_Q AS dd_id_1
WHERE R1.x = R0.x
AND true
-AND R0.x = R_0.x
-AND R1.x = R_1.x
+AND R0.x = dd_id_0.x
+AND R1.x = dd_id_1.x
"""
function: """Imply(P.R0.dd_label, Q.R1.dd_label)"""
weight: """1"""
(changes to another expected-output test file; its file name was not captured)
@@ -199,15 +199,15 @@ WHERE R0.a > 0

deepdive.inference.factors.inf_istrue_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
, R0.x AS "dd_weight_column_0"
FROM Q R0
, S R1
-, dd_variables_Q AS R_0
+, dd_variables_Q AS dd_id_0
WHERE R1.a = R0.x
AND R0.x > 1000
-AND R0.x = R_0.x
+AND R0.x = dd_id_0.x
"""
function: """Imply(Q.R0.dd_label)"""
weight: """?(dd_weight_column_0)"""
@@ -220,14 +220,14 @@ input_relations: [

deepdive.inference.factors.inf1_istrue_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
FROM Q R0
, S R1
-, dd_variables_Q AS R_0
+, dd_variables_Q AS dd_id_0
WHERE R1.a = R0.x
AND R0.x = 0
-AND R0.x = R_0.x
+AND R0.x = dd_id_0.x
"""
function: """Imply(Q.R0.dd_label)"""
weight: """1.0"""
@@ -240,14 +240,14 @@ input_relations: [

deepdive.inference.factors.inf_istrue_not_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
FROM Q R0
, S R1
-, dd_variables_Q AS R_0
+, dd_variables_Q AS dd_id_0
WHERE R1.a = R0.x
AND R0.x < 1000
-AND R0.x = R_0.x
+AND R0.x = dd_id_0.x
"""
function: """Imply(!Q.R0.dd_label)"""
weight: """?"""
@@ -260,24 +260,24 @@ input_relations: [

deepdive.inference.factors.inf_imply_Q_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
-, R_1.dd_id AS "Q.R1.dd_id"
+, dd_id_1.dd_id AS "Q.R1.dd_id"
, R1.x AS "Q.R1.x"
, R0.x AS "dd_weight_column_0"
FROM Q R0
, Q R1
, S R2
, R R3
, S R4
-, dd_variables_Q AS R_0
-, dd_variables_Q AS R_1
+, dd_variables_Q AS dd_id_0
+, dd_variables_Q AS dd_id_1
WHERE R2.a = R0.x
AND R3.a = R0.x
AND R3.b = R1.x
AND R4.a = R1.x
-AND R0.x = R_0.x
-AND R1.x = R_1.x
+AND R0.x = dd_id_0.x
+AND R1.x = dd_id_1.x
"""
function: """Imply(Q.R0.dd_label, Q.R1.dd_label)"""
weight: """?(dd_weight_column_0)"""
@@ -291,24 +291,24 @@ input_relations: [

deepdive.inference.factors.inf1_imply_Q_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
-, R_1.dd_id AS "Q.R1.dd_id"
+, dd_id_1.dd_id AS "Q.R1.dd_id"
, R1.x AS "Q.R1.x"
FROM Q R0
, Q R1
, S R2
, R R3
, S R4
-, dd_variables_Q AS R_0
-, dd_variables_Q AS R_1
+, dd_variables_Q AS dd_id_0
+, dd_variables_Q AS dd_id_1
WHERE R2.a = R0.x
AND R3.a = R0.x
AND R3.b = R1.x
AND R4.a = R1.x
AND (R0.x + R1.x) < 1000
-AND R0.x = R_0.x
-AND R1.x = R_1.x
+AND R0.x = dd_id_0.x
+AND R1.x = dd_id_1.x
"""
function: """Imply(Q.R0.dd_label, Q.R1.dd_label)"""
weight: """-10.0"""
@@ -322,24 +322,24 @@ input_relations: [

deepdive.inference.factors.inf2_imply_Q_Q {
input_query: """
-SELECT R_0.dd_id AS "Q.R0.dd_id"
+SELECT dd_id_0.dd_id AS "Q.R0.dd_id"
, R0.x AS "Q.R0.x"
-, R_1.dd_id AS "Q.R1.dd_id"
+, dd_id_1.dd_id AS "Q.R1.dd_id"
, R1.x AS "Q.R1.x"
FROM Q R0
, Q R1
, S R2
, R R3
, S R4
-, dd_variables_Q AS R_0
-, dd_variables_Q AS R_1
+, dd_variables_Q AS dd_id_0
+, dd_variables_Q AS dd_id_1
WHERE R2.a = R0.x
AND R3.a = R0.x
AND R3.b = R1.x
AND R4.a = R1.x
AND (R0.x + R1.x) > 1000
-AND R0.x = R_0.x
-AND R1.x = R_1.x
+AND R0.x = dd_id_0.x
+AND R1.x = dd_id_1.x
"""
function: """Imply(Q.R0.dd_label, Q.R1.dd_label)"""
weight: """10.0"""
