
Commit

add, implement and use the Unifier trait, use it to improve dif:attribute_goals//2 (#1382, #1433)
mthom committed Feb 26, 2023
1 parent 7ffb40e commit 4961dba
Showing 9 changed files with 967 additions and 1,120 deletions.
12 changes: 12 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -63,6 +63,7 @@ hyper = { version = "0.14", features = ["full"] }
 hyper-tls = "0.5.0"
 tokio = { version = "1.24.2", features = ["full"] }
 futures = "0.3"
+derive_deref = "1.1.1"
 
 [dev-dependencies]
 assert_cmd = "1.0.3"
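
The manifest change adds the derive_deref crate, presumably used by the new Unifier-related code in files not excerpted here. For reference, derive_deref provides #[derive(Deref, DerefMut)] for single-field newtype structs; the sketch below is illustrative only, and the Wrapper type in it is invented, not taken from the commit.

// Illustrative sketch: derive_deref derives Deref/DerefMut for a newtype
// struct with a single field. Wrapper is a made-up example type.
use derive_deref::{Deref, DerefMut};

#[derive(Deref, DerefMut)]
struct Wrapper(Vec<u32>);

fn main() {
    let mut w = Wrapper(vec![1, 2, 3]);
    w.push(4);              // DerefMut forwards &mut Wrapper to &mut Vec<u32>
    assert_eq!(w.len(), 4); // Deref forwards &Wrapper to &Vec<u32>
}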
4 changes: 4 additions & 0 deletions build/instructions_template.rs
@@ -568,6 +568,8 @@ enum SystemClauseType {
     DeleteFromAttributedVarList,
     #[strum_discriminants(strum(props(Arity = "1", Name = "$delete_all_attributes_from_var")))]
     DeleteAllAttributesFromVar,
+    #[strum_discriminants(strum(props(Arity = "2", Name = "$terms_unify")))]
+    TermsUnify,
     REPL(REPLCodePtr),
 }
 
@@ -1630,6 +1632,7 @@ fn generate_instruction_preface() -> TokenStream {
             &Instruction::CallPutToAttributedVarList(_) |
             &Instruction::CallDeleteFromAttributedVarList(_) |
             &Instruction::CallDeleteAllAttributesFromVar(_) |
+            &Instruction::CallTermsUnify(_) |
             &Instruction::CallFetchGlobalVar(_) |
             &Instruction::CallFirstStream(_) |
             &Instruction::CallFlushOutput(_) |
@@ -1846,6 +1849,7 @@ fn generate_instruction_preface() -> TokenStream {
             &Instruction::ExecutePutToAttributedVarList(_) |
             &Instruction::ExecuteDeleteFromAttributedVarList(_) |
             &Instruction::ExecuteDeleteAllAttributesFromVar(_) |
+            &Instruction::ExecuteTermsUnify(_) |
             &Instruction::ExecuteFetchGlobalVar(_) |
             &Instruction::ExecuteFirstStream(_) |
             &Instruction::ExecuteFlushOutput(_) |
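
The new TermsUnify variant is registered through strum's property mechanism: the Arity and Name props are what tie the Prolog-level name '$terms_unify' (arity 2) to the generated Call/Execute instruction variants. The standalone sketch below shows how such props can be read back; it uses a plain enum for simplicity, whereas the build script attaches the props to a generated discriminants enum via #[strum_discriminants(...)].

// Simplified illustration of strum props (not the actual build script).
use strum::EnumProperty as _;
use strum_macros::EnumProperty;

#[derive(EnumProperty)]
enum SystemClause {
    #[strum(props(Arity = "2", Name = "$terms_unify"))]
    TermsUnify,
}

fn main() {
    let c = SystemClause::TermsUnify;
    // The code generator can look properties up by key when emitting code.
    assert_eq!(c.get_str("Name"), Some("$terms_unify"));
    assert_eq!(c.get_str("Arity"), Some("2"));
}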
16 changes: 13 additions & 3 deletions src/lib/dif.pl
@@ -23,13 +23,22 @@
     put_dif_att(Var, X, Y),
     dif_set_variables(Vars, X, Y).
 
+filter_non_unifiable_goals([]) --> [].
+filter_non_unifiable_goals([(X \== Y)|Goals]) -->
+    (   { '$terms_unify'(X, Y) } ->
+        [(X \== Y)]
+    ;   []
+    ),
+    filter_non_unifiable_goals(Goals).
+
 append_goals([], _).
 append_goals([Var|Vars], Goals) :-
     (   get_atts(Var, +dif(VarGoals)) ->
         append(Goals, VarGoals, NewGoals0),
-        sort(NewGoals0, NewGoals)
-    ;   NewGoals = Goals
+        sort(NewGoals0, NewGoals1)
+    ;   NewGoals1 = Goals
     ),
+    phrase(filter_non_unifiable_goals(NewGoals1), NewGoals),
     put_atts(Var, +dif(NewGoals)),
     append_goals(Vars, Goals).
 
@@ -68,7 +77,8 @@
 
 gather_dif_goals(_, []) --> [].
 gather_dif_goals(V, [(X \== Y) | Goals]) -->
-    (   { term_variables(X, [V0 | _]),
+    (   { '$terms_unify'(X,Y),
+          term_variables(X, [V0 | _]),
           V == V0 } ->
         [dif:dif(X, Y)]
     ;   []
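
The intent of filter_non_unifiable_goals//1, and of the extra '$terms_unify'(X, Y) guard in gather_dif_goals//2, is that a pending (X \== Y) goal is kept only while X and Y can still unify; once they have become non-unifiable, the dif/2 constraint is entailed and should neither be re-attached to the variables nor reported as a residual goal. Because the guard sits in an if-then-else whose "then" branch keeps the original (X \== Y) term, '$terms_unify'/2 evidently acts as a pure unifiability test, comparable to \+ X \= Y: it succeeds exactly when the terms can unify, without leaving bindings behind. Schematically (illustrative queries; the exact toplevel output is not reproduced here):

?- dif(X-Y, a-b), X = c.
   % c-Y and a-b can no longer unify, so the constraint is entailed:
   % only X = c is reported, with no dif/2 residual goal.

?- dif(X-Y, a-b), X = a.
   % still undecided (it now hinges on Y), so X = a is reported
   % together with a residual dif/2 goal.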
8 changes: 8 additions & 0 deletions src/machine/dispatch.rs
@@ -5215,6 +5215,14 @@ impl Machine {
                 self.delete_all_attributes_from_var();
                 self.machine_st.p = self.machine_st.cp;
             }
+            &Instruction::CallTermsUnify(_) => {
+                self.terms_unify();
+                step_or_fail!(self, self.machine_st.p += 1);
+            }
+            &Instruction::ExecuteTermsUnify(_) => {
+                self.terms_unify();
+                step_or_fail!(self, self.machine_st.p = self.machine_st.cp);
+            }
         }
     }
 
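
The dispatch arms above delegate to a terms_unify() method whose body is not part of this excerpt (the commit touches nine files, only a few of which appear here). As noted for dif.pl, the predicate has to test unifiability without leaving bindings behind. The toy, self-contained sketch below illustrates that behaviour with trail-based undo; the term representation and every name in it are invented for illustration and are not Scryer's Unifier trait or machine types.

// Toy sketch (not Scryer code): attempt unification, recording variable
// bindings on a trail, then roll them back so the test leaves no bindings.
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Term {
    Var(usize),
    Atom(&'static str),
    Compound(&'static str, Vec<Term>),
}

#[derive(Default)]
struct Bindings {
    map: HashMap<usize, Term>,
    trail: Vec<usize>,
}

impl Bindings {
    // Follow bindings until an unbound variable or a non-variable term.
    fn resolve(&self, t: &Term) -> Term {
        match t {
            Term::Var(v) => match self.map.get(v) {
                Some(bound) => self.resolve(bound),
                None => t.clone(),
            },
            _ => t.clone(),
        }
    }

    fn bind(&mut self, v: usize, t: Term) {
        self.map.insert(v, t);
        self.trail.push(v); // remember the binding so it can be undone
    }

    fn undo_to(&mut self, mark: usize) {
        while self.trail.len() > mark {
            let v = self.trail.pop().unwrap();
            self.map.remove(&v);
        }
    }
}

// Plain first-order unification (no occurs check, as in standard Prolog).
fn unify(b: &mut Bindings, x: &Term, y: &Term) -> bool {
    let (x, y) = (b.resolve(x), b.resolve(y));
    match (x, y) {
        (Term::Var(v), t) | (t, Term::Var(v)) => {
            if t != Term::Var(v) {
                b.bind(v, t);
            }
            true
        }
        (Term::Atom(a), Term::Atom(c)) => a == c,
        (Term::Compound(f, xs), Term::Compound(g, ys)) => {
            f == g && xs.len() == ys.len()
                && xs.iter().zip(ys.iter()).all(|(x, y)| unify(b, x, y))
        }
        _ => false,
    }
}

// Succeed iff x and y are unifiable, always rolling the bindings back.
fn terms_unify(b: &mut Bindings, x: &Term, y: &Term) -> bool {
    let mark = b.trail.len();
    let ok = unify(b, x, y);
    b.undo_to(mark);
    ok
}

fn main() {
    let mut b = Bindings::default();
    let fx = Term::Compound("f", vec![Term::Var(0)]);
    let fa = Term::Compound("f", vec![Term::Atom("a")]);
    let ga = Term::Compound("g", vec![Term::Atom("a")]);
    assert!(terms_unify(&mut b, &fx, &fa));  // f(X) and f(a) are unifiable...
    assert!(b.map.is_empty());               // ...but X is left unbound.
    assert!(!terms_unify(&mut b, &fx, &ga)); // f(X) and g(a) are not.
}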

1 comment on commit 4961dba


@UWN UWN commented on 4961dba Feb 26, 2023


This only accumulates technical debt. As long as there is no GC we cannot see the problem, but then it will spill out again.

Projection is there for complex operations, as in CLP(Q). But with dif/2 there are no complex operations.

Ideally there would be a clean API for all of this. The attributed-variable (atv) interface is too low-level.
