Commit

Merge branch 'Dispatch2' into feyncalcfixes
mmatera committed May 3, 2021
2 parents c6ab8a1 + 916a8a7 commit 3116187
Showing 1 changed file with 91 additions and 10 deletions.
mathics/builtin/patterns.py
@@ -34,11 +34,13 @@


from mathics.version import __version__ # noqa used in loading to check consistency.
from mathics.builtin.base import Builtin, BinaryOperator, PostfixOperator
from mathics.builtin.base import Builtin, BinaryOperator, PostfixOperator, AtomBuiltin
from mathics.builtin.base import PatternObject, PatternError
from mathics.builtin.lists import python_levelspec, InvalidLevelspecError

from mathics.core.expression import (
    Atom,
    String,
    Symbol,
    Expression,
    Number,
@@ -103,13 +105,16 @@ class RuleDelayed(BinaryOperator):


def create_rules(rules_expr, expr, name, evaluation, extra_args=[]):
    if rules_expr.has_form("Dispatch", None):
        rules_expr = rules_expr.leaves[0]
    if isinstance(rules_expr, Dispatch):
        return rules_expr.rules, False
    elif rules_expr.has_form("Dispatch", None):
        return Dispatch(rules_expr._leaves, evaluation)

    if rules_expr.has_form("List", None):
        rules = rules_expr.leaves
    else:
        rules = [rules_expr]
    any_lists = any(item.has_form("List", None) for item in rules)
    any_lists = any(item.has_form(("List", "Dispatch"), None) for item in rules)
    if any_lists:
        all_lists = all(item.has_form("List", None) for item in rules)
        if all_lists:
@@ -287,10 +292,8 @@ def apply(self, expr, rules, evaluation):
"ReplaceAll[expr_, rules_]"
try:
rules, ret = create_rules(rules, expr, "ReplaceAll", evaluation)

if ret:
return rules

result, applied = expr.apply_rules(rules, evaluation)
return result
except PatternError:
@@ -1468,18 +1471,57 @@ def yield_match(vars, rest):
)


class Dispatch(Builtin):
class Dispatch(Atom):
    def __init__(self, rulelist, evaluation):
        self.src = Expression(SymbolList, *rulelist)
        self.rules = [Rule(rule._leaves[0], rule._leaves[1]) for rule in rulelist]
        self._leaves = None
        self._head = Symbol("Dispatch")

    def get_sort_key(self):
        return self.src.get_sort_key()

    def get_atom_name(self):
        return "System`Dispatch"

    def __repr__(self):
        return "dispatch"

    def atom_to_boxes(self, f, evaluation):
        leaves = self.src.format(evaluation, f.get_name())
        return Expression(
            "RowBox",
            Expression(
                SymbolList, String("Dispatch"), String("["), leaves, String("]")
            ),
        )


class DispatchAtom(AtomBuiltin):
"""
<dl>
<dt>'Dispatch[$rulelist$]'
<dd>Introduced for compatibility. Currently, it just return $rulelist$.
In the future, it should return an optimized DispatchRules atom,
containing an optimized set of rules.
</dl>
>> rules = {{a_,b_}->a^b, {1,2}->3., F[x_]->x^2};
>> F[2] /. rules
= 4
>> dispatchrules = Dispatch[rules]
= Dispatch[{{a_, b_} -> a ^ b, {1, 2} -> 3., F[x_] -> x ^ 2}]
>> F[2] /. dispatchrules
= 4
"""

    def apply_stub(self, rules, evaluation):
    messages = {
        "invrpl": "`1` is not a valid rule or list of rules.",
    }

    def __repr__(self):
        return "dispatchatom"

    def apply_create(self, rules, evaluation):
        """Dispatch[rules_List]"""
        # TODO:
        # The next step would be to enlarge this method, in order to
@@ -1489,4 +1531,43 @@ def apply_stub(self, rules, evaluation):
        # compiled patterns, and modify Replace and ReplaceAll to handle this
        # kind of objects.
        #
        return rules
        if isinstance(rules, Dispatch):
            return rules
        if rules.is_symbol():
            rules = rules.evaluate(evaluation)

        if rules.has_form("List", None):
            rules = rules._leaves
        else:
            rules = [rules]

        all_list = all(rule.has_form("List", None) for rule in rules)
        if all_list:
            leaves = [self.apply_create(rule, evaluation) for rule in rules]
            return Expression(SymbolList, *leaves)
        flatten_list = []
        for rule in rules:
            if rule.is_symbol():
                rule = rule.evaluate(evaluation)
            if rule.has_form("List", None):
                flatten_list.extend(rule._leaves)
            elif rule.has_form(("Rule", "RuleDelayed"), 2):
                flatten_list.append(rule)
            elif isinstance(rule, Dispatch):
                flatten_list.extend(rule.src._leaves)
            else:
                # WMA does not raise this message: just leave it unevaluated,
                # and raise an error when the dispatch rule is used.
                evaluation.message("Dispatch", "invrpl", rule)
                return
        try:
            return Dispatch(flatten_list, evaluation)
        except:
            return

    def apply_normal(self, dispatch, evaluation):
        """Normal[dispatch_Dispatch]"""
        if isinstance(dispatch, Dispatch):
            return dispatch.src
        else:
            return dispatch._leaves[0]
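
The heart of the new `apply_create` method is the flattening of nested rule lists and already-built Dispatch objects into one flat list of rules before constructing the Dispatch atom. The standalone Python sketch below (not part of the commit) illustrates that flattening idea under simplifying assumptions: plain `(lhs, rhs)` tuples stand in for Mathics rules, and the `Dispatch` class and `flatten_rules` function here are hypothetical stand-ins, not the Mathics API.

```python
# Standalone sketch of the rule-flattening idea in apply_create.
# Rules are plain (lhs, rhs) tuples; Dispatch is a minimal stand-in class.
# This mirrors the control flow of the diff, not the Mathics object model.


class Dispatch:
    """Holds a pre-flattened list of rules, analogous to the new Dispatch atom."""

    def __init__(self, rules):
        self.rules = list(rules)


def flatten_rules(items):
    """Merge rules, nested rule lists, and Dispatch objects into one flat list.

    Returns None when an entry is not recognized, mirroring how apply_create
    bails out after emitting the "invrpl" message.
    """
    flat = []
    for item in items:
        if isinstance(item, Dispatch):
            # Reuse rules from an already-dispatched object.
            flat.extend(item.rules)
        elif isinstance(item, list):
            # Recursively flatten a nested list of rules.
            nested = flatten_rules(item)
            if nested is None:
                return None
            flat.extend(nested)
        elif isinstance(item, tuple) and len(item) == 2:
            # A single lhs -> rhs rule.
            flat.append(item)
        else:
            # Not a valid rule or list of rules: give up.
            return None
    return flat


if __name__ == "__main__":
    existing = Dispatch([("a", 1)])
    merged = flatten_rules([existing, [("b", 2), ("c", 3)], ("d", 4)])
    print(merged)  # [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
```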
