Add support for "Duckling support for multiple timezone time/date det…
horeapopa committed Oct 30, 2018
1 parent 12d19cf commit 278fb23
Showing 6 changed files with 14 additions and 11 deletions.
3 changes: 2 additions & 1 deletion rasa_nlu/data_router.py
@@ -267,7 +267,8 @@ def parse(self, data):
                              "Error: {}".format(project, e))

        time = data.get('time')
-       response = self.project_store[project].parse(data['text'], time,
+       tz = data.get('tz')
+       response = self.project_store[project].parse(data['text'], time, tz,
                                                      model)

        if self.responses:
1 change: 1 addition & 0 deletions rasa_nlu/emulators/__init__.py
@@ -32,6 +32,7 @@ def normalise_request_json(self, data):
            _data["model"] = data["model"][0] if type(data["model"]) == list else data["model"]

        _data['time'] = data["time"] if "time" in data else None
+       _data['tz'] = data["tz"] if "tz" in data else None
        return _data

    def normalise_response_json(self, data):
10 changes: 5 additions & 5 deletions rasa_nlu/extractors/duckling_http_extractor.py
@@ -77,19 +77,19 @@ def _url(self):

        return self.component_config.get("url")

-   def _payload(self, text, reference_time):
+   def _payload(self, text, reference_time, tz):
        return {
            "text": text,
            "locale": self._locale(),
-           "tz": self.component_config.get("timezone"),
+           "tz": tz if tz else self.component_config.get("timezone"),
            "reftime": reference_time
        }

-   def _duckling_parse(self, text, reference_time):
+   def _duckling_parse(self, text, reference_time, tz):
        """Sends the request to the duckling server and parses the result."""

        try:
-           payload = self._payload(text, reference_time)
+           payload = self._payload(text, reference_time, tz)
            headers = {"Content-Type": "application/x-www-form-urlencoded; "
                                       "charset=UTF-8"}
            response = requests.post(self._url() + "/parse",
@@ -130,7 +130,7 @@ def process(self, message, **kwargs):

        if self._url() is not None:
            reference_time = self._reference_time_from_message(message)
-           matches = self._duckling_parse(message.text, reference_time)
+           matches = self._duckling_parse(message.text, reference_time, message.tz)
            dimensions = self.component_config["dimensions"]
            relevant_matches = filter_irrelevant_matches(matches, dimensions)
            extracted = convert_duckling_format_to_rasa(relevant_matches)
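
With this change, the timezone forwarded to the Duckling server comes from the incoming request when one is given, and only falls back to the pipeline's configured "timezone". A minimal sketch of the resulting request to a standalone Duckling container (the URL, timezone and reference time below are placeholder values, not part of this commit):

import requests

# Placeholder values for illustration only.
DUCKLING_URL = "http://localhost:8000"
request_tz = "Europe/Bucharest"     # tz sent with the /parse request, may be None
configured_tz = "UTC"               # "timezone" from the component config

payload = {
    "text": "tomorrow at 5pm",
    "locale": "en_US",
    "tz": request_tz if request_tz else configured_tz,  # new fallback behaviour
    "reftime": 1540900800000,       # reference time in milliseconds since epoch
}
headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
response = requests.post(DUCKLING_URL + "/parse", data=payload, headers=headers)
print(response.json())
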
4 changes: 2 additions & 2 deletions rasa_nlu/model.py
@@ -336,7 +336,7 @@ def __init__(self, pipeline, context, model_metadata=None):
        self.context = context if context is not None else {}
        self.model_metadata = model_metadata

-   def parse(self, text, time=None, only_output_properties=True):
+   def parse(self, text, time=None, tz=None, only_output_properties=True):
        # type: (Text) -> Dict[Text, Any]
        """Parse the input text, classify it and return pipeline result.
@@ -351,7 +351,7 @@ def parse(self, text, time=None, only_output_properties=True):
            output["text"] = ""
            return output

-       message = Message(text, self.default_output_attributes(), time=time)
+       message = Message(text, self.default_output_attributes(), time=time, tz=tz)

        for component in self.pipeline:
            component.process(message, **self.context)
4 changes: 2 additions & 2 deletions rasa_nlu/project.py
@@ -250,7 +250,7 @@ def _dynamic_load_model(self, requested_model_name=None):
            logger.warn("Invalid model requested. Using default")
            return self._latest_project_model()

-   def parse(self, text, time=None, requested_model_name=None):
+   def parse(self, text, time=None, tz=None, requested_model_name=None):
        self._begin_read()

        model_name = self._dynamic_load_model(requested_model_name)
@@ -263,7 +263,7 @@ def parse(self, text, time=None, requested_model_name=None):
        finally:
            self._loader_lock.release()

-       response = self._models[model_name].parse(text, time)
+       response = self._models[model_name].parse(text, time, tz)
        response['project'] = self._project
        response['model'] = model_name

3 changes: 2 additions & 1 deletion rasa_nlu/training_data/message.py
@@ -9,9 +9,10 @@


class Message(object):
-   def __init__(self, text, data=None, output_properties=None, time=None):
+   def __init__(self, text, data=None, output_properties=None, time=None, tz=None):
        self.text = text
        self.time = time
+       self.tz = tz
        self.data = data if data else {}

        if output_properties:
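
Taken together, the new tz keyword threads from the HTTP layer through Project.parse, Interpreter.parse and Message down to DucklingHTTPExtractor. A rough sketch of in-process usage after this change (the model path and timezone are placeholders):

from rasa_nlu.model import Interpreter

# Placeholder model directory; point this at a trained model of your own.
interpreter = Interpreter.load("./models/current")

# The per-request timezone is stored on the Message and forwarded to Duckling
# instead of the statically configured "timezone".
result = interpreter.parse("remind me tomorrow at 9am", tz="America/New_York")
print(result["entities"])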

7 comments on commit 278fb23

@mike23333

Is this available in the code? I don't see it.

@horeapopa
Owner Author

> Is this available in the code? I don't see it.

Hi Mike, I am not sure what the question is.

The timezone is passed to Duckling when Duckling runs as a separate service/server (e.g. a Docker container) alongside the Rasa service/server.
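
For example, a request to the Rasa NLU HTTP server could carry the timezone per message roughly like this (host, project and timezone are placeholders, assuming the standard /parse endpoint):

import requests

# Placeholder host/port and project; adjust to your own deployment.
body = {
    "q": "book a table tomorrow at 7pm",
    "project": "default",
    "tz": "Europe/Bucharest",   # forwarded to the Duckling container by this change
}
print(requests.post("http://localhost:5000/parse", json=body).json())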

@mike23333

Hi, sorry I wasn't clear. I meant I don't see the changes you made in the latest release. Are they in the latest release?

@horeapopa
Owner Author

I just merged master into my branch but haven't run any tests on it yet; it would be useful to get the Rasa guys to patch this into their master ;)

@mike23333

Yes I agree. It's exactly what I'm looking for. It seems to solve a big problem we are having. Nice work.

@mike23333

Does it work well with rasa nlu?

@horeapopa
Owner Author

Yes, we are using it in production.
