import nltk
from nltk.corpus import wordnet

from nlu.messages.parsed_input_message import ParsedInputMessage
import utils


class YesNoMessage(ParsedInputMessage):
    """
    >>> YesNoMessage.confidence("Hmmm... No thanks.")
    1.0
    >>> ynm = YesNoMessage("Hmmm... No thanks.")
    >>> ynm.frame['decision']
    {'decision': False, 'word': 'No', 'id': 2}
    >>> ynm.getDecision()
    False
    >>> YesNoMessage.confidence("Ok")
    1.0
    >>> ynm = YesNoMessage("Ok")
    >>> ynm.frame['decision']
    {'decision': True, 'word': 'Ok', 'id': 0}
    >>> ynm.getDecision()
    True
>>> YesNoMessage.confidence("Sounds good")

from nlu.messages.parsed_input_message import ParsedInputMessage


class OutOfDomainMessage(ParsedInputMessage):
    """Message type for input that falls outside the system's domain."""

    def _parse(self, raw_input_message, generators):
        # Out-of-domain input carries no slots to extract; the frame stays empty.
        pass

import nltk
from nltk.corpus import wordnet

from nlu.messages.parsed_input_message import ParsedInputMessage
from nlu.messages.msgutils import extract_close_keywords
from nlu.messages.msgutils import get_keyword_confidence
import utils


class SystemMessage(ParsedInputMessage):
    frame_keys = ['action']
    exit_keywords = ['adieu.n.01', 'bye.n.01', 'farewell.n.02', 'exit.v.01']
    restart_keywords = ['restart.v.01', 'reload.v.02']
    keywords = exit_keywords + restart_keywords

    def _parse(self, raw_input_string):
        """
        Fills out message meta and frame attributes.
        """
        # Tokenize and POS-tag the raw input.
        tokenizer = nltk.WordPunctTokenizer()
        tokenized_string = tokenizer.tokenize(raw_input_string)
        tagger = utils.combined_taggers
        tagged_string = tagger.tag(tokenized_string)
        # Map each supported action to the WordNet synset names that signal it.
        wordActionMap = {'exit': SystemMessage.exit_keywords,
                         'restart': SystemMessage.restart_keywords}
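        # A minimal sketch of filling the 'action' slot from wordActionMap:
        # compare each token's WordNet synsets against the action's keyword
        # synsets. The direct path_similarity test and the 0.8 threshold are
        # assumptions standing in for the extract_close_keywords helper
        # imported above, and self.frame is assumed to be the dict exposed
        # as .frame in the other message classes' doctests.
        def _matches(token, keyword_ids, min_similarity=0.8):
            for keyword_id in keyword_ids:
                keyword_synset = wordnet.synset(keyword_id)
                for token_synset in wordnet.synsets(token):
                    similarity = token_synset.path_similarity(keyword_synset)
                    if similarity is not None and similarity >= min_similarity:
                        return True
            return False

        self.frame['action'] = None
        for action, keyword_ids in wordActionMap.items():
            if any(_matches(token, keyword_ids) for token in tokenized_string):
                self.frame['action'] = action
                break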

from nlu.messages.parsed_input_message import ParsedInputMessage


class PrefferenceMessage(ParsedInputMessage):
    def _parse(self, raw_input_message):
        # Stub: no preference parsing is implemented in this message class.
        pass

from nlu.messages.parsed_input_message import ParsedInputMessage
from nlu.stanford_utils import extract_subject_nodes
from nlu.stanford_utils import get_node_string
from nlu.stanford_utils import get_parse_tree
from nlu.messages.msgutils import get_keyword_confidence
from nlu.messages.msgutils import extract_close_keywords
import nltk
from nltk.corpus import wordnet


class PreferenceMessage(ParsedInputMessage):
    """
    >>> from nlu.generators import *
    >>> cache_size = 16
    >>> generators = Generators()
    >>> generators.add(Generate_Tokenized_String, cache_size)
    >>> generators.add(Generate_Stanford_Parse_Tree, cache_size)
    >>> pm = PreferenceMessage('I like Japanese food.', generators)
    >>> print pm.frame
    {'word': 'like', 'temporal': 'permanent', 'prefer': True, 'subject': [u'Japanese', u'food']}
    >>> pm = PreferenceMessage('I like carrots.', generators)
    >>> print pm.frame
    {'word': 'like', 'temporal': 'permanent', 'prefer': True, 'subject': [u'carrots']}
    >>> pm = PreferenceMessage('I want carrots.', generators)
    >>> print pm.frame
    {'word': 'want', 'temporal': 'temporary', 'prefer': True, 'subject': [u'carrots']}
    """


# Cuisine-extraction helper (the function name and signature here are
# assumed); utils, wordlists, and extract_words_from_list are the project's
# own helpers.
def extract_cuisines(tokenized_string, enum=False):
    """
    Extract cuisine words from a tokenized string. With enum=True, return
    (index, word) pairs instead, e.g.:
    3 chinese
    5 mexican
    """
    stemmed_string = utils.stem_words(tokenized_string)
    cuisines = set.difference(wordlists.cuisines, wordlists.meal_types)
    cuisines = cuisines.union(wordlists.list_of_adjectivals)
    stemmed_cuisines = utils.stem_words(cuisines)
    results = extract_words_from_list(stemmed_cuisines, stemmed_string, True)
    if enum:
        return [(i, tokenized_string[i]) for i, w in results]
    else:
        return [tokenized_string[i] for i, w in results]
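
# The PreferenceMessage doctests above show a frame holding the preference
# verb ('word'), whether the preference is positive ('prefer'), how long it
# holds ('temporal'), and the preferred subject. The real _parse uses the
# Stanford parse-tree helpers imported with that class; the verb table and
# the naive subject extraction below are assumptions kept self-contained
# for illustration.

# Assumed mapping from preference verbs to (prefer, temporal).
_PREFERENCE_VERBS = {
    'like': (True, 'permanent'),
    'love': (True, 'permanent'),
    'hate': (False, 'permanent'),
    'want': (True, 'temporary'),
}


def sketch_preference_frame(raw_input_string):
    """Return a frame dict shaped like the ones printed in the doctests."""
    tokens = nltk.WordPunctTokenizer().tokenize(raw_input_string)
    for i, token in enumerate(tokens):
        if token.lower() in _PREFERENCE_VERBS:
            prefer, temporal = _PREFERENCE_VERBS[token.lower()]
            # Crude stand-in for extract_subject_nodes: every word after the
            # verb, minus punctuation, is treated as the subject phrase.
            subject = [t for t in tokens[i + 1:] if t.isalpha()]
            return {'word': token, 'temporal': temporal,
                    'prefer': prefer, 'subject': subject}
    return None

# Example: sketch_preference_frame('I want carrots.') gives
# {'word': 'want', 'temporal': 'temporary', 'prefer': True, 'subject': ['carrots']}.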

class SearchMessage(ParsedInputMessage):
    """
    >>> # Test Confidence
    >>> SearchMessage.confidence('I like apples and carrots.')
    1.0
    >>> SearchMessage.confidence('I am looking for a breakfast dish.')
    1.0
    >>> SearchMessage.confidence('What can I make with bricks?')
    1.0
    >>> # Test _parse
    >>> sm = SearchMessage('I like apples or carrots.')
    >>> sm.frame['ingredient']
    [{'descriptor': [], 'relationship': 'or', 'id': 2, 'preference': 0, 'name': 'apples'}, {'descriptor': [], 'relationship': 'or', 'id': 4, 'preference': 0, 'name': 'carrots'}]
    >>> sm.frame['dish']
    []
    >>> for ingredient in sm.frame['ingredient']: print ingredient['name']
    apples
    carrots
    """
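
# The SearchMessage doctests above show each ingredient entry carrying its
# token index ('id'), the conjunction joining the ingredient list
# ('relationship'), a 'preference' score, a 'descriptor' list, and the
# ingredient 'name'. A self-contained sketch of that extraction, assuming a
# tiny hard-coded ingredient lexicon in place of the project's food word
# lists:

import nltk

_KNOWN_INGREDIENTS = set(['apples', 'carrots', 'bricks'])  # assumed lexicon


def sketch_ingredients(raw_input_string):
    """Return a list of ingredient frames shaped like the doctest output."""
    tokens = nltk.WordPunctTokenizer().tokenize(raw_input_string)
    lowered = [t.lower() for t in tokens]
    # The whole clause shares one relationship: 'or' if present, else 'and'.
    relationship = 'or' if 'or' in lowered else 'and'
    frames = []
    for i, token in enumerate(lowered):
        if token in _KNOWN_INGREDIENTS:
            frames.append({'descriptor': [], 'relationship': relationship,
                           'id': i, 'preference': 0, 'name': token})
    return frames

# Example: sketch_ingredients('I like apples or carrots.') yields the two
# entries shown for sm.frame['ingredient'] above.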