Skip to content

Commit 4a2a728

Browse files
Merge pull request #554 from NVIDIA/feature/colang-NLD-intents
Feature/colang nld intents
2 parents 26304fd + 51ab0a1 commit 4a2a728

File tree

5 files changed

+167
-94
lines changed

5 files changed

+167
-94
lines changed

nemoguardrails/actions/llm/generation.py

+9-7
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@
4444
)
4545
from nemoguardrails.colang import parse_colang_file
4646
from nemoguardrails.colang.v2_x.lang.colang_ast import Flow, Spec, SpecOp
47+
from nemoguardrails.colang.v2_x.runtime.eval import eval_expression
4748
from nemoguardrails.context import (
4849
generation_options_var,
4950
llm_call_info_var,
@@ -150,8 +151,9 @@ def _extract_user_message_example(self, flow: Flow):
150151
return
151152

152153
# Extract the message and remove the double quotes
153-
message = spec.arguments["final_transcript"][1:-1]
154-
self.user_messages[flow.name] = [message]
154+
message = eval_expression(spec.arguments["final_transcript"], {})
155+
if isinstance(message, str):
156+
self.user_messages[flow.name] = [message]
155157

156158
elif el.op == "await":
157159
spec = cast(SpecOp, el).spec
@@ -169,11 +171,11 @@ def _extract_user_message_example(self, flow: Flow):
169171
):
170172
continue
171173

172-
message = spec.arguments["$0"][1:-1]
173-
if flow.name not in self.user_messages:
174-
self.user_messages[flow.name] = []
175-
176-
self.user_messages[flow.name].append(message)
174+
message = eval_expression(spec.arguments["$0"], {})
175+
if isinstance(message, str):
176+
if flow.name not in self.user_messages:
177+
self.user_messages[flow.name] = []
178+
self.user_messages[flow.name].append(message)
177179

178180
def _extract_bot_message_example(self, flow: Flow):
179181
# Quick heuristic to identify the user utterance examples

nemoguardrails/actions/v2_x/generation.py

+62-71
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
import logging
1818
import re
1919
from ast import literal_eval
20-
from typing import Any, List, Optional
20+
from typing import Any, List, Optional, Tuple
2121

2222
from langchain.llms import BaseLLM
2323

@@ -140,30 +140,9 @@ async def _init_flows_index(self) -> None:
140140
if self.instruction_flows_index is None:
141141
self.instruction_flows_index = self.flows_index
142142

143-
@action(name="GetLastUserMessageAction", is_system_action=True)
144-
async def get_last_user_message(
145-
self, events: List[dict], llm: Optional[BaseLLM] = None
146-
) -> str:
147-
event = get_last_user_utterance_event_v2_x(events)
148-
assert event and event["type"] == "UtteranceUserActionFinished"
149-
return event["final_transcript"]
150-
151-
@action(name="GenerateUserIntentAction", is_system_action=True, execute_async=True)
152-
async def generate_user_intent(
153-
self,
154-
state: State,
155-
events: List[dict],
156-
user_action: str,
157-
max_example_flows: int = 5,
158-
llm: Optional[BaseLLM] = None,
159-
) -> str:
160-
"""Generate the canonical form for what the user said i.e. user intent."""
161-
162-
# Use action specific llm if registered else fallback to main llm
163-
llm = llm or self.llm
164-
165-
log.info("Phase 1 :: Generating user intent")
166-
143+
async def _collect_user_intent_and_examples(
144+
self, state: State, user_action: str, max_example_flows: int
145+
) -> Tuple[List[str], str]:
167146
# We search for the most relevant similar user intents
168147
examples = ""
169148
potential_user_intents = []
@@ -182,25 +161,68 @@ async def generate_user_intent(
182161
heads = find_all_active_event_matchers(state)
183162
for head in heads:
184163
element = get_element_from_head(state, head)
185-
event = get_event_from_element(
186-
state, state.flow_states[head.flow_state_uid], element
187-
)
164+
flow_state = state.flow_states[head.flow_state_uid]
165+
event = get_event_from_element(state, flow_state, element)
188166
if (
189167
event.name == InternalEvents.FLOW_FINISHED
190168
and "flow_id" in event.arguments
191169
):
192170
flow_id = event.arguments["flow_id"]
171+
if not isinstance(flow_id, str):
172+
continue
173+
193174
flow_config = state.flow_configs.get(flow_id, None)
194-
if isinstance(flow_id, str) and (
195-
flow_config is None
175+
element_flow_state_instance = state.flow_id_states[flow_id]
176+
if flow_config is not None and (
177+
flow_config.has_meta_tag("user_intent")
196178
or (
197-
flow_config.has_meta_tag("user_intent")
198-
and flow_id not in potential_user_intents
179+
element_flow_state_instance
180+
and "_user_intent" in element_flow_state_instance[0].context
199181
)
200182
):
201-
examples += f"user intent: {flow_id}\n\n"
202-
potential_user_intents.append(flow_id)
183+
if flow_config.elements[1]["_type"] == "doc_string_stmt":
184+
examples += "user action: <" + (
185+
flow_config.elements[1]["elements"][0]["elements"][0][
186+
"elements"
187+
][0][3:-3]
188+
+ ">\n"
189+
)
190+
examples += f"user intent: {flow_id}\n\n"
191+
elif flow_id not in potential_user_intents:
192+
examples += f"user intent: {flow_id}\n\n"
193+
potential_user_intents.append(flow_id)
203194
examples = examples.strip("\n")
195+
return (potential_user_intents, examples)
196+
197+
@action(name="GetLastUserMessageAction", is_system_action=True)
198+
async def get_last_user_message(
199+
self, events: List[dict], llm: Optional[BaseLLM] = None
200+
) -> str:
201+
event = get_last_user_utterance_event_v2_x(events)
202+
assert event and event["type"] == "UtteranceUserActionFinished"
203+
return event["final_transcript"]
204+
205+
@action(name="GenerateUserIntentAction", is_system_action=True, execute_async=True)
206+
async def generate_user_intent(
207+
self,
208+
state: State,
209+
events: List[dict],
210+
user_action: str,
211+
max_example_flows: int = 5,
212+
llm: Optional[BaseLLM] = None,
213+
) -> str:
214+
"""Generate the canonical form for what the user said i.e. user intent."""
215+
216+
# Use action specific llm if registered else fallback to main llm
217+
llm = llm or self.llm
218+
219+
log.info("Phase 1 :: Generating user intent")
220+
(
221+
potential_user_intents,
222+
examples,
223+
) = await self._collect_user_intent_and_examples(
224+
state, user_action, max_example_flows
225+
)
204226

205227
prompt = self.llm_task_manager.render_task_prompt(
206228
task=Task.GENERATE_USER_INTENT_FROM_USER_ACTION,
@@ -257,43 +279,12 @@ async def generate_user_intent_and_bot_action(
257279

258280
log.info("Phase 1 :: Generating user intent and bot action")
259281

260-
# We search for the most relevant similar user intents
261-
examples = ""
262-
potential_user_intents = []
263-
264-
if self.user_message_index:
265-
results = await self.user_message_index.search(
266-
text=user_action, max_results=max_example_flows
267-
)
268-
269-
# We add these in reverse order so the most relevant is towards the end.
270-
for result in reversed(results):
271-
examples += f"user action: user said \"{result.text}\"\nuser intent: {result.meta['intent']}\n\n"
272-
potential_user_intents.append(result.meta["intent"])
273-
274-
# We add all currently active user intents (heads on match statements)
275-
heads = find_all_active_event_matchers(state)
276-
for head in heads:
277-
element = get_element_from_head(state, head)
278-
event = get_event_from_element(
279-
state, state.flow_states[head.flow_state_uid], element
280-
)
281-
if (
282-
event.name == InternalEvents.FLOW_FINISHED
283-
and "flow_id" in event.arguments
284-
):
285-
flow_id = event.arguments["flow_id"]
286-
flow_config = state.flow_configs.get(flow_id, None)
287-
if isinstance(flow_id, str) and (
288-
flow_config is None
289-
or (
290-
flow_config.has_meta_tag("user_intent")
291-
and flow_id not in potential_user_intents
292-
)
293-
):
294-
examples += f"user intent: {flow_id}\n\n"
295-
potential_user_intents.append(flow_id)
296-
examples = examples.strip("\n")
282+
(
283+
potential_user_intents,
284+
examples,
285+
) = await self._collect_user_intent_and_examples(
286+
state, user_action, max_example_flows
287+
)
297288

298289
prompt = self.llm_task_manager.render_task_prompt(
299290
task=Task.GENERATE_USER_INTENT_FROM_USER_ACTION,

nemoguardrails/colang/v2_x/library/llm.co

+10-9
Original file line numberDiff line numberDiff line change
@@ -101,21 +101,22 @@ flow continuation on unhandled user utterance
101101
send UserIntentLog(flow_id=$user_intent, parameter=None)
102102
# If the user intent was not handled
103103

104-
if $bot_intent is not None
105-
$exists = await CheckValidFlowExistsAction(flow_id=$bot_intent)
106-
$flow_info = await CreateFlowAction(name=$bot_intent, body=$bot_action)
104+
# Create and start a new flow, or an existing one with the same name as the bot intent
105+
$flow_exists = False
106+
if $bot_intent is None
107+
$bot_intent = "bot custom action"
107108
else
108-
$exists = False
109-
$flow_info = await CreateFlowAction(name="bot action", body=$bot_action)
109+
$flow_exists = await CheckValidFlowExistsAction(flow_id=$bot_intent)
110110

111-
if $exists == False
111+
if $flow_exists == False and $bot_action is not None
112+
$flow_info = await CreateFlowAction(name=$bot_intent, body=$bot_action)
112113
$flows = await AddFlowsAction(config=$flow_info.body)
113114
if len($flows) == 0
114115
print "Failed parsing created bot action flow!"
115116
return
116-
else
117-
await await_flow_by_name $flow_info.name
118-
await RemoveFlowsAction(flow_ids=[$flow_info.name])
117+
# Start
118+
await await_flow_by_name $flow_info.name
119+
await RemoveFlowsAction(flow_ids=[$flow_info.name])
119120

120121

121122
flow unhandled user intent -> $intent

nemoguardrails/llm/prompts/general.yml

+1-7
Original file line numberDiff line numberDiff line change
@@ -116,19 +116,13 @@ prompts:
116116
# Prompt for detecting the user message canonical form.
117117
- task: generate_user_intent_from_user_action
118118
content: |-
119-
"""
120-
{{ general_instructions }}
121-
"""
122-
123119
# This is how a conversation between a user and the bot can go:
124120
{{ sample_conversation }}
125121
126122
# These are the most likely user intents:
127123
{{ examples }}
128124
129-
# This is the current conversation between the user and the bot:
130-
{{ history | colang }}
131-
125+
# Continuation of interaction using only specified user intents from the section 'These are the most likely user intents:':
132126
user action: {{ user_action }}
133127
user intent:
134128
stop:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
# SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2+
# SPDX-License-Identifier: Apache-2.0
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License");
5+
# you may not use this file except in compliance with the License.
6+
# You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
import os
17+
18+
from nemoguardrails import RailsConfig
19+
from tests.utils import TestChat
20+
21+
colang_content = '''
22+
import core
23+
import llm
24+
25+
flow main
26+
activate llm continuation
27+
activate greeting
28+
activate other reactions
29+
30+
flow greeting
31+
user expressed greeting
32+
bot say "Hello world!"
33+
34+
flow other reactions
35+
user expressed to be bored
36+
bot say "No problem!"
37+
38+
flow user expressed greeting
39+
""""User expressed greeting in any way or form."""
40+
user said "hi"
41+
42+
flow user expressed to be bored
43+
""""User expressed to be bored."""
44+
user said "This is boring"
45+
'''
46+
47+
yaml_content = """
48+
colang_version: "2.x"
49+
models:
50+
- type: main
51+
engine: openai
52+
model: gpt-3.5-turbo-instruct
53+
54+
"""
55+
56+
57+
def test_1():
58+
config = RailsConfig.from_content(colang_content, yaml_content)
59+
60+
chat = TestChat(
61+
config,
62+
llm_completions=["user expressed greeting"],
63+
)
64+
65+
chat >> "hi"
66+
chat << "Hello world!"
67+
68+
chat >> "hello"
69+
chat << "Hello world!"
70+
71+
72+
def test_2():
73+
config = RailsConfig.from_content(colang_content, yaml_content)
74+
75+
chat = TestChat(
76+
config,
77+
llm_completions=["user expressed to be bored"],
78+
)
79+
80+
chat >> "You are boring me!"
81+
chat << "No problem!"
82+
83+
84+
if __name__ == "__main__":
85+
test_2()

0 commit comments

Comments
 (0)