Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -68,3 +68,6 @@ firebase.json
scratch.py

.env

# Ignoring configuration files generated by chainlit
.chainlit/*
3 changes: 0 additions & 3 deletions chat-ui/README.md

This file was deleted.

1 change: 0 additions & 1 deletion chat-ui/frontend/404.html

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

52 changes: 0 additions & 52 deletions chat-ui/frontend/_next/static/chunks/105-4bc220fcbf235939.js

This file was deleted.

This file was deleted.

33 changes: 0 additions & 33 deletions chat-ui/frontend/_next/static/chunks/framework-73b8966a3c579ab0.js

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

3 changes: 0 additions & 3 deletions chat-ui/frontend/_next/static/css/c15848365b5406ac.css

This file was deleted.

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file removed chat-ui/frontend/favicon.ico
Binary file not shown.
1 change: 0 additions & 1 deletion chat-ui/frontend/index.html

This file was deleted.

Binary file removed chat-ui/frontend/static/bot-avatar.png
Binary file not shown.
Binary file removed chat-ui/frontend/static/user-avatar.png
Binary file not shown.
36 changes: 19 additions & 17 deletions nemoguardrails/server/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,13 @@
from typing import Any, AsyncIterator, Callable, List, Optional, Union

import httpx
from chainlit.utils import mount_chainlit
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from openai.types.chat.chat_completion import Choice
from openai.types.chat.chat_completion_message import ChatCompletionMessage
from pydantic import BaseModel, ValidationError
from starlette.responses import StreamingResponse
from starlette.staticfiles import StaticFiles
from starlette.responses import RedirectResponse, StreamingResponse

from nemoguardrails import LLMRails, RailsConfig, utils
from nemoguardrails.rails.llm.config import Model
Expand Down Expand Up @@ -98,10 +98,13 @@ async def lifespan(app: GuardrailsApp):
with open(challenges_files) as f:
register_challenges(json.load(f))

# If there is a `config.yml` in the root `app.rails_config_path`, then
# that means we are in single config mode.
if os.path.exists(os.path.join(app.rails_config_path, "config.yml")) or os.path.exists(
os.path.join(app.rails_config_path, "config.yaml")
# If there is a `config.yml` in the root `app.rails_config_path` (or in
# a `config/` subdirectory), set the app to single config mode.
if (
os.path.exists(os.path.join(app.rails_config_path, "config.yml"))
or os.path.exists(os.path.join(app.rails_config_path, "config.yaml"))
or os.path.exists(os.path.join(app.rails_config_path, "config", "config.yml"))
or os.path.exists(os.path.join(app.rails_config_path, "config", "config.yaml"))
):
app.single_config_mode = True
app.single_config_id = os.path.basename(app.rails_config_path)
Expand All @@ -122,19 +125,18 @@ async def lifespan(app: GuardrailsApp):
if config_module is not None and hasattr(config_module, "init"):
config_module.init(app)

# Finally, we register the static frontend UI serving

# Mount Chainlit chat UI or a basic status endpoint
if not app.disable_chat_ui:
FRONTEND_DIR = utils.get_chat_ui_data_path("frontend")

app.mount(
"/",
StaticFiles(
directory=FRONTEND_DIR,
html=True,
),
name="chat",
chainlit_app_path = os.path.join(
os.path.dirname(__file__),
"app.py",
)
mount_chainlit(app=app, target=chainlit_app_path, path="/chat")

@app.get("/")
async def root_redirect():
return RedirectResponse(url="/chat")

else:

@app.get("/")
Expand Down
164 changes: 164 additions & 0 deletions nemoguardrails/server/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
# SPDX-FileCopyrightText: Copyright (c) 2023-2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Chat UI for Guardrails Server."""

import logging
import os
from typing import List, Optional

import chainlit as cl
from chainlit.types import Feedback

from nemoguardrails.server.api import _get_rails, app, challenges

log = logging.getLogger(__name__)


@cl.set_starters
async def set_starters(user: Optional[cl.User] = None) -> List[cl.Starter]:
    """Expose the registered challenges as clickable starter prompts.

    Returns an empty list when no challenges are registered, which hides
    the starter section in the Chainlit UI.
    """
    if not challenges:
        return []

    starters: List[cl.Starter] = []
    for item in challenges:
        content = item.get("content", "")
        starters.append(
            cl.Starter(
                # Fall back to a 40-char prefix of the content when the
                # challenge has no explicit name.
                label=item.get("name", content[:40]),
                message=content,
                icon=item.get("icon", None),
            )
        )
    return starters


def _has_config_file(path: str) -> bool:
"""Check if a directory (or its 'config' subdirectory) contains a config.yml/yaml."""
for candidate in [path, os.path.join(path, "config")]:
if os.path.exists(os.path.join(candidate, "config.yml")) or os.path.exists(
os.path.join(candidate, "config.yaml")
):
return True
return False


def _discover_configs() -> List[str]:
    """Return the IDs of the guardrails configurations the server can serve.

    In single-config mode the single config ID is the only entry. Otherwise
    the rails config directory is scanned for subdirectories that contain a
    config file; hidden and underscore-prefixed directories are skipped.
    """
    if app.single_config_mode and app.single_config_id:
        return [app.single_config_id]

    if not hasattr(app, "rails_config_path"):
        return []
    root = app.rails_config_path
    if not os.path.isdir(root):
        return []

    config_ids = []
    for entry in os.listdir(root):
        full_path = os.path.join(root, entry)
        if not os.path.isdir(full_path):
            continue
        if entry.startswith(".") or entry.startswith("_"):
            continue
        if _has_config_file(full_path):
            config_ids.append(entry)
    return sorted(config_ids)


@cl.on_chat_start
async def on_chat_start():
    """Set up a fresh chat session and offer a configuration dropdown."""
    # Every new session starts with an empty conversation history.
    cl.user_session.set("messages", [])

    available = _discover_configs()
    if not available:
        cl.user_session.set("config_id", None)
        await cl.Message(
            content="No guardrails configurations available. Please configure the server with a rails config path."
        ).send()
        return

    # Prefer the server-wide default (or the single config) when it is
    # actually among the discovered configs; otherwise fall back to the first.
    preferred = app.default_config_id or app.single_config_id
    if preferred not in available:
        preferred = available[0]

    selector = cl.input_widget.Select(
        id="config_id",
        label="Guardrails Configuration",
        values=available,
        initial_value=preferred,
    )
    settings = await cl.ChatSettings([selector]).send()

    cl.user_session.set("config_id", settings["config_id"])


@cl.on_settings_update
async def on_settings_update(settings):
    """React to a configuration change made in the settings panel.

    Stores the newly selected config ID, resets the conversation history
    (the old history belongs to a different configuration), and tells the
    user which configuration is now active.
    """
    new_config = settings["config_id"]
    cl.user_session.set("config_id", new_config)
    cl.user_session.set("messages", [])
    await cl.Message(content=f"Switched to configuration: **{new_config}**").send()


@cl.on_message
async def on_message(message: cl.Message):
    """Process an incoming user message through guardrails.

    Streams the guardrails response token-by-token into a placeholder
    message; if streaming yields nothing, falls back to a one-shot
    ``generate_async`` call. The conversation history is only persisted to
    the session when generation succeeds.
    """
    config_id = cl.user_session.get("config_id")

    # No config selected (e.g. discovery found none at chat start).
    if not config_id:
        await cl.Message(content="No guardrails configuration selected. Please select one from settings.").send()
        return

    # Accumulated history for this session; `or []` guards a missing key.
    messages = cl.user_session.get("messages") or []
    messages.append({"role": "user", "content": message.content})

    try:
        llm_rails = await _get_rails([config_id])
    except ValueError as ex:
        log.exception("Failed to load rails config: %s", ex)
        await cl.Message(content=f"Error loading guardrails configuration '{config_id}': {ex}").send()
        return

    # Send an empty placeholder first so tokens can be streamed into it.
    response_msg = cl.Message(content="")
    await response_msg.send()

    try:
        full_response = ""
        # Stream tokens as they arrive; skip non-string / empty chunks.
        async for chunk in llm_rails.stream_async(messages=messages):
            if isinstance(chunk, str) and chunk:
                full_response += chunk
                await response_msg.stream_token(chunk)

        # Fallback: streaming produced no text, so do a one-shot generation
        # and normalize the result (str, dict, or object) into plain text.
        if not full_response:
            res = await llm_rails.generate_async(messages=messages)
            if isinstance(res, str):
                bot_content = res
            elif isinstance(res, dict):
                bot_content = res.get("content", str(res))
            elif hasattr(res, "response"):
                bot_content = str(res.response)  # type: ignore[union-attr]
            else:
                bot_content = str(res)

            response_msg.content = bot_content
            await response_msg.update()
            full_response = bot_content

        # Persist the assistant turn only after successful generation; on
        # failure the session history keeps only turns that got a response.
        messages.append({"role": "assistant", "content": full_response})
        cl.user_session.set("messages", messages)

    except Exception as ex:
        # Top-level boundary: log with traceback and surface the error in
        # the placeholder message instead of crashing the handler.
        log.exception("Error generating response: %s", ex)
        response_msg.content = f"An error occurred while processing your message: {ex}"
        await response_msg.update()
5 changes: 0 additions & 5 deletions nemoguardrails/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,11 +272,6 @@ def get_examples_data_path(file_path: str) -> str:
return get_data_path("nemoguardrails", f"examples/{file_path}")


def get_chat_ui_data_path(file_path: str) -> str:
"""Helper to get the path to the chat-ui data directory."""
return get_data_path("nemoguardrails", f"chat-ui/{file_path}")


def camelcase_to_snakecase(name: str) -> str:
"""Converts a CamelCase string to snake_case.

Expand Down
Loading
Loading