Skip to content

Commit c2b86de

Browse files
committed
Merge remote-tracking branch 'origin/main' into Oracle-SQL-Change-II-May2025
2 parents 5777da4 + c179a09 commit c2b86de

File tree

70 files changed

+4490
-515
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

70 files changed

+4490
-515
lines changed

.DS_Store

0 Bytes
Binary file not shown.

ai/gen-ai-agents/travel_agent/README.md

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,10 @@ You only need to create a file, named config_private.py, with the value for **CO
99
The compartment must be a compartment where you have setup the right policies to access OCI Generative AI.
1010

1111
In config.py AUTH_TYPE is set to API_KEY, therefore you need to have in $HOME/.oci the key pair to access OCI.
12+
Another option, if you're deploying in OCI, is to setup INSTANCE_PRINCIPAL.
13+
14+
## How-to start the demo
15+
Two shell scripts (.sh) are provided. We use **uvicorn** to start the API.
1216

1317
## List of libraries used
1418
* oci
@@ -35,17 +39,32 @@ of the following cities:
3539

3640
or, simply add other records to the JSON in mock_data.py.
3741

38-
If you want to diplsay the positio of the Hotel in a map, you need to provide in the file
42+
If you want to display the position of the Hotel on a map, you need to provide in the file
3943
correct values for latitude and longitude.
4044

45+
(26/05/2025) The demo has been updated: now you should be able to select any place of departure and destination.
46+
Data are generated by an LLM.
47+
4148
## Supported languages
42-
As of now, the demo supports:
49+
As of now, the demo fully supports:
4350
* English
4451
* Italian
4552

4653
to add other languages, you need to add the translations in translations.py and change, accordingly, some
4754
code in streamlit_app.py.
4855

56+
## Features
57+
* **Routing** (you can ask **for information** or **book** a travel)
58+
* **Multi-turn** conversation to get all the required information for booking a travel
59+
* Easy **localization** (English, Italian, ...)
60+
* **Streaming** support in API
61+
62+
## Streaming
63+
Streaming support is based on the same core agent implementation as the non-streaming one.
64+
65+
To use it, you should:
66+
* start the API contained in agent_api (use the script)
67+
* use the [Streaming Streamlit client](./streamlit_app_streaming.py)
4968

5069

5170

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
"""
2+
FastAPI server for the travel agent example using LangChain.
3+
This server provides a simple chat interface to interact with the travel agent.
4+
"""
5+
6+
import json
7+
from fastapi import FastAPI, Query
8+
from fastapi.responses import StreamingResponse
9+
from workflow import create_travel_planner_graph
10+
from travel_state import TravelState
11+
from utils import get_console_logger
12+
13+
MEDIA_TYPE = "application/json"
14+
15+
app = FastAPI()
16+
17+
# here we create the graph
18+
travel_agent_graph = create_travel_planner_graph()
19+
20+
logger = get_console_logger("agent_fastapi_logger", level="INFO")
21+
22+
23+
async def stream_graph_updates(user_input: str):
24+
"""
25+
Stream the updates from the travel agent graph.
26+
Args:
27+
user_input (str): User input for the travel agent.
28+
Yields:
29+
str: JSON string of the step output.
30+
"""
31+
# prepare the input
32+
state = TravelState(user_input=user_input)
33+
34+
# here we call the agent and return the state
35+
# update the state with the user input
36+
async for step_output in travel_agent_graph.astream(state):
37+
# using stream with LangGraph returns state updates
38+
# for each node in the graph
39+
# yield returns the state update
40+
yield json.dumps(step_output) + "\n"
41+
42+
43+
@app.get("/invoke")
44+
async def invoke(user_input: str = Query(...)):
45+
"""
46+
endpoint to interact with the travel agent.
47+
Args:
48+
user_input (str): User input for the travel agent.
49+
Returns:
50+
StreamingResponse: Stream of JSON responses from the travel agent.
51+
"""
52+
logger.info("Invoked Agent API...")
53+
54+
return StreamingResponse(stream_graph_updates(user_input), media_type=MEDIA_TYPE)
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
"""
2+
Test client for the FastAPI agent.
3+
This client sends a request to the FastAPI agent and prints the response.
4+
"""
5+
6+
import asyncio
7+
import json
8+
import httpx
9+
from utils import get_console_logger
10+
from config import AGENT_API_URL
11+
12+
logger = get_console_logger("agent_fastapi_client_logger", level="INFO")
13+
14+
15+
async def stream_invoke(_user_input: str):
16+
"""
17+
Stream the chat with the agent.
18+
Args:
19+
user_input (str): User input for the travel agent.
20+
"""
21+
# Prepare the input
22+
params = {"user_input": _user_input}
23+
24+
async with httpx.AsyncClient(timeout=None) as client:
25+
print("--------------------")
26+
print("Streaming response:")
27+
print("--------------------")
28+
29+
async with client.stream("GET", AGENT_API_URL, params=params) as response:
30+
async for line in response.aiter_lines():
31+
if line.strip(): # skip empty lines
32+
try:
33+
data = json.loads(line)
34+
35+
for key, value in data.items():
36+
# key here is the name of the node
37+
logger.info("Step: %s completed...", key)
38+
39+
if key == "synthesize_plan":
40+
print("")
41+
print(value["final_plan"])
42+
if key == "generate_itinerary":
43+
print("")
44+
print(value["itinerary"])
45+
46+
except json.JSONDecodeError as e:
47+
print("Failed to parse JSON:", e)
48+
49+
50+
if __name__ == "__main__":
51+
52+
USER_INPUT = """I want to go from Rome to Florence
53+
from June 10 to June 15 with my partner.
54+
I want to go by train and I need a hotel in the city center."""
55+
56+
print("--------------------")
57+
print("User input:")
58+
print(USER_INPUT)
59+
print("--------------------")
60+
61+
asyncio.run(stream_invoke(USER_INPUT))
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# start the Agent API
2+
export PYTHONPATH="..:$PYTHONPATH"
3+
uvicorn agent_fastapi:app --port 8080 --reload
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
export PYTHONPATH="..:$PYTHONPATH"
2+
python agent_fastapi_client.py

ai/gen-ai-agents/travel_agent/config.py

Lines changed: 23 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,14 @@
55
#
66
# application configs
77
#
8+
# in secs
9+
SLEEP_TIME = 1
810
DEBUG = False
911

12+
# this is only for the UI
13+
SUPPORTED_LANGUAGES = ["EN", "IT"]
14+
SUPPORTED_LANGUAGES_LONG = {"EN": "English", "IT": "Italiano"}
15+
1016
# this is the list of the mandatory fields in user input
1117
# if any of these fields is missing, the agent will ask for clarification
1218
REQUIRED_FIELDS = [
@@ -24,6 +30,7 @@
2430
AUTH_TYPE = "API_KEY"
2531

2632
REGION = "eu-frankfurt-1"
33+
# REGION = "us-chicago-1"
2734
SERVICE_ENDPOINT = f"https://inference.generativeai.{REGION}.oci.oraclecloud.com"
2835

2936
# seems to work fine with both models
@@ -32,9 +39,21 @@
3239

3340
MAX_TOKENS = 2048
3441

35-
# Mock API configuration
36-
HOTEL_API_URL = "http://localhost:8000/search/hotels"
37-
TRANSPORT_API_URL = "http://localhost:8000/search/transport"
38-
42+
#
43+
# Map configuration
3944
# Hotel Map
4045
MAP_STYLE = "https://basemaps.cartocdn.com/gl/positron-gl-style/style.json"
46+
47+
#
48+
# Mock API (Hotel and transport)
49+
#
50+
HOST_API = "localhost"
51+
PORT_MOCK_API = 8000
52+
HOTEL_API_URL = f"http://{HOST_API}:{PORT_MOCK_API}/search/hotels"
53+
TRANSPORT_API_URL = f"http://{HOST_API}:{PORT_MOCK_API}/search/transport"
54+
55+
#
56+
# AGENT API
57+
#
58+
PORT_AGENT = 8080
59+
AGENT_API_URL = f"http://{HOST_API}:{PORT_AGENT}/invoke"
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
"""
2+
Simulate hotel options using an LLM.
3+
This script uses an LLM to generate realistic hotel options
4+
"""
5+
6+
from model_factory import get_chat_model
7+
from utils import extract_json_from_text
8+
from config import SERVICE_ENDPOINT, MODEL_ID
9+
10+
11+
def simulate_hotel_with_llm(destination, start_date, num_days, stars):
12+
"""
13+
Simulate a hotel option using an LLM.
14+
Args:
15+
destination (str): Destination city.
16+
start_date (str): Start date of the stay in YYYY-MM-DD format.
17+
num_days (int): Number of nights to stay.
18+
stars (int): Star rating of the hotel (1-5).
19+
Returns:
20+
dict: A dictionary containing the hotel option details.
21+
- name (str): Name of the hotel.
22+
- price (float): Price per night in EUR.
23+
- stars (int): Star rating of the hotel.
24+
- location (str): Location description.
25+
- amenities (list): List of amenities available at the hotel.
26+
- latitude (float): Approximate latitude of the hotel.
27+
- longitude (float): Approximate longitude of the hotel.
28+
"""
29+
prompt = f"""
30+
Simulate a realistic hotel option in {destination} starting on {start_date} for {num_days} nights.
31+
The hotel should be rated {stars} stars.
32+
33+
Return a JSON object with the following fields:
34+
- name
35+
- price (EUR per night)
36+
- stars
37+
- location
38+
- amenities (list of 2-3 amenities)
39+
- latitude (approximate)
40+
- longitude (approximate)
41+
JSON must be enclosed in triple backticks
42+
43+
Return a **JSON object** with the following format:
44+
```json
45+
{{
46+
"name": "Hotel Example",
47+
"price": 150.0,
48+
"stars": 4,
49+
"location": "City Center",
50+
"amenities": ["WiFi", "Breakfast", "Gym"],
51+
"latitude": 40.7128,
52+
"longitude": -74.0060
53+
}}
54+
```
55+
"""
56+
llm = get_chat_model(
57+
model_id=MODEL_ID,
58+
service_endpoint=SERVICE_ENDPOINT,
59+
temperature=0.2,
60+
max_tokens=1000,
61+
)
62+
63+
response = llm.invoke(prompt).content
64+
data = extract_json_from_text(response)
65+
66+
return data
Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
"""
2+
Simulate transport options using an LLM.
3+
"""
4+
5+
from model_factory import get_chat_model
6+
from utils import extract_json_from_text
7+
from config import SERVICE_ENDPOINT, MODEL_ID
8+
9+
10+
def simulate_transport_with_llm(departure, destination, date, mode):
11+
"""
12+
Simulate a transport option using an LLM.
13+
Args:
14+
departure (str): Place of departure.
15+
destination (str): Destination place.
16+
date (str): Date of travel in YYYY-MM-DD format.
17+
mode (str): Mode of transport (e.g., "airplane", "train").
18+
Returns:
19+
dict: A dictionary containing the transport option details.
20+
- provider (str): Name of the transport provider.
21+
- price (float): Price in EUR.
22+
- duration_hours (float): Duration of the trip in hours.
23+
"""
24+
prompt = f"""
25+
Simulate a realistic {mode} travel option between {departure} and {destination} for the date {date}.
26+
Return JSON with the fields: provider, price (EUR), duration_hours.
27+
JSON must be enclosed in triple backticks.
28+
29+
Return a **JSON object** with the following format:
30+
```json
31+
Example:
32+
{{
33+
"provider": "Lufthansa",
34+
"price": 155.0,
35+
"departure": "2025-10-10T08:00",
36+
"duration_hours": 2.5
37+
}}
38+
```
39+
"""
40+
41+
llm = get_chat_model(
42+
model_id=MODEL_ID,
43+
service_endpoint=SERVICE_ENDPOINT,
44+
temperature=0.2,
45+
max_tokens=1000,
46+
)
47+
48+
response = llm.invoke(prompt).content
49+
50+
data = extract_json_from_text(response)
51+
52+
# transform in the expected output format
53+
return {
54+
"provider": data["provider"],
55+
"price": data["price"],
56+
"departure": f"{date}T08:00",
57+
"arrival": f"{date}T{int(8 + data['duration_hours']):02}:00",
58+
"type": mode,
59+
}

0 commit comments

Comments
 (0)