-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathschema.py
89 lines (70 loc) · 27.8 KB
/
schema.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import logging
from typing import List, NoReturn, Optional

from fastapi import HTTPException
from pydantic import BaseModel, Field
AlttexterRequest_text_example = """\n \"cells\": [\n {\n \"cell_type\": \"raw\",\n \"id\": \"ea5c61b2-8b52-4270-bdb0-c4df88608f15\",\n \"metadata\": {},\n \"source\": [\n \"---\\n\",\n \"sidebar_position: 1\\n\",\n \"title: Interacting with APIs\\n\",\n \"---\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"a15e6a18\",\n \"metadata\": {},\n \"source\": [\n \"[](https://colab.research.google.com/github/langchain-ai/langchain/blob/master/docs/docs/use_cases/apis.ipynb)\\n\",\n \"\\n\",\n \"## Use case \\n\",\n \"\\n\",\n \"Suppose you want an LLM to interact with external APIs.\\n\",\n \"\\n\",\n \"This can be very useful for retrieving context for the LLM to utilize.\\n\",\n \"\\n\",\n \"And, more generally, it allows us to interact with APIs using natural language! \\n\",\n \" \\n\",\n \"\\n\",\n \"## Overview\\n\",\n \"\\n\",\n \"There are two primary ways to interface LLMs with external APIs:\\n\",\n \" \\n\",\n \"* `Functions`: For example, [OpenAI functions](https://platform.openai.com/docs/guides/gpt/function-calling) is one popular means of doing this.\\n\",\n \"* `LLM-generated interface`: Use an LLM with access to API documentation to create an interface.\\n\",\n \"\\n\",\n \"\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"abbd82f0\",\n \"metadata\": {},\n \"source\": [\n \"## Quickstart \\n\",\n \"\\n\",\n \"Many APIs are already compatible with OpenAI function calling.\\n\",\n \"\\n\",\n \"For example, [Klarna](https://www.klarna.com/international/press/klarna-brings-smoooth-shopping-to-chatgpt/) has a YAML file that describes its API and allows OpenAI to interact with it:\\n\",\n \"\\n\",\n \"```\\n\",\n \"https://www.klarna.com/us/shopping/public/openai/v0/api-docs/\\n\",\n \"```\\n\",\n \"\\n\",\n \"Other options include:\\n\",\n \"\\n\",\n \"* [Speak](https://api.speak.com/openapi.yaml) for translation\\n\",\n \"* 
[XKCD](https://gist.githubusercontent.com/roaldnefs/053e505b2b7a807290908fe9aa3e1f00/raw/0a212622ebfef501163f91e23803552411ed00e4/openapi.yaml) for comics\\n\",\n \"\\n\",\n \"We can supply the specification to `get_openapi_chain` directly in order to query the API with OpenAI functions:\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": null,\n \"id\": \"5a218fcc\",\n \"metadata\": {},\n \"outputs\": [],\n \"source\": [\n \"pip install langchain openai \\n\",\n \"\\n\",\n \"# Set env var OPENAI_API_KEY or load from a .env file:\\n\",\n \"# import dotenv\\n\",\n \"# dotenv.load_dotenv()\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": 2,\n \"id\": \"30b780e3\",\n \"metadata\": {},\n \"outputs\": [\n {\n \"name\": \"stderr\",\n \"output_type\": \"stream\",\n \"text\": [\n \"Attempting to load an OpenAPI 3.0.1 spec. This may result in degraded performance. Convert your OpenAPI spec to 3.1.* spec for better support.\\n\"\n ]\n },\n {\n \"data\": {\n \"text/plain\": [\n \"{'query': \\\"What are some options for a men's large blue button down shirt\\\",\\n\",\n \" 'response': {'products': [{'name': 'Cubavera Four Pocket Guayabera Shirt',\\n\",\n \" 'url': 'https://www.klarna.com/us/shopping/pl/cl10001/3202055522/Clothing/Cubavera-Four-Pocket-Guayabera-Shirt/?utm_source=openai&ref-site=openai_plugin',\\n\",\n \" 'price': '$13.50',\\n\",\n \" 'attributes': ['Material:Polyester,Cotton',\\n\",\n \" 'Target Group:Man',\\n\",\n \" 'Color:Red,White,Blue,Black',\\n\",\n \" 'Properties:Pockets',\\n\",\n \" 'Pattern:Solid Color',\\n\",\n \" 'Size (Small-Large):S,XL,L,M,XXL']},\\n\",\n \" {'name': 'Polo Ralph Lauren Plaid Short Sleeve Button-down Oxford Shirt',\\n\",\n \" 'url': 'https://www.klarna.com/us/shopping/pl/cl10001/3207163438/Clothing/Polo-Ralph-Lauren-Plaid-Short-Sleeve-Button-down-Oxford-Shirt/?utm_source=openai&ref-site=openai_plugin',\\n\",\n \" 'price': '$52.20',\\n\",\n \" 'attributes': ['Material:Cotton',\\n\",\n \" 'Target 
Group:Man',\\n\",\n \" 'Color:Red,Blue,Multicolor',\\n\",\n \" 'Size (Small-Large):S,XL,L,M,XXL']},\\n\",\n \" {'name': 'Brixton Bowery Flannel Shirt',\\n\",\n \" 'url': 'https://www.klarna.com/us/shopping/pl/cl10001/3202331096/Clothing/Brixton-Bowery-Flannel-Shirt/?utm_source=openai&ref-site=openai_plugin',\\n\",\n \" 'price': '$27.48',\\n\",\n \" 'attributes': ['Material:Cotton',\\n\",\n \" 'Target Group:Man',\\n\",\n \" 'Color:Gray,Blue,Black,Orange',\\n\",\n \" 'Properties:Pockets',\\n\",\n \" 'Pattern:Checkered',\\n\",\n \" 'Size (Small-Large):XL,3XL,4XL,5XL,L,M,XXL']},\\n\",\n \" {'name': 'Vineyard Vines Gingham On-The-Go brrr Classic Fit Shirt Crystal',\\n\",\n \" 'url': 'https://www.klarna.com/us/shopping/pl/cl10001/3201938510/Clothing/Vineyard-Vines-Gingham-On-The-Go-brrr-Classic-Fit-Shirt-Crystal/?utm_source=openai&ref-site=openai_plugin',\\n\",\n \" 'price': '$80.64',\\n\",\n \" 'attributes': ['Material:Cotton',\\n\",\n \" 'Target Group:Man',\\n\",\n \" 'Color:Blue',\\n\",\n \" 'Size (Small-Large):XL,XS,L,M']},\\n\",\n \" {'name': \\\"Carhartt Men's Loose Fit Midweight Short Sleeve Plaid Shirt\\\",\\n\",\n \" 'url': 'https://www.klarna.com/us/shopping/pl/cl10001/3201826024/Clothing/Carhartt-Men-s-Loose-Fit-Midweight-Short-Sleeve-Plaid-Shirt/?utm_source=openai&ref-site=openai_plugin',\\n\",\n \" 'price': '$17.99',\\n\",\n \" 'attributes': ['Material:Cotton',\\n\",\n \" 'Target Group:Man',\\n\",\n \" 'Color:Red,Brown,Blue,Green',\\n\",\n \" 'Properties:Pockets',\\n\",\n \" 'Pattern:Checkered',\\n\",\n \" 'Size (Small-Large):S,XL,L,M']}]}}\"\n ]\n },\n \"execution_count\": 2,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n \"source\": [\n \"from langchain.chains.openai_functions.openapi import get_openapi_chain\\n\",\n \"\\n\",\n \"chain = get_openapi_chain(\\n\",\n \" \\\"https://www.klarna.com/us/shopping/public/openai/v0/api-docs/\\\"\\n\",\n \")\\n\",\n \"chain(\\\"What are some options for a men's large blue button down 
shirt\\\")\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"9162c91c\",\n \"metadata\": {},\n \"source\": [\n \"## Functions \\n\",\n \"\\n\",\n \"We can unpack what is happening when we use the functions to call external APIs.\\n\",\n \"\\n\",\n \"Let's look at the [LangSmith trace](https://smith.langchain.com/public/76a58b85-193f-4eb7-ba40-747f0d5dd56e/r):\\n\",\n \"\\n\",\n \"* See [here](https://github.com/langchain-ai/langchain/blob/7fc07ba5df99b9fa8bef837b0fafa220bc5c932c/libs/langchain/langchain/chains/openai_functions/openapi.py#L279C9-L279C19) that we call the OpenAI LLM with the provided API spec:\\n\",\n \"\\n\",\n \"```\\n\",\n \"https://www.klarna.com/us/shopping/public/openai/v0/api-docs/\\n\",\n \"```\\n\",\n \"\\n\",\n \"* The prompt then tells the LLM to use the API spec with input question:\\n\",\n \"\\n\",\n \"```\\n\",\n \"Use the provided APIs to respond to this user query:\\n\",\n \"What are some options for a men's large blue button down shirt\\n\",\n \"```\\n\",\n \"\\n\",\n \"* The LLM returns the parameters for the function call `productsUsingGET`, which is [specified in the provided API spec](https://www.klarna.com/us/shopping/public/openai/v0/api-docs/):\\n\",\n \"```\\n\",\n \"function_call:\\n\",\n \" name: productsUsingGET\\n\",\n \" arguments: |-\\n\",\n \" {\\n\",\n \" \\\"params\\\": {\\n\",\n \" \\\"countryCode\\\": \\\"US\\\",\\n\",\n \" \\\"q\\\": \\\"men's large blue button down shirt\\\",\\n\",\n \" \\\"size\\\": 5,\\n\",\n \" \\\"min_price\\\": 0,\\n\",\n \" \\\"max_price\\\": 100\\n\",\n \" }\\n\",\n \" }\\n\",\n \" ```\\n\",\n \" \\n\",\n \"\\n\",\n \" \\n\",\n \"* This `Dict` above split and the [API is called here](https://github.com/langchain-ai/langchain/blob/7fc07ba5df99b9fa8bef837b0fafa220bc5c932c/libs/langchain/langchain/chains/openai_functions/openapi.py#L215).\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"1fe49a0d\",\n \"metadata\": {},\n \"source\": [\n \"## API Chain \\n\",\n \"\\n\",\n \"We 
can also build our own interface to external APIs using the `APIChain` and provided API documentation.\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": 4,\n \"id\": \"4ef0c3d0\",\n \"metadata\": {},\n \"outputs\": [\n {\n \"name\": \"stdout\",\n \"output_type\": \"stream\",\n \"text\": [\n \"\\n\",\n \"\\n\",\n \"\\u001b[1m> Entering new APIChain chain...\\u001b[0m\\n\",\n \"\\u001b[32;1m\\u001b[1;3mhttps://api.open-meteo.com/v1/forecast?latitude=48.1351&longitude=11.5820&hourly=temperature_2m&temperature_unit=fahrenheit¤t_weather=true\\u001b[0m\\n\",\n \"\\u001b[33;1m\\u001b[1;3m{\\\"latitude\\\":48.14,\\\"longitude\\\":11.58,\\\"generationtime_ms\\\":0.1710653305053711,\\\"utc_offset_seconds\\\":0,\\\"timezone\\\":\\\"GMT\\\",\\\"timezone_abbreviation\\\":\\\"GMT\\\",\\\"elevation\\\":521.0,\\\"current_weather_units\\\":{\\\"time\\\":\\\"iso8601\\\",\\\"interval\\\":\\\"seconds\\\",\\\"temperature\\\":\\\"\u00b0F\\\",\\\"windspeed\\\":\\\"km/h\\\",\\\"winddirection\\\":\\\"\u00b0\\\",\\\"is_day\\\":\\\"\\\",\\\"weathercode\\\":\\\"wmo 
code\\\"},\\\"current_weather\\\":{\\\"time\\\":\\\"2023-11-01T21:30\\\",\\\"interval\\\":900,\\\"temperature\\\":46.5,\\\"windspeed\\\":7.7,\\\"winddirection\\\":259,\\\"is_day\\\":0,\\\"weathercode\\\":3},\\\"hourly_units\\\":{\\\"time\\\":\\\"iso8601\\\",\\\"temperature_2m\\\":\\\"\u00b0F\\\"},\\\"hourly\\\":{\\\"time\\\":[\\\"2023-11-01T00:00\\\",\\\"2023-11-01T01:00\\\",\\\"2023-11-01T02:00\\\",\\\"2023-11-01T03:00\\\",\\\"2023-11-01T04:00\\\",\\\"2023-11-01T05:00\\\",\\\"2023-11-01T06:00\\\",\\\"2023-11-01T07:00\\\",\\\"2023-11-01T08:00\\\",\\\"2023-11-01T09:00\\\",\\\"2023-11-01T10:00\\\",\\\"2023-11-01T11:00\\\",\\\"2023-11-01T12:00\\\",\\\"2023-11-01T13:00\\\",\\\"2023-11-01T14:00\\\",\\\"2023-11-01T15:00\\\",\\\"2023-11-01T16:00\\\",\\\"2023-11-01T17:00\\\",\\\"2023-11-01T18:00\\\",\\\"2023-11-01T19:00\\\",\\\"2023-11-01T20:00\\\",\\\"2023-11-01T21:00\\\",\\\"2023-11-01T22:00\\\",\\\"2023-11-01T23:00\\\",\\\"2023-11-02T00:00\\\",\\\"2023-11-02T01:00\\\",\\\"2023-11-02T02:00\\\",\\\"2023-11-02T03:00\\\",\\\"2023-11-02T04:00\\\",\\\"2023-11-02T05:00\\\",\\\"2023-11-02T06:00\\\",\\\"2023-11-02T07:00\\\",\\\"2023-11-02T08:00\\\",\\\"2023-11-02T09:00\\\",\\\"2023-11-02T10:00\\\",\\\"2023-11-02T11:00\\\",\\\"2023-11-02T12:00\\\",\\\"2023-11-02T13:00\\\",\\\"2023-11-02T14:00\\\",\\\"2023-11-02T15:00\\\",\\\"2023-11-02T16:00\\\",\\\"2023-11-02T17:00\\\",\\\"2023-11-02T18:00\\\",\\\"2023-11-02T19:00\\\",\\\"2023-11-02T20:00\\\",\\\"2023-11-02T21:00\\\",\\\"2023-11-02T22:00\\\",\\\"2023-11-02T23:00\\\",\\\"2023-11-03T00:00\\\",\\\"2023-11-03T01:00\\\",\\\"2023-11-03T02:00\\\",\\\"2023-11-03T03:00\\\",\\\"2023-11-03T04:00\\\",\\\"2023-11-03T05:00\\\",\\\"2023-11-03T06:00\\\",\\\"2023-11-03T07:00\\\",\\\"2023-11-03T08:00\\\",\\\"2023-11-03T09:00\\\",\\\"2023-11-03T10:00\\\",\\\"2023-11-03T11:00\\\",\\\"2023-11-03T12:00\\\",\\\"2023-11-03T13:00\\\",\\\"2023-11-03T14:00\\\",\\\"2023-11-03T15:00\\\",\\\"2023-11-03T16:00\\\",\\\"2023-11-03T17:00\\\",\\\"2023-11-03T18:00\\
\",\\\"2023-11-03T19:00\\\",\\\"2023-11-03T20:00\\\",\\\"2023-11-03T21:00\\\",\\\"2023-11-03T22:00\\\",\\\"2023-11-03T23:00\\\",\\\"2023-11-04T00:00\\\",\\\"2023-11-04T01:00\\\",\\\"2023-11-04T02:00\\\",\\\"2023-11-04T03:00\\\",\\\"2023-11-04T04:00\\\",\\\"2023-11-04T05:00\\\",\\\"2023-11-04T06:00\\\",\\\"2023-11-04T07:00\\\",\\\"2023-11-04T08:00\\\",\\\"2023-11-04T09:00\\\",\\\"2023-11-04T10:00\\\",\\\"2023-11-04T11:00\\\",\\\"2023-11-04T12:00\\\",\\\"2023-11-04T13:00\\\",\\\"2023-11-04T14:00\\\",\\\"2023-11-04T15:00\\\",\\\"2023-11-04T16:00\\\",\\\"2023-11-04T17:00\\\",\\\"2023-11-04T18:00\\\",\\\"2023-11-04T19:00\\\",\\\"2023-11-04T20:00\\\",\\\"2023-11-04T21:00\\\",\\\"2023-11-04T22:00\\\",\\\"2023-11-04T23:00\\\",\\\"2023-11-05T00:00\\\",\\\"2023-11-05T01:00\\\",\\\"2023-11-05T02:00\\\",\\\"2023-11-05T03:00\\\",\\\"2023-11-05T04:00\\\",\\\"2023-11-05T05:00\\\",\\\"2023-11-05T06:00\\\",\\\"2023-11-05T07:00\\\",\\\"2023-11-05T08:00\\\",\\\"2023-11-05T09:00\\\",\\\"2023-11-05T10:00\\\",\\\"2023-11-05T11:00\\\",\\\"2023-11-05T12:00\\\",\\\"2023-11-05T13:00\\\",\\\"2023-11-05T14:00\\\",\\\"2023-11-05T15:00\\\",\\\"2023-11-05T16:00\\\",\\\"2023-11-05T17:00\\\",\\\"2023-11-05T18:00\\\",\\\"2023-11-05T19:00\\\",\\\"2023-11-05T20:00\\\",\\\"2023-11-05T21:00\\\",\\\"2023-11-05T22:00\\\",\\\"2023-11-05T23:00\\\",\\\"2023-11-06T00:00\\\",\\\"2023-11-06T01:00\\\",\\\"2023-11-06T02:00\\\",\\\"2023-11-06T03:00\\\",\\\"2023-11-06T04:00\\\",\\\"2023-11-06T05:00\\\",\\\"2023-11-06T06:00\\\",\\\"2023-11-06T07:00\\\",\\\"2023-11-06T08:00\\\",\\\"2023-11-06T09:00\\\",\\\"2023-11-06T10:00\\\",\\\"2023-11-06T11:00\\\",\\\"2023-11-06T12:00\\\",\\\"2023-11-06T13:00\\\",\\\"2023-11-06T14:00\\\",\\\"2023-11-06T15:00\\\",\\\"2023-11-06T16:00\\\",\\\"2023-11-06T17:00\\\",\\\"2023-11-06T18:00\\\",\\\"2023-11-06T19:00\\\",\\\"2023-11-06T20:00\\\",\\\"2023-11-06T21:00\\\",\\\"2023-11-06T22:00\\\",\\\"2023-11-06T23:00\\\",\\\"2023-11-07T00:00\\\",\\\"2023-11-07T01:00\\\",\\\"2023-11-07T02:00\\
\",\\\"2023-11-07T03:00\\\",\\\"2023-11-07T04:00\\\",\\\"2023-11-07T05:00\\\",\\\"2023-11-07T06:00\\\",\\\"2023-11-07T07:00\\\",\\\"2023-11-07T08:00\\\",\\\"2023-11-07T09:00\\\",\\\"2023-11-07T10:00\\\",\\\"2023-11-07T11:00\\\",\\\"2023-11-07T12:00\\\",\\\"2023-11-07T13:00\\\",\\\"2023-11-07T14:00\\\",\\\"2023-11-07T15:00\\\",\\\"2023-11-07T16:00\\\",\\\"2023-11-07T17:00\\\",\\\"2023-11-07T18:00\\\",\\\"2023-11-07T19:00\\\",\\\"2023-11-07T20:00\\\",\\\"2023-11-07T21:00\\\",\\\"2023-11-07T22:00\\\",\\\"2023-11-07T23:00\\\"],\\\"temperature_2m\\\":[47.9,46.9,47.1,46.6,45.8,45.2,43.4,43.5,46.8,51.5,55.0,56.3,58.1,57.9,57.0,56.6,54.4,52.1,49.1,48.3,47.7,46.9,46.2,45.8,44.4,42.4,41.7,41.7,42.0,42.7,43.6,44.3,45.9,48.0,49.1,50.7,52.2,52.6,51.9,50.3,48.1,47.4,47.1,46.9,46.2,45.7,45.6,45.6,45.7,45.3,45.1,44.2,43.6,43.2,42.8,41.6,41.0,42.1,42.4,42.3,42.7,43.9,44.2,43.6,41.9,40.4,39.0,40.8,40.2,40.1,39.6,38.8,38.2,36.9,35.8,36.4,37.3,38.5,38.9,39.0,41.8,45.4,48.7,50.8,51.7,52.1,51.3,49.8,48.6,47.8,47.0,46.3,45.9,45.6,45.7,46.1,46.3,46.4,46.3,46.3,45.8,45.4,45.5,47.1,49.3,51.2,52.4,53.1,53.5,53.4,53.0,52.4,51.6,50.5,49.6,49.0,48.6,48.1,47.6,47.0,46.4,46.0,45.5,45.1,44.4,43.7,43.9,45.6,48.1,50.3,51.7,52.8,53.5,52.7,51.5,50.2,48.8,47.4,46.2,45.5,45.0,44.6,44.3,44.2,43.9,43.4,43.0,42.6,42.3,42.0,42.2,43.0,44.3,45.5,46.8,48.1,48.9,49.0,48.7,48.1,47.4,46.5,45.7,45.1,44.5,44.3,44.5,45.1]}}\\u001b[0m\\n\",\n \"\\n\",\n \"\\u001b[1m> Finished chain.\\u001b[0m\\n\"\n ]\n },\n {\n \"data\": {\n \"text/plain\": [\n \"' The current temperature in Munich, Germany is 46.5\u00b0F.'\"\n ]\n },\n \"execution_count\": 4,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n \"source\": [\n \"from langchain.chains import APIChain\\n\",\n \"from langchain.chains.api import open_meteo_docs\\n\",\n \"from langchain.llms import OpenAI\\n\",\n \"\\n\",\n \"llm = OpenAI(temperature=0)\\n\",\n \"chain = APIChain.from_llm_and_api_docs(\\n\",\n \" llm,\\n\",\n \" 
open_meteo_docs.OPEN_METEO_DOCS,\\n\",\n \" verbose=True,\\n\",\n \" limit_to_domains=[\\\"https://api.open-meteo.com/\\\"],\\n\",\n \")\\n\",\n \"chain.run(\\n\",\n \" \\\"What is the weather like right now in Munich, Germany in degrees Fahrenheit?\\\"\\n\",\n \")\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"5b179318\",\n \"metadata\": {},\n \"source\": [\n \"Note that we supply information about the API:\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": 37,\n \"id\": \"a9e03cc2\",\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/plain\": [\n \"'BASE URL: https://api.open-meteo.com/\\\\n\\\\nAPI Documentation\\\\nThe API endpoint /v1/forecast accepts a geographical coordinate, a list of weather variables and responds with a JSON hourly weather forecast for 7 days. Time always starts at 0:00 today and contains 168 hours. All URL parameters are listed below:\\\\n\\\\nParameter\\\\tFormat\\\\tRequired\\\\tDefault\\\\tDescription\\\\nlatitude, longitude\\\\tFloating point\\\\tYes\\\\t\\\\tGeographical WGS84 coordinate of the location\\\\nhourly\\\\tString array\\\\tNo\\\\t\\\\tA list of weather variables which shou'\"\n ]\n },\n \"execution_count\": 37,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n \"source\": [\n \"open_meteo_docs.OPEN_METEO_DOCS[0:500]\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"3fab7930\",\n \"metadata\": {},\n \"source\": [\n \"Under the hood, we do two things:\\n\",\n \" \\n\",\n \"* `api_request_chain`: Generate an API URL based on the input question and the api_docs\\n\",\n \"* `api_answer_chain`: generate a final answer based on the API response\\n\",\n \"\\n\",\n \"We can look at the [LangSmith trace](https://smith.langchain.com/public/1e0d18ca-0d76-444c-97df-a939a6a815a7/r) to inspect this:\\n\",\n \"\\n\",\n \"* The `api_request_chain` produces the API url from our question and the API documentation:\\n\",\n \"\\n\",\n \"\\n\",\n \"\\n\",\n \"* 
[Here](https://github.com/langchain-ai/langchain/blob/bbd22b9b761389a5e40fc45b0570e1830aabb707/libs/langchain/langchain/chains/api/base.py#L82) we make the API request with the API url.\\n\",\n \"* The `api_answer_chain` takes the response from the API and provides us with a natural language response:\\n\",\n \"\\n\",\n \"\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"2511f446\",\n \"metadata\": {},\n \"source\": [\n \"### Going deeper\\n\",\n \"\\n\",\n \"**Test with other APIs**\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": null,\n \"id\": \"1e1cf418\",\n \"metadata\": {},\n \"outputs\": [],\n \"source\": [\n \"import os\\n\",\n \"\\n\",\n \"os.environ[\\\"TMDB_BEARER_TOKEN\\\"] = \\\"\\\"\\n\",\n \"from langchain.chains.api import tmdb_docs\\n\",\n \"\\n\",\n \"headers = {\\\"Authorization\\\": f\\\"Bearer {os.environ['TMDB_BEARER_TOKEN']}\\\"}\\n\",\n \"chain = APIChain.from_llm_and_api_docs(\\n\",\n \" llm,\\n\",\n \" tmdb_docs.TMDB_DOCS,\\n\",\n \" headers=headers,\\n\",\n \" verbose=True,\\n\",\n \" limit_to_domains=[\\\"https://api.themoviedb.org/\\\"],\\n\",\n \")\\n\",\n \"chain.run(\\\"Search for 'Avatar'\\\")\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": null,\n \"id\": \"dd80a717\",\n \"metadata\": {},\n \"outputs\": [],\n \"source\": [\n \"import os\\n\",\n \"\\n\",\n \"from langchain.chains import APIChain\\n\",\n \"from langchain.chains.api import podcast_docs\\n\",\n \"from langchain.llms import OpenAI\\n\",\n \"\\n\",\n \"listen_api_key = \\\"xxx\\\" # Get api key here: https://www.listennotes.com/api/pricing/\\n\",\n \"llm = OpenAI(temperature=0)\\n\",\n \"headers = {\\\"X-ListenAPI-Key\\\": listen_api_key}\\n\",\n \"chain = APIChain.from_llm_and_api_docs(\\n\",\n \" llm,\\n\",\n \" podcast_docs.PODCAST_DOCS,\\n\",\n \" headers=headers,\\n\",\n \" verbose=True,\\n\",\n \" limit_to_domains=[\\\"https://listen-api.listennotes.com/\\\"],\\n\",\n \")\\n\",\n \"chain.run(\\n\",\n \" \\\"Search for 
'silicon valley bank' podcast episodes, audio length is more than 30 minutes, return only 1 results\\\"\\n\",\n \")\"\n ]\n },\n {\n \"cell_type\": \"markdown\",\n \"id\": \"a5939be5\",\n \"metadata\": {},\n \"source\": [\n \"**Web requests**\\n\",\n \"\\n\",\n \"URL requests are such a common use-case that we have the `LLMRequestsChain`, which makes an HTTP GET request. \"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": 39,\n \"id\": \"0b158296\",\n \"metadata\": {},\n \"outputs\": [],\n \"source\": [\n \"from langchain.chains import LLMChain, LLMRequestsChain\\n\",\n \"from langchain.llms import OpenAI\\n\",\n \"from langchain.prompts import PromptTemplate\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": 40,\n \"id\": \"d49c33e4\",\n \"metadata\": {},\n \"outputs\": [],\n \"source\": [\n \"template = \\\"\\\"\\\"Between >>> and <<< are the raw search result text from google.\\n\",\n \"Extract the answer to the question '{query}' or say \\\"not found\\\" if the information is not contained.\\n\",\n \"Use the format\\n\",\n \"Extracted:<answer or \\\"not found\\\">\\n\",\n \">>> {requests_result} <<<\\n\",\n \"Extracted:\\\"\\\"\\\"\\n\",\n \"\\n\",\n \"PROMPT = PromptTemplate(\\n\",\n \" input_variables=[\\\"query\\\", \\\"requests_result\\\"],\\n\",\n \" template=template,\\n\",\n \")\"\n ]\n },\n {\n \"cell_type\": \"code\",\n \"execution_count\": 43,\n \"id\": \"d0fd4aab\",\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/plain\": [\n \"{'query': 'What are the Three (3) biggest countries, and their respective sizes?',\\n\",\n \" 'url': 'https://www.google.com/search?q=What+are+the+Three+(3)+biggest+countries,+and+their+respective+sizes?',\\n\",\n \" 'output': ' Russia (17,098,242 km\u00b2), Canada (9,984,670 km\u00b2), China (9,706,961 km\u00b2)'}\"\n ]\n },\n \"execution_count\": 43,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n \"source\": [\n \"chain = 
LLMRequestsChain(llm_chain=LLMChain(llm=OpenAI(temperature=0), prompt=PROMPT))\\n\",\n \"question = \\\"What are the Three (3) biggest countries, and their respective sizes?\\\"\\n\",\n \"inputs = {\\n\",\n \" \\\"query\\\": question,\\n\",\n \" \\\"url\\\": \\\"https://www.google.com/search?q=\\\" + question.replace(\\\" \\\", \\\"+\\\"),\\n\",\n \"}\\n\",\n \"chain(inputs)\"\n ]\n }\n ],\n \"metadata\": {\n \"kernelspec\": {\n \"display_name\": \"Python 3 (ipykernel)\",\n \"language\": \"python\",\n \"name\": \"python3\"\n },\n \"language_info\": {\n \"codemirror_mode\": {\n \"name\": \"ipython\",\n \"version\": 3\n },\n \"file_extension\": \".py\",\n \"mimetype\": \"text/x-python\",\n \"name\": \"python\",\n \"nbconvert_exporter\": \"python\",\n \"pygments_lexer\": \"ipython3\",\n \"version\": \"3.11.4\"\n }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 5\n"""
class AlttexterRequest(BaseModel):
    """Request payload for the alttexter service.

    Carries a markdown/notebook article plus the images it references,
    supplied either inline (base64-encoded) or by URL.
    """

    # Full article text; the example is a raw Jupyter-notebook JSON document.
    text: str = Field(
        ...,
        description="Article containing markdown formatted text.",
        example=AlttexterRequest_text_example,
    )
    # default_factory makes the per-instance empty container explicit
    # (a shared {} / [] literal is the classic mutable-default pitfall;
    # pydantic copies plain defaults, but the factory states the intent).
    images: dict = Field(
        default_factory=dict,
        description="Local images defined in markdown article encoded in base64 format.",
        example={
            "api_use_case.png": "<base64-encoded-data>",
            "api_function_call.png": "<base64-encoded-data>",
        },
    )
    image_urls: List[str] = Field(
        default_factory=list,
        description="Image URLs defined in markdown article.",
        example=[
            "https://github.com/langchain-ai/langchain/blob/b9636e5c987e1217afcdf83e9c311568ad50c304/docs/static/img/api_chain.png?raw=true",
            "https://github.com/langchain-ai/langchain/blob/b9636e5c987e1217afcdf83e9c311568ad50c304/docs/static/img/api_chain_response.png?raw=true",
        ],
    )
class ImageAltText(BaseModel):
    """Generated accessibility metadata for a single image in an article."""
    # Identifier matching how the image appears in the request: a local
    # file name/path or a full URL.
    name: str = Field(..., description="File name of the image including path or URL.")
    title: str = Field(..., description="Title of the image.")
    alt_text: str = Field(..., description="Concise alternative text for the image. The text should follow the Microsoft Style Guide.")
class AlttexterResponse(BaseModel):
    """Base response: one ImageAltText entry per image in the request."""
    images: List[ImageAltText] = Field(..., description="The list of images included in the request as a file.")
class ExtendedAlttexterResponse(AlttexterResponse):
    """AlttexterResponse extended with an optional LangSmith trace URL."""
    # None when no trace was recorded for the run.
    run_url: Optional[str] = Field(None, description="LangSmith trace URL.")

    class Config:
        # Example payload surfaced in the generated OpenAPI schema.
        # NOTE(review): `schema_extra` is the pydantic v1 spelling — presumably
        # this project pins pydantic<2; confirm before upgrading.
        schema_extra = {
            "example": {
                "images": [
                    {
                        "name": "api_use_case.png",
                        "title": "API Use Case Diagram",
                        "alt_text": "Diagram illustrating the use case of an LLM interacting with an external API to retrieve context."
                    },
                    {
                        "name": "api_function_call.png",
                        "title": "API Function Call Process",
                        "alt_text": "Flowchart showing the process of an LLM formulating an API call based on a user's question and the API specification."
                    },
                    {
                        "name": "https://github.com/langchain-ai/langchain/blob/b9636e5c987e1217afcdf83e9c311568ad50c304/docs/static/img/api_chain.png?raw=true",
                        "title": "API Request Chain Trace",
                        "alt_text": "Screenshot of a LangSmith trace showing the API request chain generating an API URL from a question and API documentation."
                    },
                    {
                        "name": "https://github.com/langchain-ai/langchain/blob/b9636e5c987e1217afcdf83e9c311568ad50c304/docs/static/img/api_chain_response.png?raw=true",
                        "title": "API Response Chain Trace",
                        "alt_text": "Screenshot of a LangSmith trace showing the API response chain providing a natural language response based on the API's output."
                    }
                ],
                "run_url": "https://smith.langchain.com/public/7987b795-2ace-4161-92c7-9a57b96d1eee/r"
            }
        }
class ErrorResponse(BaseModel):
    """Standardized error body returned to clients on endpoint failure."""
    error: str = Field(..., description="A brief description of the error.")
    message: str = Field(..., description="A more detailed explanation of the error.")
    # Duplicated in the body (in addition to the HTTP status line) so clients
    # parsing only the JSON payload still see the code.
    status_code: int = Field(..., description="The HTTP status code associated with the error.")
def handle_endpoint_error(e: Exception) -> NoReturn:
    """Log an endpoint exception and raise a standardized 500 HTTPException.

    Intended to be called from a FastAPI endpoint's exception handler so
    every failure produces the same client-facing error shape while the
    real cause is preserved in the server logs.

    Args:
        e: The exception caught during endpoint processing.

    Raises:
        HTTPException: Always — status 500 with an ErrorResponse payload
            as the `detail`, chained to the original exception.
    """
    # logging.exception records the full traceback (logging.error with a
    # formatted message would lose it); lazy %-args avoid eager formatting.
    logging.exception("An error occurred: %s", e)
    error_response = ErrorResponse(
        error="Server Error",
        message="An internal error occurred. Please try again later.",
        status_code=500,
    )
    # `from e` preserves the causal chain for server-side diagnostics;
    # FastAPI turns the HTTPException into the HTTP response.
    raise HTTPException(
        status_code=500,
        detail=error_response.dict(),
    ) from e