 AI-  LangChain  OpenRouter
 


     AI?   LangChain (       LLM)  OpenRouter (        API).

 :

    LangChain  OpenRouter;

      (, API,  );

  ,    ;

      .

  :

?     ;

      ;

   .

  :

,   AI   ;

   ,   ;

,  AI?.

:      AI?,   .







 AI-  LangChain  OpenRouter





 1.    




   

                  .    ,          OpenRouter    LangChain   , autonom   .      ,     :            , ,  ,     .           ,   ,        ,    .



   .  LLM   :   ,  ,  .            .   :       ,  ,  ,      API,     . OpenRouter         ,      . LangChain     , ,   ,       .    ,    ,   .



    .       :  ,    ,   ,  ,        .  ,       ,    ,  ,      .     :   -       .     ,    :     ,        .



 .    :   OpenRouter, LangChain   .     :  , ,  ,    .     :  ,  , -,   API,  .       : , , , , .        :  ,  ,  , .     :  ,    ,    .     ,  , -   .



   .    ,  , -   ,     AI-.    Python    API,        .   ,        ,  .    ,      ,           .



 .   ,   ,    ,         .      API,          .    ,     .       ,        .   ,            .   ,           . OpenRouter   ,     LLM   API. LangChain     ,   .



 OpenRouter  LangChain. OpenRouter     ,   ,          .      :             ,    . LangChain,   ,   :  , ,  , ,    .       ,        .  OpenRouter + LangChain          .



 ,    .    ,  ,   OpenRouter     .    :    ,  ,     .    ,      ,   :    .      ,   API,    ,  .   ,   ,      .  ,  -: ,  ,  ,   .



 .    :    ,     .  :    , , ,   .        .   fallback:     ,       .  :    ,    ,   .  : ,  ,   , .  :  ,    ,  ,     .



  .   :    ,  ,  . :    , ,      .   :   CRM,  ,    -. :    , , . :  , ,  . :   ,  , .



  .   ,     ,  API  .       :    ,     ,   ,  -   .        .     .        .



-   .      .     OpenRouter.   API   .   :  ->  ->   ->  -> .    .     .   1020  .  : , , , .     .         .



 .       ,  ,   ,  ,     .       ,     :  ,  ,  ,  .  LangChain     :  , few-shot ,   .       .



   .      :    ,      ,   ,  .   ,      .            .     fallback    .     ,     .



.     API   .  ,     :      ,      .    , ,        . OpenRouter            .



  UX.     ,   .     ,  ,   .   ,       ,     .        ,      .



.   ,  ,    ,   , .          .     streaming   -.      : ,   .



  .        ,     -,    ,    .   ,    ,     .       ,      .



 .   ,  ,   ,    ,   .        ,    . ,   ,    ,         .



 .     ,    ,    LangChain,     OpenRouter,  ,         .     ,    , ,   .      ,   -.     ,     ,     .



  .            .      ,    ,     .      :   ,    ,        .                .



.     ,         OpenRouter   LangChain.



   .     :  Python,   ,   ,          OpenRouter  LangChain.     ,       .



 Python   .   Python 3.10  .  :

python --version

    ,     python3.       :

mkdir ai_agent_project

cd ai_agent_project

python -m venv .venv

  Linux/macOS:

source .venv/bin/activate

  Windows:

.venv\Scripts\activate

 pip:

pip install --upgrade pip



 LangChain  .    :

pip install langchain langchain-core langchain-community

 langchain-community     .    OpenRouter     HTTP-   ,     .        :

pip install httpx tenacity pydantic python-dotenv

     :

pip install python-dotenv

        , :

pip install langchain-text-splitters langchain-chroma sentence-transformers

  experiments    :

pip install rich streamlit

    :

pip install pytest black isort



  OpenRouter.   openrouter.ai,   API  .   .env     :

OPENROUTER_API_KEY=your_key_here

OPENROUTER_BASE_URL=https://openrouter.ai/api/v1

  .   :

OPENROUTER_DEFAULT_MODEL=anthropic/claude-3.5-sonnet

OPENROUTER_TIMEOUT=60

   SITE_URL  SITE_NAME        .    .env  .  .env  .gitignore:

.env

.env.local

secrets.*



    .   ,     OpenRouter   .   HTTP-,  ,   .  check_openrouter.py:

import os

import httpx

from dotenv import load_dotenv



load_dotenv()



API_KEY = os.getenv("OPENROUTER_API_KEY")

BASE_URL = os.getenv("OPENROUTER_BASE_URL", "https://openrouter.ai/api/v1")

MODEL = os.getenv("OPENROUTER_DEFAULT_MODEL", "anthropic/claude-3.5-sonnet")



def test_call():

headers = {

"Authorization": f"Bearer {API_KEY}",

"Content-Type": "application/json",

"HTTP-Referer": "https://example.com",

"X-Title": "AI Agent Test"

}

payload = {

"model": MODEL,

"messages": [

{"role": "system", "content": "You are a helpful assistant."},

{"role": "user", "content": "!   ''  42."}

],

"max_tokens": 100

}

try:

resp = httpx.post(f"{BASE_URL}/chat/completions", json=payload, headers=headers, timeout=60)

resp.raise_for_status()

data = resp.json()

content = data["choices"][0]["message"]["content"]

print(" :", content)

print(" !")

except Exception as e:

print(":", e)



if __name__ == "__main__":

test_call()

:

python check_openrouter.py

       .



  LangChain. LangChain      .  OpenRouter    ChatOpenAI    URL,    ,   OpenAI-.     LangChain:

from langchain_core.messages import HumanMessage, SystemMessage

from langchain_community.chat_models import ChatOpenAI

import os

from dotenv import load_dotenv



load_dotenv()



model = ChatOpenAI(

model="anthropic/claude-3.5-sonnet",

openai_api_key=os.getenv("OPENROUTER_API_KEY"),

openai_api_base=os.getenv("OPENROUTER_BASE_URL"),

temperature=0.3,

max_tokens=500

)



messages = [

SystemMessage(content="  .     ."),

HumanMessage(content="  ''  .")

]



result = model.invoke(messages)

print(result.content)

  -    ChatOpenAI    OpenRouter,    LLM-  RunnableLambda    HTTP-  LangChain  LLM-.       .



   LangChain.   ,       .        .  simple_agent.py:

from langchain_core.messages import HumanMessage, SystemMessage

from langchain_community.chat_models import ChatOpenAI

from dotenv import load_dotenv

import os



load_dotenv()



model = ChatOpenAI(

model="anthropic/claude-3.5-sonnet",

openai_api_key=os.getenv("OPENROUTER_API_KEY"),

openai_api_base=os.getenv("OPENROUTER_BASE_URL"),

temperature=0.1,

max_tokens=500

)



def run_agent(query: str):

system = SystemMessage(content="  ,      .   .")

user = HumanMessage(content=query)

response = model.invoke([system, user])

return response.content



if __name__ == "__main__":

print(run_agent("     ."))

  ,   .        .



    LangChain.     ,  . :

pip install duckduckgo-search # 

pip install wikipedia # 

pip install sqlalchemy #  

pip install beautifulsoup4 #  -

   .    ,         .



   . ,    OpenRouter     .     402  429,    .        .     tenacity:

from tenacity import retry, stop_after_attempt, wait_exponential



@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=2, max=10))

def call_model_safe(payload, headers):

import httpx

resp = httpx.post("https://openrouter.ai/api/v1/chat/completions", json=payload, headers=headers, timeout=60)

resp.raise_for_status()

return resp.json()

        API.



  .   ,    .  Python  logging :

import logging

logging.basicConfig(level=logging.INFO, format="%(asctime)s  %(levelname)s  %(message)s")

logger = logging.getLogger("agent")

logger.info(" ")

    :  ,  , ,  .      Rich:

from rich.console import Console

console = Console()

console.print("[bold green][/bold green] ")

      .



 .     :

src/

__init__.py

llm.py #   OpenRouter

tools.py #   

memory.py #    

planner.py #  

reflexion.py #    

agent.py #   

tests/

test_tools.py

test_agent.py

config/

prompts.yaml #  

settings.py #   

.env

.gitignore

README.md

requirements.txt

   ,   .



  requirements.txt.      :

pip freeze > requirements.txt

,    :

langchain

langchain-core

langchain-community

httpx

tenacity

python-dotenv

rich

duckduckgo-search

wikipedia

sqlalchemy

beautifulsoup4

sentence-transformers

langchain-chroma

streamlit (  -)

   .



   .  ,    :

AGENT_DEBUG=true #   

AGENT_MAX_STEPS=10 #   

AGENT_TOOL_TIMEOUT=30 #   

MODEL_TEMPERATURE=0.1 #   

   :

debug = os.getenv("AGENT_DEBUG", "false").lower() == "true"

if debug:

logger.setLevel(logging.DEBUG)

      dev    prod.



    .   :  ->   -> . :

from langchain_community.utilities import DuckDuckGoSearchAPIWrapper

from langchain.agents import Tool

from langchain_core.messages import HumanMessage, SystemMessage

from langchain_community.chat_models import ChatOpenAI

import os

from dotenv import load_dotenv



load_dotenv()



search = DuckDuckGoSearchAPIWrapper()

tool = Tool(name="search", func=search.run, description="     .")



model = ChatOpenAI(

model="anthropic/claude-3.5-sonnet",

openai_api_key=os.getenv("OPENROUTER_API_KEY"),

openai_api_base=os.getenv("OPENROUTER_BASE_URL"),

temperature=0.1

)



def simple_agent_with_tool(query: str):

#   ,    

sys_msg = SystemMessage(content=" .      ,   .  .")

user_msg = HumanMessage(content=query)

#            

search_result = tool.func(query)

augmented_messages = [

sys_msg,

HumanMessage(content=f": {query}\n :\n{search_result}\n\n  .")

]

response = model.invoke(augmented_messages)

return response.content



if __name__ == "__main__":

print(simple_agent_with_tool("    ?"))

       .        LangChain Agents   .



  .       :

# tests/test_agent.py

import sys

sys.path.append("src")

from simple_agent import run_agent



def test_run_agent():

res = run_agent("  ''")

assert "" in res.lower()

  pytest  :

pytest tests/

   -     ,     API    .



  .      .  .env     .     ,   : AWS Secrets Manager, Google Secret Manager, Azure Key Vault.      .env:   600    .    ,         .



  streaming.      .    streaming  OpenRouter,     .  LangChain   stream:

for chunk in model.stream([HumanMessage(content="    .")]):

print(chunk.content, end="", flush=True)

     .



  .     :  ,  ,   ,  .   :

from dataclasses import dataclass

import time



@dataclass

class Metrics:

tokens_in: int = 0

tokens_out: int = 0

tool_calls: int = 0

duration_ms: int = 0



def track_metrics_start():

return {"start": time.time()}



def track_metrics_end(stats, metrics: Metrics):

metrics.duration_ms = int((time.time()  stats["start"]) * 1000)

return metrics

  tokens_in/tokens_out    API,    usage. OpenRouter   usage,   .       .



   .        try/except.     ,   :  ,  ,  ,   .  LangChain            .          .



 .   config/prompts.yaml   :

system_agent: " .  .   ."

system_reflexion: ",    .  ,  ."

system_tool_guidance: ",   .  ,      tool:_|."

   :

import yaml

with open("config/prompts.yaml") as f:

prompts = yaml.safe_load(f)

system = prompts["system_agent"]

       .



  .       ,   :

1)      ,   .     . 2)         . ,     . 3)    ,    .  streaming-   .



    .     ,   :

   ;

    LangChain;

    ;

   ;

  .



 .      AI-,      OpenRouter       LangChain.    ,    ,      .        :   ,   ,        .     ,    ,      .    ,   ,      . !






 2.    LangChain  OpenRouter




 2.    LangChain  OpenRouter

     AI-.       ,    ,  ,        LangChain  OpenRouter.           ,       API-.      ,      .



  

   ,     .   Python     :        .   ,     .



    Linux  macOS:

1.  .

2.   : mkdir ai_agent_project && cd ai_agent_project.

3.   : python3 -m venv venv.

4.  : source venv/bin/activate.



    Windows (PowerShell):

1.   : New-Item -ItemType Directory -Path "ai_agent_project"; Set-Location ai_agent_project.

2.   : python -m venv venv.

3.  : .\venv\Scripts\Activate.ps1. (,   Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser    ).



    Windows (CMD):

1. md ai_agent_project && cd ai_agent_project.

2. python -m venv venv.

3. venv\Scripts\activate.



      (venv)    .  ,          Python.



 Python   

,     Python  3.8  .   3.10  3.11       AI.    :

python version



 pip ( )          :

pip install --upgrade pip



      (API-)  python-dotenv:

pip install python-dotenv



 LangChain

LangChain   ,    .       -  .   :      ( ,     )     .



    OpenRouter              API.



 :

pip install langchain



 ,  LangChain Core    (, , ),        LLM.    OpenRouter      OpenAI,   OpenRouter   API.



     (streaming)    :

pip install langchain-core



  LangChain 0.2.x      langchain-community    ,   OpenRouter       OpenAI SDK.



   ,      ,   :

pip install langchain-openai #     OpenAI    OpenRouter

pip install langgraph #      ( )

pip install numpy #        

pip install requests #   HTTP   



:       "  " (LangGraph),           ,       .



 OpenRouter

OpenRouter    .      Python,    .       API,   OpenAI,    HTTP-.



      OpenAI SDK,   OpenRouter    :

pip install openai



     LangChain (  LangChain   ),       .



 API- OpenRouter

  API       .

1.    openrouter.ai.

2.     .

3.    "Keys" ().

4.  "Create Key" ( ).

5.    (, "My Agent Project")   .

6. :       . OpenRouter     .



   (Security Best Practices)

   API-    ( .py ).       GitHub,        .      .



      .env.     .

 Linux/macOS: touch .env

 Windows (PowerShell): New-Item -ItemType File -Path ".env"



           :

OPENROUTER_API_KEY=sk-or-v1-__



     URL API OpenRouter:

OPENROUTER_API_BASE=https://openrouter.ai/api/v1



     Python- ,  :

from dotenv import load_dotenv

load_dotenv()



     .



 : LangChain  OpenRouter

OpenRouter   ,      (GPT-4, Claude, Mistral, Llama  ..).      LangChain,       URL OpenRouter   .



  main.py.    :  ChatOpenAI ()    .



 1:  ChatOpenAI (LangChain OpenAI Wrapper)

  ,      LangChain ( ,  ,   ).



  setup_check.py    :



import os

from dotenv import load_dotenv

from langchain_openai import ChatOpenAI

from langchain_core.messages import HumanMessage



#    .env

load_dotenv()



#    URL

api_key = os.getenv("OPENROUTER_API_KEY")

api_base = os.getenv("OPENROUTER_API_BASE")



if not api_key:

raise ValueError("API   ! ,    .env    OPENROUTER_API_KEY.")



#    LangChain

#     base_url      OpenRouter

llm = ChatOpenAI(

model="openai/gpt-4o-mini", #  . : /

openai_api_key=api_key,

openai_api_base=api_base,

temperature=0.7

)



#  

message = HumanMessage(content="!    OpenRouter?")

response = llm.invoke([message])



print(" :")

print(response.content)



#    (Streaming)

print("\n    ")

for chunk in llm.stream([HumanMessage(content="    ,   Python.")]):

print(chunk.content, end="", flush=True)



 :

1. load_dotenv()   .

2. ChatOpenAI   -   API LLM.

3.  openai_api_base     OpenRouter.

4. openai_api_key    .

5.  .invoke()       .

6.  .stream()      (),     .



   python setup_check.py       ,  !



 2:    LangChain (BaseChatModel)

        .     ChatOpenAI  langchain_openai,    .     .



 :  .

OpenRouter   .   ,    .

 :

openai/gpt-4-turbo-preview

anthropic/claude-3-opus:beta

meta-llama/llama-3-8b-instruct

google/gemini-pro-1.5



     OpenRouter.     , OpenRouter  .



   

   ChatOpenAI    ,    :



llm = ChatOpenAI(

model="openai/gpt-4o-mini",

openai_api_key=api_key,

openai_api_base=api_base,

temperature=0.1, #  (0   , 1  /)

max_tokens=1024, #    

top_p=0.9, #     ( )

frequency_penalty=0.5, #     ( -2.0  2.0)

presence_penalty=0.5, #     ( -2.0  2.0)

request_timeout=20 #     

)



   

 ,   (Code Runner, Data Analyst),      0 (, 0.0  0.2).       .

   (, )  0.7  1.0.



  (Chain)   

,     ,    ,    ,     .    .



 LangChain PromptTemplate    .



  :

from langchain_core.prompts import ChatPromptTemplate

from langchain_core.output_parsers import StrOutputParser



  :

prompt = ChatPromptTemplate.from_messages([

("system", "  ,     ."),

("human", "{input}") # {input}   ,    

])



  (Chain):

chain = prompt | llm | StrOutputParser()



   | (),   :

1. prompt:   .

2. llm:   .

3. StrOutputParser():       (    Message).



 :

result = chain.invoke({"input": "  ,    ?"})

print(result)



 StrOutputParser()    ,       AIMessage.



  (Tools)  OpenRouter

     ,   .    ,     .  LangChain  (Tools)   ,       .



  -.    GPT-4    ,     (  Python-)    .



 1:    

from langchain_core.tools import tool



@tool

def multiply(a: int, b: int) -> int:

"""   ."""

return a * b



 2:    

  ,     ,    .



from langchain.agents import create_tool_calling_agent, AgentExecutor



#  

tools = [multiply]



#     (    )

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder



agent_prompt = ChatPromptTemplate.from_messages([

("system", "  .       ."),

("human", "{input}"),

MessagesPlaceholder(variable_name="agent_scratchpad"), #      

])



#  

agent = create_tool_calling_agent(llm, tools, agent_prompt)



#  

agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)



# 

response = agent_executor.invoke({"input": "  123   456?"})

print(response['output'])



  :

1. create_tool_calling_agent  ,    .

2. AgentExecutor   " ->   ->   ->  ".

3.  verbose=True       .



    OpenRouter  LangGraph

 AgentExecutor ( )  .     LangGraph.       .



     LangGraph,    OpenRouter.



 :

pip install langgraph



  agent_graph.py:



import os

from dotenv import load_dotenv

from typing import Literal

from langchain_openai import ChatOpenAI

from langchain_core.tools import tool

from langgraph.prebuilt import create_react_agent

from langchain_core.messages import HumanMessage



#  LLM (OpenRouter)

load_dotenv()

llm = ChatOpenAI(

model="openai/gpt-4o-mini",

openai_api_key=os.getenv("OPENROUTER_API_KEY"),

openai_api_base=os.getenv("OPENROUTER_API_BASE")

)



# 

@tool

def get_weather(city: str) -> str:

"""    .    ."""

return f"  {city}   +25C."



@tool

def get_stock_price(ticker: str) -> str:

"""    ."""

return f"  {ticker}    5%."



tools = [get_weather, get_stock_price]



#   LangGraph (React Agent  Reasoning and Acting)

#    : Thought -> Act -> Observation -> Thought

graph = create_react_agent(llm, tools)



#  

def run_agent(question: str):

inputs = {"messages": [HumanMessage(content=question)]}

print(f": {question}")



#   

for event in graph.stream(inputs, stream_mode="values"):

if "messages" in event:

last_msg = event["messages"][-1]

if isinstance(last_msg, HumanMessage):

print(f"\nUser: {last_msg.content}")

else:

print(f"\nAgent: {last_msg.content}")



if __name__ == "__main__":

#   ,   

run_agent("        AAPL?")



 :

1. create_react_agent    ,     "" (ReAct).         .

2. graph.stream        .

3.     :   ,   ,  .



 LangGraph ?

:    .

:     (, " ", "  ").

   (streaming)  " ".



      



1. 401 Unauthorized / Invalid API Key

:      .

:   .env. ,      . ,  load_dotenv()    os.getenv().



2. 404 Not Found (Model not found)

:      model=.

:    OpenRouter.      .    (, "openai/gpt-3.5-turbo").



3. 402 Payment Required

:   OpenRouter  .

:      OpenRouter.   (, GPT-4)       .



4. AttributeError: 'NoneType' object has no attribute 'content'

:        .

: ,    StrOutputParser()    content  .



5.  (Context Window Exceeded)

:       .

:    (Memory)  LangChain,  ConversationBufferWindowMemory,     N .



   OpenRouter

OpenRouter         . ,         "" (prefix)   .



 LangChain   ChatOpenAI       `extra_headers`  `extra_body`,    . ,   (temperature, top_p)  .



     `route`,   OpenRouter    ,     ,      .        (, "openai/gpt-4o-mini")    .



   

    ,     .  OpenRouter     `system`.  LangChain     ChatPromptTemplate.



    :

"  helpful .          .     ,    .   ,     ."



:       .    Llama (Meta)    ,  GPT. OpenRouter   ,     .



  LangChain:  ?

  LangChain    .

`langchain-core`:  . .

`langchain`:  "" .    ,     .

`langchain-community`:    ( OpenAI).

`langchain-openai`:    OpenAI ( OpenRouter).

`langgraph`:   .



      :

```

langchain-core

langchain-openai

langgraph

python-dotenv

```

   `langchain`,     .    .



       

 ,   ,   `diagnostics.py`:



import langchain

import langchain_openai

import sys



print(f"Python version: {sys.version}")

print(f"LangChain version: {langchain.__version__ if hasattr(langchain, '__version__') else 'Not installed'}")

print(f"LangChain OpenAI version: {langchain_openai.__version__ if hasattr(langchain_openai, '__version__') else 'Not installed'}")



#  

from dotenv import load_dotenv

import os

load_dotenv()

key = os.getenv("OPENROUTER_API_KEY")

if key:

print(" .  :", len(key))

print(" 5 :", key[:5] + "")

else:

print("     .")



#  

try:

from langchain_openai import ChatOpenAI

llm = ChatOpenAI(

model="openai/gpt-4o-mini",

openai_api_key=key,

openai_api_base="https://openrouter.ai/api/v1",

max_tokens=10

)

response = llm.invoke("Hi")

print(" ! :", response.content)

except Exception as e:

print(" :", e)



  .    " !",       AI-.



 

    ,   .  Python   ,   :

import logging

logging.basicConfig(level=logging.DEBUG)



  :    API-  stdout.      .



 OpenRouter      ,  ,      .      .



 

OpenRouter     ( + ).

1.       (GPT-3.5, GPT-4o-mini, Mixtral).

2.  `max_tokens`  `temperature`.

3.  :        ,    Redis  SQLite.

4.  "openai/gpt-4o-mini"        .



 

    :           .  :

  .

   LangChain   .

 ChatOpenAI    OpenRouter.

  (Chains)   (Tools).

   LangGraph.



       :      ,    (Memory)           .






 3:   AI-:   -   


             .     AI-  ,  OpenRouter  LangChain.      ,     , ,      API    .        ,     ,             .

 1:       

   :  ,     ,   ,       .            .       :      OpenRouter,  ,         .






 4:     


      ,     OpenRouter    ,    .    AI-   ,     ,      .  ,       ,    .    - ,   ,      ,    CRM,      .      ,     (Custom Tools)  Python   LangChain     ,   OpenRouter.

 Custom Tools ?

 LLM (Large Language Model),   OpenRouter,       .             .     ""  "" ,      .

   :

1. :         API      .

2. :  ,     ,    API-   .

3. :          (JSON, XML),   ,        .

4. :    ,      ,    LLM.



    LangChain

 LangChain    ,      `Tool`.         `@tool`.          .

 :

`name`:   ,    LLM.

`description`:  ,   .   ,   LLM        ( ).

`args_schema`: Pydantic-,   .   LLM ,    .

`func`:    ,    .



  :  

    ,   . ,    ,    . LLM    ,       .

 1:   `@tool`

```python

from langchain_core.tools import tool

import math



@tool

def calculate_factorial(n: int) -> int:

"""   n.       ."""

return math.factorial(n)

```

   LangChain    `calculate_factorial`,       .

         .



 2:   

```python

from langchain_core.tools import BaseTool

from typing import Optional, Type

from pydantic import BaseModel, Field



class CalculatorInput(BaseModel):

"""   ."""

operation: str = Field(description=":  (add),  (sub),  (mul)   (div)")

a: float = Field(description=" ")

b: float = Field(description=" ")



class CalculatorTool(BaseTool):

name: str = "calculator"

description: str = "    .     ."

args_schema: Type[BaseModel] = CalculatorInput



def _run(self, operation: str, a: float, b: float) -> str:

#   ( ,     async)

try:

if operation == "add":

return str(a + b)

elif operation == "sub":

return str(a  b)

elif operation == "mul":

return str(a * b)

elif operation == "div":

if b == 0:

return ":   "

return str(a / b)

else:

return ":  "

except Exception as e:

return f" : {e}"



# 

calc_tool = CalculatorTool()

```

    .    `args_schema`.       ,  ,    `operation`, `a`  `b`.    .



  OpenRouter  LangChain Agents

  ,    .    `create_react_agent` ( `create_tool_calling_agent`    LangChain)     OpenRouter.

,    :

" (5 + 7) * 3      ".

   :   5+7,    3,    . ,   ,     12*3   36^2.



```python

from langchain_openrouter import ChatOpenRouter

from langchain.agents import create_react_agent, AgentExecutor

from langchain_core.prompts import PromptTemplate

import os



#  OpenRouter

# ,     : pip install langchain-openrouter

#    OpenRouter    OpenAI,    base_url

from langchain_openai import ChatOpenAI



#    OpenRouter

#      OpenRouter , , Anthropic Claude  GPT-4

llm = ChatOpenAI(

base_url="https://openrouter.ai/api/v1",

api_key=os.getenv("OPENROUTER_API_KEY"),

model="anthropic/claude-3.5-sonnet", #  

temperature=0.0

)



#   

tools = [CalculatorTool()]



#    ReAct 

prompt_template = """

   .   ,    .

 : {tool_names}



:

1. ,        .

2.  ,     :

Thought: [ ]

Action: [ ]

Action Input: [ ]

Observation: [ ]

3.   1-3,     .

4.    .



: {input}

{agent_scratchpad}

"""



prompt = PromptTemplate.from_template(prompt_template)



#  

agent = create_react_agent(llm, tools, prompt)



#   

agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)



# 

question = " (5 + 7) * 3      ."

response = agent_executor.invoke({"input": question})

print(response['output'])

```



   `create_react_agent`  ,   . LLM  OpenRouter  ,       . LangChain   ,  `_run`  `CalculatorTool`        .



    

 -       . LangChain   .  ,           API.

  :

1.   `BaseTool`.

2.   `_arun` (  `_run`).

3.  `_arun`  , LangChain   `_run`  `run_in_executor`,      .



      API ():

```python

import aiohttp

from langchain_core.tools import BaseTool

from pydantic import BaseModel, Field



class CryptoPriceInput(BaseModel):

coin_id: str = Field(description="ID  (, bitcoin, ethereum)")



class CryptoPriceTool(BaseTool):

name: str = "get_crypto_price"

description: str = "     USD.    ."

args_schema: Type[BaseModel] = CryptoPriceInput



async def _arun(self, coin_id: str) -> str:

#    CoinGecko API (  )

url = f"https://api.coingecko.com/api/v3/simple/price?ids={coin_id}&vs_currencies=usd"

try:

async with aiohttp.ClientSession() as session:

async with session.get(url) as response:

if response.status == 200:

data = await response.json()

if coin_id in data:

price = data[coin_id]['usd']

return f" {coin_id} : ${price}"

return "  "

return f" API: {response.status}"

except Exception as e:

return str(e)



def _run(self, coin_id: str) -> str:

#     (    requests)

#         asyncio.run

import asyncio

return asyncio.run(self._arun(coin_id))



crypto_tool = CryptoPriceTool()

```

 ,   `_arun`   `aiohttp`. ,        (,  FastAPI),      .



    (SQL Tool)

           .  LangChain    (`SQLDatabaseToolkit`),     SQL-     .

,    SQLite     `products`.

  ,      .   SQL-,      SQL  LLM.     ,      .



```python

from langchain_core.tools import BaseTool

from pydantic import BaseModel, Field

import sqlite3



class ProductSearchInput(BaseModel):

product_name: str = Field(description="   ")



class ProductSearchTool(BaseTool):

name: str = "sql_product_search"

description: str = "         .    ."

args_schema: Type[BaseModel] = ProductSearchInput



def _run(self, product_name: str) -> str:

#       (, SQLAlchemy)

conn = sqlite3.connect('example.db')

cursor = conn.cursor()






 5.       


 ,   ,    ,          .  -,    -,    ,      .      LangChain  ,      ,    .       ,          .        (Long-Term Memory Chat Agent)   ,   Large Language Model (LLM)     ,     (    -)     .        ,        openrouter.ai  LangChain.

    

     .       API   ,         .     Python ( 3.9  ).    LLM  openrouter.ai     HTTP-   LangChain,       ,      Chromadb  FAISS.

 :

pip install langchain langchain-openai chromadb faiss-cpu openai python-dotenv

  

   LangChain     :

1.  (LLM):  .    ,   openrouter.ai.       (, GPT-4, Claude, Llama)     .

2.  (Memory):  .     (   )   (   ).

3.  (Tools):  ,    ( ,  ,   API).

      ,      ,     ,       .

   OpenRouter

OpenRouter   API   .    LangChain     OpenAI- ,   URL OpenRouter.

  :

import os

from langchain_openai import OpenAI

from dotenv import load_dotenv

load_dotenv()

#  URL API OpenRouter   OpenAI

OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")

BASE_URL = "https://openrouter.ai/api/v1"

llm = OpenAI(

base_url=BASE_URL,

api_key=OPENROUTER_API_KEY,

model="openai/gpt-3.5-turbo", #     

temperature=0.7

)

 ,      ,    ,    .    (temperature, max_tokens)   .

    

     ,     .  LangChain     ConversationBufferMemory.        ,    LLM   .   ,    -.

         ,    .    ConversationBufferMemory      .

  :

from langchain.memory import ConversationBufferMemory

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

    ,       ,      .

 : Vector Store  Retrieval

      .         -     (context window).            ,   .

    Retrieval-Augmented Generation (RAG).  :

1.      (,    ),           .

2.    (,     ?)          ( ).

3.        LLM.

   :

  (Chroma).

  (Embeddings).      OpenAI (   OpenRouter,       , , text-embedding-ada-002).

   :

from langchain_community.vectorstores import Chroma

from langchain_openai import OpenAIEmbeddings

#  

embeddings = OpenAIEmbeddings(

base_url=BASE_URL,

api_key=OPENROUTER_API_KEY,

model="text-embedding-ada-002" #     OpenRouter  OpenAI

)

#    

vectorstore = Chroma(

collection_name="agent_memory",

embedding_function=embeddings,

persist_directory="./chroma_db"

)

       ,     (Tool),       .    ,    ,   .

   (Memory Management)

      ,    .        .  LangChain      (Planner)     .

    :

1.  LLM     .

2.     (, , ), LLM      (JSON)     .

3.       ,       ().

       (Custom Tool).

   

   LangChain   BaseTool.    :

1. `SaveMemory`:    Chroma.

2. `LoadMemory`:    .

  :

from langchain.tools import BaseTool

from typing import Optional, Type

from pydantic import BaseModel, Field

class MemoryInput(BaseModel):

content: str = Field(description="     ")

category: Optional[str] = Field(default="general", description=" ")



class SaveMemoryTool(BaseTool):

name = "save_memory"

description = "  ,           .      ."

args_schema: Type[BaseModel] = MemoryInput

def _run(self, content: str, category: str = "general") -> str:

#    

try:

vectorstore.add_texts(

texts=[content],

metadatas=[{"category": category}]

)

return "    ."

except Exception as e:

return f"  : {e}"

     . ,    ,     :   ,     .

class SearchInput(BaseModel):

query: str = Field(description="    ")



class LoadMemoryTool(BaseTool):

name = "retrieve_memory"

description = "  ,         ,     .        ,   ."

args_schema: Type[BaseModel] = SearchInput

def _run(self, query: str) -> str:

try:

docs = vectorstore.similarity_search(query, k=3)

if not docs:

return "  ."

#  

result_text = "\n".join([doc.page_content for doc in docs])

return f" :\n{result_text}"

except Exception as e:

return f"  : {e}"

 :   

,      ,    ,    .   .   LangChain (AgentExecutor)   ,     .      :

1.   .

2. :      ? (      ,    ).

3. :       ?

     `AgentExecutor`   ,      .

     " "  `ConversationalAgent` ( `ToolCallingAgent`)   .

   `initialize_agent`  LangChain.

:    , LLM,   .

   `ConversationBufferMemory`  `initialize_agent`     .    ,     .

        ,     `RetrievalQA`   ,   ,     .

     :    `SaveMemoryTool`  `LoadMemoryTool`.     ,   .

          ,   `create_react_agent` ( `create_tool_calling_agent`),     ,      .

 

   .    :

*       .

*      ,   `save_memory`.

*     ,     ,  `retrieve_memory`.

*      {chat_history}.

* {agent_scratchpad}      .

 :

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

from langchain.agents import AgentExecutor, create_tool_calling_agent

#   

system_prompt = """

   -    .

 :

1.     .

2.      (, ,   ),     `save_memory`    .

3.    ,       (        ),     `retrieve_memory`    .

4.   ,         .

5.     .

"""

#    (Tool Calling Agent)

prompt = ChatPromptTemplate.from_messages([

("system", system_prompt),

MessagesPlaceholder("chat_history"),

("user", "{input}"),

MessagesPlaceholder("agent_scratchpad"),

])

 

   .

from langchain.agents import AgentExecutor

from langchain_openai import ChatOpenAI #  ChatOpenAI   /

#  ChatOpenAI  ,       

#     OpenAI,     .

#    ChatOpenAI  OpenRouter

llm_chat = ChatOpenAI(

base_url=BASE_URL,

api_key=OPENROUTER_API_KEY,

model="openai/gpt-3.5-turbo", #    function calling

temperature=0.3

)

#  

tools = [SaveMemoryTool(), LoadMemoryTool()]

#  

agent = create_tool_calling_agent(llm_chat, tools, prompt)

#  

agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True)

#   (   )

chat_memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

#   

def run_chat():

print(" .  'exit'  .")

while True:

user_input = input(": ")

if user_input.lower() == 'exit':

break



#    

history = chat_memory.load_memory_variables({})

chat_history = history.get("chat_history", [])



#  

try:

response = agent_executor.invoke({

"input": user_input,

"chat_history": chat_history

})

answer = response['output']

print(f": {answer}")



#      ( )

chat_memory.save_context({"input": user_input}, {"output": answer})

except Exception as e:

print(f": {e}")



if __name__ == "__main__":

run_chat()

   :

1.   .

2.      `ConversationBufferMemory`.

3.   `agent_executor`: ,    .

4.   .   ,    ,   `SaveMemoryTool`.     `verbose` .      Chroma.

5.   ,    ,   `LoadMemoryTool`.   Chroma   .

6.        ,      .

7.       `ConversationBufferMemory`.



 :    

  ,     :      `save_memory`  `retrieve_memory`    (Thought -> Action -> Observation).   .     ,     (Memory Transformers)    (Hidden Actions).

 LangChain    ,       (   ),         .

     `SaveMemoryTool`.    ,    JSON-,   .

  :

class EnhancedSaveMemoryTool(BaseTool):

name = "save_memory_enhanced"

description = "    .      , , ,  .   ,   ."

args_schema: Type[BaseModel] = MemoryInput

def _run(self, content: str, category: str = "general") -> str:

#      

try:

# ,     

vectorstore.add_texts(

texts=[f": {content}"],

metadatas=[{"source": "long_term_memory", "category": category}]

)

return " ."

except Exception as e:

return f" : {e}"

   

        :

1. **  **:   ,   (, " "),        .

* **:  `agent_scratchpad` ( )   .  ,    ,    .        : "       Observation".




  .


   .

   ,     (https://www.litres.ru/pages/biblio_book/?art=73084753)  .

      Visa, MasterCard, Maestro,    ,   ,     ,  PayPal, WebMoney, ., QIWI ,       .


