core[patch]: remove requests #19891

Merged
14 commits merged on Apr 2, 2024
@@ -1,11 +1,12 @@
"""Integration tests for the langchain tracer module."""

import asyncio
import os

from aiohttp import ClientSession
from langchain_core.callbacks.manager import atrace_as_chain_group, trace_as_chain_group
from langchain_core.prompts import PromptTemplate
from langchain_core.tracers.context import tracing_enabled, tracing_v2_enabled
from langchain_core.tracers.context import tracing_v2_enabled

from langchain_community.chat_models import ChatOpenAI
from langchain_community.llms import OpenAI
@@ -95,44 +96,6 @@ async def test_tracing_concurrent_bw_compat_environ() -> None:
del os.environ["LANGCHAIN_HANDLER"]


def test_tracing_context_manager() -> None:
from langchain.agents import AgentType, initialize_agent, load_tools

llm = OpenAI(temperature=0)
tools = load_tools(["llm-math", "serpapi"], llm=llm)
agent = initialize_agent(
tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
)
if "LANGCHAIN_TRACING" in os.environ:
del os.environ["LANGCHAIN_TRACING"]
with tracing_enabled() as session:
assert session
agent.run(questions[0]) # this should be traced

agent.run(questions[0]) # this should not be traced


async def test_tracing_context_manager_async() -> None:
from langchain.agents import AgentType, initialize_agent, load_tools

llm = OpenAI(temperature=0)
async_tools = load_tools(["llm-math", "serpapi"], llm=llm)
agent = initialize_agent(
async_tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
)
if "LANGCHAIN_TRACING" in os.environ:
del os.environ["LANGCHAIN_TRACING"]

# start a background task
task = asyncio.create_task(agent.arun(questions[0])) # this should not be traced
with tracing_enabled() as session:
assert session
tasks = [agent.arun(q) for q in questions[1:4]] # these should be traced
await asyncio.gather(*tasks)

await task


async def test_tracing_v2_environment_variable() -> None:
from langchain.agents import AgentType, initialize_agent, load_tools

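Migration note: the two deleted tests exercised the v1 `tracing_enabled` context manager, which this PR removes. The retained `tracing_v2_enabled` context manager covers the same use case. A minimal sketch, assuming `LANGCHAIN_API_KEY` is set; the stand-in chain and project name are illustrative and not part of the original tests:

```python
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableLambda
from langchain_core.tracers.context import tracing_v2_enabled

# A trivial stand-in chain; any Runnable is traced the same way.
chain = PromptTemplate.from_template("{question}") | RunnableLambda(
    lambda prompt_value: prompt_value.to_string().upper()
)

# Assumes LANGCHAIN_API_KEY is set; the project name is illustrative.
with tracing_v2_enabled(project_name="tracer-integration-tests"):
    chain.invoke({"question": "What is 2 ** 8?"})  # traced to LangSmith

chain.invoke({"question": "What is 2 ** 16?"})  # not traced
```

Unlike the removed v1 manager, there is no session to load; the context manager simply registers a LangChainTracer for runs inside the block.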
30 changes: 12 additions & 18 deletions libs/core/langchain_core/callbacks/manager.py
@@ -1914,7 +1914,6 @@ def _configure(
_configure_hooks,
_get_tracer_project,
_tracing_v2_is_enabled,
tracing_callback_var,
tracing_v2_callback_var,
)

@@ -1953,20 +1952,25 @@ def _configure(
callback_manager.add_metadata(inheritable_metadata or {})
callback_manager.add_metadata(local_metadata or {}, False)

tracer = tracing_callback_var.get()
tracing_enabled_ = (
Comment on lines -1956 to -1957

Collaborator: Can we leave dummy vars in, in case they're imported anywhere?

    tracer = None
    tracing_enabled = None

Member Author: These are both inside the _configure function, so I don't think it's necessary to stub them?

env_var_is_set("LANGCHAIN_TRACING")
or tracer is not None
or env_var_is_set("LANGCHAIN_HANDLER")
v1_tracing_enabled_ = env_var_is_set("LANGCHAIN_TRACING") or env_var_is_set(
"LANGCHAIN_HANDLER"
)

tracer_v2 = tracing_v2_callback_var.get()
tracing_v2_enabled_ = _tracing_v2_is_enabled()

if v1_tracing_enabled_ and not tracing_v2_enabled_:
# if both are enabled, can silently ignore the v1 tracer
raise RuntimeError(
"Tracing using LangChainTracerV1 is no longer supported. "
"Please set the LANGCHAIN_TRACING_V2 environment variable to enable "
"tracing instead."
)

tracer_project = _get_tracer_project()
debug = _get_debug()
if verbose or debug or tracing_enabled_ or tracing_v2_enabled_:
if verbose or debug or tracing_v2_enabled_:
from langchain_core.tracers.langchain import LangChainTracer
from langchain_core.tracers.langchain_v1 import LangChainTracerV1
from langchain_core.tracers.stdout import ConsoleCallbackHandler

if verbose and not any(
@@ -1982,16 +1986,6 @@
for handler in callback_manager.handlers
):
callback_manager.add_handler(ConsoleCallbackHandler(), True)
if tracing_enabled_ and not any(
isinstance(handler, LangChainTracerV1)
for handler in callback_manager.handlers
):
if tracer:
callback_manager.add_handler(tracer, True)
else:
handler = LangChainTracerV1()
handler.load_session(tracer_project)
callback_manager.add_handler(handler, True)
if tracing_v2_enabled_ and not any(
isinstance(handler, LangChainTracer)
for handler in callback_manager.handlers
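The practical effect of the _configure changes above: configuring callbacks with only the v1 environment variables set now fails fast instead of installing a LangChainTracerV1. A minimal sketch of the new failure mode; the RunnableLambda is just a stand-in for any chain:

```python
import os

from langchain_core.runnables import RunnableLambda

# Simulate a v1-only tracing setup: LANGCHAIN_TRACING set, LANGCHAIN_TRACING_V2 unset.
os.environ["LANGCHAIN_TRACING"] = "true"
os.environ.pop("LANGCHAIN_TRACING_V2", None)

runnable = RunnableLambda(lambda x: x + 1)

try:
    runnable.invoke(1)  # callback configuration happens here
except RuntimeError as err:
    print(err)  # "Tracing using LangChainTracerV1 is no longer supported. ..."
```

If both the v1 and v2 variables are set, the v1 flag is silently ignored, per the comment in the diff.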
15 changes: 8 additions & 7 deletions libs/core/langchain_core/prompts/loading.py
@@ -1,4 +1,5 @@
"""Load prompts."""

import json
import logging
from pathlib import Path
@@ -11,7 +12,6 @@
from langchain_core.prompts.chat import ChatPromptTemplate
from langchain_core.prompts.few_shot import FewShotPromptTemplate
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.utils import try_load_from_hub

URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/prompts/"
logger = logging.getLogger(__name__)
@@ -127,12 +127,13 @@ def _load_prompt(config: dict) -> PromptTemplate:

def load_prompt(path: Union[str, Path]) -> BasePromptTemplate:
"""Unified method for loading a prompt from LangChainHub or local fs."""
if hub_result := try_load_from_hub(
path, _load_prompt_from_file, "prompts", {"py", "json", "yaml"}
):
return hub_result
else:
return _load_prompt_from_file(path)
if isinstance(path, str) and path.startswith("lc://"):
raise RuntimeError(
"Loading from the deprecated github-based Hub is no longer supported. "
"Please use the new LangChain Hub at https://smith.langchain.com/hub "
"instead."
)
return _load_prompt_from_file(path)


def _load_prompt_from_file(file: Union[str, Path]) -> BasePromptTemplate:
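The behavioral change to `load_prompt` in isolation: `lc://` paths now raise instead of fetching from the old github-based hub, while local files load exactly as before. A minimal sketch; the file name and prompt text are illustrative:

```python
from langchain_core.prompts import PromptTemplate
from langchain_core.prompts.loading import load_prompt

# Old github-based Hub paths now fail fast with a pointer to the new Hub.
try:
    load_prompt("lc://prompts/hello-world/prompt.yaml")
except RuntimeError as err:
    print(err)  # mentions https://smith.langchain.com/hub

# Loading from the local filesystem is unchanged.
PromptTemplate.from_template("Tell me a joke about {topic}").save("joke.json")
print(load_prompt("joke.json").format(topic="cats"))
```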
42 changes: 10 additions & 32 deletions libs/core/langchain_core/tracers/context.py
@@ -18,9 +18,7 @@
from langsmith import utils as ls_utils
from langsmith.run_helpers import get_run_tree_context

from langchain_core._api import deprecated
from langchain_core.tracers.langchain import LangChainTracer
from langchain_core.tracers.langchain_v1 import LangChainTracerV1
from langchain_core.tracers.run_collector import RunCollectorCallbackHandler
from langchain_core.tracers.schemas import TracerSessionV1
from langchain_core.utils.env import env_var_is_set
@@ -31,44 +29,24 @@
from langchain_core.callbacks.base import BaseCallbackHandler, Callbacks
from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager

# Deprecated as of 0.1.0, will be removed in 0.2.0.
tracing_callback_var: ContextVar[Optional[LangChainTracerV1]] = ContextVar( # noqa: E501
"tracing_callback", default=None
)
Comment on lines -35 to -37

Collaborator: dummy var

Member Author: Adding


tracing_v2_callback_var: ContextVar[Optional[LangChainTracer]] = ContextVar( # noqa: E501
# for backwards partial compatibility if this is imported by users but unused
tracing_callback_var: Any = None
tracing_v2_callback_var: ContextVar[Optional[LangChainTracer]] = ContextVar(
"tracing_callback_v2", default=None
)
run_collector_var: ContextVar[Optional[RunCollectorCallbackHandler]] = ContextVar( # noqa: E501
) # noqa: E501
run_collector_var: ContextVar[Optional[RunCollectorCallbackHandler]] = ContextVar(
"run_collector", default=None
)
) # noqa: E501


@contextmanager
Collaborator: Is this breaking?

Member Author: The whole thing is breaking, but agreed: without @contextmanager the error message would be more esoteric (a complaint that you can't use tracing_enabled() in a with statement, instead of the intentional RuntimeError). Will add back.

Member Author: Done

@deprecated("0.1.0", alternative="tracing_v2_enabled", removal="0.2.0")
def tracing_enabled(
session_name: str = "default",
) -> Generator[TracerSessionV1, None, None]:
"""Get the Deprecated LangChainTracer in a context manager.

Args:
session_name (str, optional): The name of the session.
Defaults to "default".

Returns:
TracerSessionV1: The LangChainTracer session.

Example:
>>> with tracing_enabled() as session:
... # Use the LangChainTracer session
"""
cb = LangChainTracerV1()
session = cast(TracerSessionV1, cb.load_session(session_name))
try:
tracing_callback_var.set(cb)
yield session
finally:
tracing_callback_var.set(None)
"""Throws an error because this has been replaced by tracing_v2_enabled."""
raise RuntimeError(
"tracing_enabled is no longer supported. Please use tracing_enabled_v2 instead."
)


@contextmanager
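With the @contextmanager decorator kept on the tracing_enabled stub above (per the thread) and the dummy `tracing_callback_var` in place, old imports still resolve but old call sites hit the intentional RuntimeError. A minimal sketch of what downstream code sees under this diff:

```python
from langchain_core.tracers.context import tracing_callback_var, tracing_enabled

# The stubbed variable still imports, for partial backwards compatibility.
assert tracing_callback_var is None

# Entering the old context manager now raises immediately.
try:
    with tracing_enabled() as session:
        pass
except RuntimeError as err:
    print(err)  # points at the v2 context manager instead
```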