langchain[minor]: add warnings when importing integrations from langchain #15505

Merged
merged 10 commits on Jan 4, 2024
Changes from 4 commits
48 changes: 23 additions & 25 deletions libs/langchain/langchain/__init__.py
@@ -14,16 +14,11 @@
 del metadata  # optional, avoids polluting the results of dir(__package__)


-def _is_interactive_env() -> bool:
-    """Determine if running within IPython or Jupyter."""
-    import sys
-
-    return hasattr(sys, "ps2")
-
-
 def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
     """Warn on import of deprecated module."""
-    if _is_interactive_env():
+    from langchain.utils.interactive_env import is_interactive_env
+
+    if is_interactive_env():
         # No warnings for interactive environments.
         # This is done to avoid polluting the output of interactive environments
         # where users rely on auto-complete and may trigger this warning
@@ -131,104 +126,107 @@ def __getattr__(name: str) -> Any:
     elif name == "Anthropic":
         from langchain_community.llms import Anthropic

-        _warn_on_import(name, replacement="langchain.llms.Anthropic")
+        _warn_on_import(name, replacement="langchain_community.llms.Anthropic")

         return Anthropic
     elif name == "Banana":
         from langchain_community.llms import Banana

-        _warn_on_import(name, replacement="langchain.llms.Banana")
+        _warn_on_import(name, replacement="langchain_community.llms.Banana")

         return Banana
     elif name == "CerebriumAI":
         from langchain_community.llms import CerebriumAI

-        _warn_on_import(name, replacement="langchain.llms.CerebriumAI")
+        _warn_on_import(name, replacement="langchain_community.llms.CerebriumAI")

         return CerebriumAI
     elif name == "Cohere":
         from langchain_community.llms import Cohere

-        _warn_on_import(name, replacement="langchain.llms.Cohere")
+        _warn_on_import(name, replacement="langchain_community.llms.Cohere")

         return Cohere
     elif name == "ForefrontAI":
         from langchain_community.llms import ForefrontAI

-        _warn_on_import(name, replacement="langchain.llms.ForefrontAI")
+        _warn_on_import(name, replacement="langchain_community.llms.ForefrontAI")

         return ForefrontAI
     elif name == "GooseAI":
         from langchain_community.llms import GooseAI

-        _warn_on_import(name, replacement="langchain.llms.GooseAI")
+        _warn_on_import(name, replacement="langchain_community.llms.GooseAI")

         return GooseAI
     elif name == "HuggingFaceHub":
         from langchain_community.llms import HuggingFaceHub

-        _warn_on_import(name, replacement="langchain.llms.HuggingFaceHub")
+        _warn_on_import(name, replacement="langchain_community.llms.HuggingFaceHub")

         return HuggingFaceHub
     elif name == "HuggingFaceTextGenInference":
         from langchain_community.llms import HuggingFaceTextGenInference

-        _warn_on_import(name, replacement="langchain.llms.HuggingFaceTextGenInference")
+        _warn_on_import(
+            name, replacement="langchain_community.llms.HuggingFaceTextGenInference"
+        )

         return HuggingFaceTextGenInference
     elif name == "LlamaCpp":
         from langchain_community.llms import LlamaCpp

-        _warn_on_import(name, replacement="langchain.llms.LlamaCpp")
+        _warn_on_import(name, replacement="langchain_community.llms.LlamaCpp")

         return LlamaCpp
     elif name == "Modal":
         from langchain_community.llms import Modal

-        _warn_on_import(name, replacement="langchain.llms.Modal")
+        _warn_on_import(name, replacement="langchain_community.llms.Modal")

         return Modal
     elif name == "OpenAI":
         from langchain_community.llms import OpenAI

-        _warn_on_import(name, replacement="langchain.llms.OpenAI")
+        _warn_on_import(name, replacement="langchain_community.llms.OpenAI")

         return OpenAI
     elif name == "Petals":
         from langchain_community.llms import Petals

-        _warn_on_import(name, replacement="langchain.llms.Petals")
+        _warn_on_import(name, replacement="langchain_community.llms.Petals")

         return Petals
     elif name == "PipelineAI":
         from langchain_community.llms import PipelineAI

-        _warn_on_import(name, replacement="langchain.llms.PipelineAI")
+        _warn_on_import(name, replacement="langchain_community.llms.PipelineAI")

         return PipelineAI
     elif name == "SagemakerEndpoint":
         from langchain_community.llms import SagemakerEndpoint

-        _warn_on_import(name, replacement="langchain.llms.SagemakerEndpoint")
+        _warn_on_import(name, replacement="langchain_community.llms.SagemakerEndpoint")

         return SagemakerEndpoint
     elif name == "StochasticAI":
         from langchain_community.llms import StochasticAI

-        _warn_on_import(name, replacement="langchain.llms.StochasticAI")
+        _warn_on_import(name, replacement="langchain_community.llms.StochasticAI")

         return StochasticAI
     elif name == "Writer":
         from langchain_community.llms import Writer

-        _warn_on_import(name, replacement="langchain.llms.Writer")
+        _warn_on_import(name, replacement="langchain_community.llms.Writer")

         return Writer
     elif name == "HuggingFacePipeline":
         from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline

         _warn_on_import(
-            name, replacement="langchain.llms.huggingface_pipeline.HuggingFacePipeline"
+            name,
+            replacement="langchain_community.llms.huggingface_pipeline.HuggingFacePipeline",
         )

         return HuggingFacePipeline
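The diff above only changes the `replacement=` strings passed to `_warn_on_import`, so the deprecation message now points users at `langchain_community.llms.*` instead of the old `langchain.llms.*` paths. As a rough illustration (not part of this PR), assuming `_warn_on_import` goes through the standard `warnings` machinery and the code runs non-interactively, the new message can be observed like this:

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Resolved lazily via langchain/__init__.py's __getattr__, which calls
    # _warn_on_import(name, replacement="langchain_community.llms.OpenAI").
    from langchain import OpenAI  # noqa: F401

for w in caught:
    # Expected to mention langchain_community.llms.OpenAI rather than
    # langchain.llms.OpenAI after this change.
    print(w.category.__name__, w.message)
```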
52 changes: 22 additions & 30 deletions libs/langchain/langchain/chat_models/__init__.py
@@ -16,37 +16,29 @@

     AIMessage, BaseMessage, HumanMessage
 """  # noqa: E501
+import warnings
+
+from langchain_core._api import LangChainDeprecationWarning
+
+from langchain.utils.interactive_env import is_interactive_env
+
+
+def __getattr__(name: str) -> None:
+    from langchain_community import chat_models
+
+    # If not in interactive env, raise warning.
+    if not is_interactive_env():
+        warnings.warn(
+            "Importing chat models from langchain is deprecated. Importing from "
+            "langchain will no longer be supported as of langchain==0.2.0. "
+            "Please import from langchain-community instead:\n\n"
+            f"`from langchain_community.chat_models import {name}`.\n\n"
+            "To install langchain-community run `pip install -U langchain-community`.",
+            category=LangChainDeprecationWarning,
+        )
+
+    return getattr(chat_models, name)
+
-from langchain_community.chat_models.anthropic import ChatAnthropic
-from langchain_community.chat_models.anyscale import ChatAnyscale
-from langchain_community.chat_models.azure_openai import AzureChatOpenAI
-from langchain_community.chat_models.baichuan import ChatBaichuan
-from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
-from langchain_community.chat_models.bedrock import BedrockChat
-from langchain_community.chat_models.cohere import ChatCohere
-from langchain_community.chat_models.databricks import ChatDatabricks
-from langchain_community.chat_models.ernie import ErnieBotChat
-from langchain_community.chat_models.everlyai import ChatEverlyAI
-from langchain_community.chat_models.fake import FakeListChatModel
-from langchain_community.chat_models.fireworks import ChatFireworks
-from langchain_community.chat_models.gigachat import GigaChat
-from langchain_community.chat_models.google_palm import ChatGooglePalm
-from langchain_community.chat_models.human import HumanInputChatModel
-from langchain_community.chat_models.hunyuan import ChatHunyuan
-from langchain_community.chat_models.javelin_ai_gateway import ChatJavelinAIGateway
-from langchain_community.chat_models.jinachat import JinaChat
-from langchain_community.chat_models.konko import ChatKonko
-from langchain_community.chat_models.litellm import ChatLiteLLM
-from langchain_community.chat_models.minimax import MiniMaxChat
-from langchain_community.chat_models.mlflow import ChatMlflow
-from langchain_community.chat_models.mlflow_ai_gateway import ChatMLflowAIGateway
-from langchain_community.chat_models.ollama import ChatOllama
-from langchain_community.chat_models.openai import ChatOpenAI
-from langchain_community.chat_models.pai_eas_endpoint import PaiEasChatEndpoint
-from langchain_community.chat_models.promptlayer_openai import PromptLayerChatOpenAI
-from langchain_community.chat_models.vertexai import ChatVertexAI
-from langchain_community.chat_models.volcengine_maas import VolcEngineMaasChat
-from langchain_community.chat_models.yandex import ChatYandexGPT

 __all__ = [
     "ChatOpenAI",
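With the eager `langchain_community` imports replaced by a module-level `__getattr__`, `langchain.chat_models` keeps exposing the same names but resolves them lazily and, outside interactive environments, emits a `LangChainDeprecationWarning`. A minimal sketch of the migration the warning message asks for (illustrative only, not taken from the diff):

```python
# Deprecated path: still works via the new __getattr__ proxy, but warns
# with LangChainDeprecationWarning in non-interactive runs.
from langchain.chat_models import ChatOpenAI  # noqa: F401

# Recommended path after `pip install -U langchain-community`:
from langchain_community.chat_models import ChatOpenAI  # noqa: F811
```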
5 changes: 5 additions & 0 deletions libs/langchain/langchain/utils/interactive_env.py
@@ -0,0 +1,5 @@
+def is_interactive_env() -> bool:
+    """Determine if running within IPython or Jupyter."""
+    import sys
+
+    return hasattr(sys, "ps2")
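The helper moves the `sys.ps2` check into a shared module so that both `langchain/__init__.py` and the per-namespace `__getattr__` functions can suppress warnings in REPLs and notebooks; `sys.ps2` (the secondary interactive prompt) is only defined in interactive sessions. A small usage sketch; the `warn_deprecated_import` function below is hypothetical and only mirrors the pattern used elsewhere in this PR:

```python
import warnings

from langchain.utils.interactive_env import is_interactive_env


def warn_deprecated_import(message: str) -> None:
    # Hypothetical helper mirroring _warn_on_import / chat_models.__getattr__:
    # stay quiet in IPython/Jupyter, warn in scripts and production code.
    if is_interactive_env():
        return
    warnings.warn(message, category=DeprecationWarning)


warn_deprecated_import(
    "Importing X from langchain is deprecated; import it from langchain_community."
)
```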