Fix langchain llm and update changelog (#2508)

Dev Khant
2025-04-07 11:49:39 +05:30
committed by GitHub
parent 93f34e4116
commit 3712522b14
4 changed files with 53 additions and 17 deletions

View File

@@ -13,7 +13,7 @@ install:
install_all:
poetry install
poetry run pip install groq together boto3 litellm ollama chromadb weaviate weaviate-client sentence_transformers vertexai \
-google-generativeai elasticsearch opensearch-py vecs pinecone pinecone-text faiss-cpu
+google-generativeai elasticsearch opensearch-py vecs pinecone pinecone-text faiss-cpu langchain-community
# Format code with ruff
format:
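
The Makefile hunk above only adds langchain-community to the optional `install_all` target; nothing changes in the default install. Below is a minimal, illustrative sketch of how such an optional dependency is typically probed at import time. The helper name `langchain_fallback_available` is not part of the repository; only the `langchain_community.chat_models` import reflects the package this target installs.

```python
# Illustrative guard for the optional dependency added above; not the project's code.
try:
    from langchain_community import chat_models  # available after `make install_all`
except ImportError:  # the extra was not installed
    chat_models = None


def langchain_fallback_available() -> bool:
    """Return True when the langchain_community fallback can be used."""
    return chat_models is not None
```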

View File

@@ -6,6 +6,25 @@ mode: "wide"
<Tabs>
<Tab title="Python">
<Update label="2025-04-07" description="v0.1.82">
**New Features:**
- **LLM Integrations:** Added support for Langchain LLMs and added Google as a new LLM and embedder (see the usage sketch below)
- **Development:** Added a development Docker Compose setup
**Improvements:**
- **Output Format:** Set output_format='v1.1' and updated documentation
**Documentation:**
- **Integrations:** Added LMStudio and Together.ai documentation
- **API Reference:** Updated output_format documentation
- **Integrations:** Added PipeCat integration documentation
- **Integrations:** Added Flowise integration documentation for Mem0 memory setup
**Bug Fixes:**
- **Tests:** Fixed failing unit tests
</Update>
<Update label="2025-04-02" description="v0.1.79">
**New Features:**
@@ -56,6 +75,17 @@ mode: "wide"
<Tab title="TypeScript">
<Update label="2025-04-01" description="v2.1.14">
**New Features:**
- **Mastra Example:** Added Mastra example
- **Integrations:** Added Flowise integration documentation for Mem0 memory setup
**Improvements:**
- **Demo:** Updated the Mem0AI demo
- **Client:** Enhanced Ping method in Mem0 Client
- **AI SDK:** Updated AI SDK implementation
</Update>
<Update label="2025-03-29" description="v2.1.13">
**Improvements:**
- **Introduced `ping` method to check if the API key is valid and populate org/project id**
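
For context on the Langchain entry in the Python changelog above, here is a heavily hedged usage sketch. Only `Memory.from_config`, `Memory.add`, and the general `llm` config shape follow mem0's documented usage pattern; the `"langchain"` provider key and the specific fields under `config` are assumptions for illustration, not taken from this commit.

```python
from mem0 import Memory

# Hedged sketch: the "langchain" provider key and the fields under "config" are
# assumptions for illustration; consult the mem0 docs for the supported shape.
config = {
    "llm": {
        "provider": "langchain",
        "config": {
            "model": "ChatAnthropic",  # Langchain chat-model class name (assumed)
            "temperature": 0.1,
            "max_tokens": 2000,
            # Note: this commit drops api_key from the model constructor (see the
            # langchain.py hunk below), so provider credentials are presumably
            # read from the environment, e.g. ANTHROPIC_API_KEY.
        },
    }
}

memory = Memory.from_config(config)
memory.add("I prefer dark roast coffee.", user_id="alice")
```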

View File

@@ -12,7 +12,7 @@ except ImportError:
 # Provider-specific package mapping
 PROVIDER_PACKAGES = {
-    # "Anthropic": "langchain_anthropic", # Special handling for Anthropic with Pydantic v2
+    "Anthropic": "langchain_anthropic",
     "MistralAI": "langchain_mistralai",
     "Fireworks": "langchain_fireworks",
     "AzureOpenAI": "langchain_openai",
@@ -135,18 +135,25 @@ class LangchainLLM(LLMBase):
         try:
             # Check if this provider needs a specialized package
             if provider in PROVIDER_PACKAGES:
-                package_name = PROVIDER_PACKAGES[provider]
-                try:
-                    # Import the model class directly from the package
-                    module_path = f"{package_name}"
-                    model_class = __import__(module_path, fromlist=[model_name])
-                    model_class = getattr(model_class, model_name)
-                except ImportError:
-                    raise ImportError(
-                        f"Package {package_name} not found. " f"Please install it with `pip install {package_name}`"
-                    )
-                except AttributeError:
-                    raise ImportError(f"Model {model_name} not found in {package_name}")
+                if provider == "Anthropic": # Special handling for Anthropic with Pydantic v2
+                    try:
+                        from langchain_anthropic import ChatAnthropic
+                        model_class = ChatAnthropic
+                    except ImportError:
+                        raise ImportError("langchain_anthropic not found. Please install it with `pip install langchain-anthropic`")
+                else:
+                    package_name = PROVIDER_PACKAGES[provider]
+                    try:
+                        # Import the model class directly from the package
+                        module_path = f"{package_name}"
+                        model_class = __import__(module_path, fromlist=[model_name])
+                        model_class = getattr(model_class, model_name)
+                    except ImportError:
+                        raise ImportError(
+                            f"Package {package_name} not found. " f"Please install it with `pip install {package_name}`"
+                        )
+                    except AttributeError:
+                        raise ImportError(f"Model {model_name} not found in {package_name}")
             else:
                 # Use the default langchain_community module
                 if not hasattr(chat_models, model_name):
@@ -158,8 +165,7 @@ class LangchainLLM(LLMBase):
             self.langchain_model = model_class(
                 model=self.config.model,
                 temperature=self.config.temperature,
-                max_tokens=self.config.max_tokens,
-                api_key=self.config.api_key,
+                max_tokens=self.config.max_tokens
             )
         except (ImportError, AttributeError, ValueError) as e:
             raise ImportError(f"Error setting up langchain model for provider {provider}: {str(e)}")

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "mem0ai"
version = "0.1.82"
version = "0.1.83"
description = "Long-term memory for AI Agents"
authors = ["Mem0 <founders@mem0.ai>"]
exclude = [