[feat]: add support for llama2 model (#331)

Deshraj Yadav
2023-07-20 00:01:37 -07:00
committed by GitHub
parent 3bdec3b71a
commit cc43846d42
5 changed files with 92 additions and 1 deletion

View File

@@ -14,6 +14,29 @@ Embedchain is a framework to easily create LLM powered bots over any dataset. If
pip install embedchain
```
## 🔥 Latest
- **[2023/07/19]** Released support for the 🦙 `llama2` model. Start creating your `llama2`-based bots like this:
```python
import os
from embedchain import Llama2App
os.environ['REPLICATE_API_TOKEN'] = "REPLICATE API TOKEN"
zuck_bot = Llama2App()
# Embed your data
zuck_bot.add("youtube_video", "https://www.youtube.com/watch?v=Ff4fRgnuFgQ")
zuck_bot.add("web_page", "https://en.wikipedia.org/wiki/Mark_Zuckerberg")
# Nice, your bot is ready now. Start asking questions to your bot.
zuck_bot.query("Who is Mark Zuckerberg?")
# Answer: Mark Zuckerberg is an American internet entrepreneur and business magnate. He is the co-founder and CEO of Facebook.
```
## 🔍 Demo
Try out embedchain in your browser:

View File

@@ -23,6 +23,35 @@ import os
os.environ["OPENAI_API_KEY"] = "sk-xxxx"
```
### Llama2App
```python
import os
from embedchain import Llama2App
os.environ['REPLICATE_API_TOKEN'] = "REPLICATE API TOKEN"
zuck_bot = Llama2App()
# Embed your data
zuck_bot.add("youtube_video", "https://www.youtube.com/watch?v=Ff4fRgnuFgQ")
zuck_bot.add("web_page", "https://en.wikipedia.org/wiki/Mark_Zuckerberg")
# Nice, your bot is ready now. Start asking questions to your bot.
zuck_bot.query("Who is Mark Zuckerberg?")
# Answer: Mark Zuckerberg is an American internet entrepreneur and business magnate. He is the co-founder and CEO of Facebook. Born in 1984, he dropped out of Harvard University to focus on his social media platform, which has since grown to become one of the largest and most influential technology companies in the world.
# Enable web search for your bot
zuck_bot.online = True # enable internet access for the bot
zuck_bot.query("Who owns the new threads app and when it was founded?")
# Answer: Based on the context provided, the new Threads app is owned by Meta, the parent company of Facebook, Instagram, and WhatsApp.
```
- `Llama2App` runs the LLM through Replicate, which is a paid service. You can get a `REPLICATE_API_TOKEN` by registering on [their website](https://replicate.com/account).
- `Llama2App` uses OpenAI's embedding model to create embeddings for chunks, so you also need an OpenAI account and an API key. If you don't have an API key, you can create one by visiting [this link](https://platform.openai.com/account/api-keys). A minimal setup sketch combining both keys follows this list.
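The sketch below is not part of the original docs; it simply combines the two requirements above, assuming you already have both accounts and reusing the same placeholder values shown in the snippets on this page:
```python
import os

from embedchain import Llama2App

# Both keys are required: Replicate serves the Llama2 model, OpenAI creates the chunk embeddings.
os.environ["REPLICATE_API_TOKEN"] = "REPLICATE API TOKEN"  # placeholder, use your real token
os.environ["OPENAI_API_KEY"] = "sk-xxxx"                   # placeholder, use your real key

bot = Llama2App()
bot.add("web_page", "https://en.wikipedia.org/wiki/Mark_Zuckerberg")
print(bot.query("Who is Mark Zuckerberg?"))
```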
### OpenSourceApp
```python

View File

@@ -4,6 +4,7 @@ __version__ = importlib.metadata.version(__package__ or __name__)
from embedchain.apps.App import App # noqa: F401
from embedchain.apps.CustomApp import CustomApp # noqa: F401
from embedchain.apps.Llama2App import Llama2App # noqa: F401
from embedchain.apps.OpenSourceApp import OpenSourceApp # noqa: F401
from embedchain.apps.PersonApp import (PersonApp, # noqa: F401
                                        PersonOpenSourceApp)

View File

@@ -0,0 +1,36 @@
import os

from langchain.llms import Replicate

from embedchain.config import AppConfig
from embedchain.embedchain import EmbedChain


class Llama2App(EmbedChain):
    """
    The EmbedChain Llama2App class.
    Has two functions: add and query.

    add(data_type, url): adds the data from the given URL to the vector db.
    query(query): finds an answer to the given query using the vector database and the LLM.
    """

    def __init__(self, config: AppConfig = None):
        """
        :param config: AppConfig instance to load as configuration. Optional.
        """
        if "REPLICATE_API_TOKEN" not in os.environ:
            raise ValueError("Please set the REPLICATE_API_TOKEN environment variable to your Replicate API token.")

        if config is None:
            config = AppConfig()

        super().__init__(config)

    def get_llm_model_answer(self, prompt, config: AppConfig = None):
        # TODO: Move the model and other inputs into config
        llm = Replicate(
            model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
            input={"temperature": 0.75, "max_length": 500, "top_p": 1},
        )

        return llm(prompt)
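Until that TODO is addressed, the model name and generation settings stay hard-coded. As a hypothetical sketch (not part of this commit), they could be tweaked by overriding `get_llm_model_answer` in a subclass, reusing the same `Replicate` wrapper shown above; the lower temperature is an arbitrary example value.
```python
from langchain.llms import Replicate

from embedchain.apps.Llama2App import Llama2App


class LowTemperatureLlama2App(Llama2App):
    """Hypothetical subclass that only changes the sampling temperature."""

    def get_llm_model_answer(self, prompt, config=None):
        llm = Replicate(
            model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
            input={"temperature": 0.1, "max_length": 500, "top_p": 1},
        )
        return llm(prompt)
```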

View File

@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
setuptools.setup(
    name="embedchain",
    version="0.0.24",
    version="0.0.25",
    author="Taranjeet Singh",
    author_email="reachtotj@gmail.com",
    description="embedchain is a framework to easily create LLM powered bots over any dataset",  # noqa:E501
@@ -34,6 +34,8 @@ setuptools.setup(
"sentence_transformers",
"docx2txt",
"pydantic==1.10.8",
"replicate==0.9.0",
"duckduckgo-search==3.8.4",
],
extras_require={"dev": ["black", "ruff", "isort", "pytest"]},
)