Merge branch 'master' into patch-1

pull/980/head
lekapsy 2023-04-12 19:17:34 +02:00 committed by GitHub
commit 7729f198d4
6 changed files with 18 additions and 7 deletions


@@ -17,6 +17,10 @@ OPENAI_AZURE_API_BASE=your-base-url-for-azure
OPENAI_AZURE_API_VERSION=api-version-for-azure
# OPENAI_AZURE_DEPLOYMENT_ID - OpenAI deployment ID for Azure (Example: my-deployment-id)
OPENAI_AZURE_DEPLOYMENT_ID=deployment-id-for-azure
+# OPENAI_AZURE_CHAT_DEPLOYMENT_ID - OpenAI deployment ID for Azure Chat (Example: my-deployment-id-for-azure-chat)
+OPENAI_AZURE_CHAT_DEPLOYMENT_ID=deployment-id-for-azure-chat
+# OPENAI_AZURE_EMBEDDINGS_DEPLOYMENT_ID - OpenAI deployment ID for Azure Embeddings (Example: my-deployment-id-for-azure-embeddings)
+OPENAI_AZURE_EMBEDDINGS_DEPLOYMENT_ID=deployment-id-for-azure-embeddings
################################################################################
### LLM MODELS
@@ -39,7 +43,7 @@ SMART_TOKEN_LIMIT=8000
################################################################################
# MEMORY_BACKEND - Memory backend type (Default: local)
-MEMORY_BACKEND=redis
+MEMORY_BACKEND=local
### PINECONE
# PINECONE_API_KEY - Pinecone API Key (Example: my-pinecone-api-key)
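The two hunks above add Azure-specific deployment IDs for chat and for embeddings to the environment template, and change the active MEMORY_BACKEND value from redis to local. A minimal sketch, assuming python-dotenv is available to read the `.env` file, to confirm the new variables are picked up:

```python
# Quick check (a sketch, assuming python-dotenv is installed) that the two new Azure
# deployment variables are read from .env alongside the existing ones.
import os

from dotenv import load_dotenv

load_dotenv()  # reads .env from the current working directory

for key in (
    "OPENAI_AZURE_API_BASE",
    "OPENAI_AZURE_API_VERSION",
    "OPENAI_AZURE_DEPLOYMENT_ID",
    "OPENAI_AZURE_CHAT_DEPLOYMENT_ID",
    "OPENAI_AZURE_EMBEDDINGS_DEPLOYMENT_ID",
    "MEMORY_BACKEND",
):
    print(f"{key} = {os.getenv(key)}")
```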


@@ -93,7 +93,7 @@ pip install -r requirements.txt
4. Rename `.env.template` to `.env` and fill in your `OPENAI_API_KEY`. If you plan to use Speech Mode, fill in your `ELEVEN_LABS_API_KEY` as well.
- Obtain your OpenAI API key from: https://platform.openai.com/account/api-keys.
- Obtain your ElevenLabs API key from: https://elevenlabs.io. You can view your xi-api-key using the "Profile" tab on the website.
-- If you want to use GPT on an Azure instance, set `USE_AZURE` to `True` and provide the `OPENAI_AZURE_API_BASE`, `OPENAI_AZURE_API_VERSION` and `OPENAI_AZURE_DEPLOYMENT_ID` values as explained here: https://pypi.org/project/openai/ in the `Microsoft Azure Endpoints` section
+- If you want to use GPT on an Azure instance, set `USE_AZURE` to `True` and provide the `OPENAI_AZURE_API_BASE`, `OPENAI_AZURE_API_VERSION` and `OPENAI_AZURE_DEPLOYMENT_ID` values as explained in the `Microsoft Azure Endpoints` section here: https://pypi.org/project/openai/. Additionally, you need separate deployments for chat and for embeddings; add their IDs to `OPENAI_AZURE_CHAT_DEPLOYMENT_ID` and `OPENAI_AZURE_EMBEDDINGS_DEPLOYMENT_ID` respectively.
## 🔧 Usage
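The updated setup step points at the `Microsoft Azure Endpoints` section of the openai package documentation. As a rough sketch of what that wiring amounts to with the legacy `openai` (pre-1.0) Python API; every value below is a placeholder rather than something taken from this change:

```python
# Sketch of the Azure endpoint wiring referenced above (legacy openai<1.0 API).
# All values are placeholders; the real ones come from the .env variables.
import openai

openai.api_type = "azure"
openai.api_base = "https://my-resource.openai.azure.com/"  # OPENAI_AZURE_API_BASE
openai.api_version = "2023-03-15-preview"                  # OPENAI_AZURE_API_VERSION (placeholder)
openai.api_key = "my-azure-openai-key"                     # OPENAI_API_KEY

# On Azure, requests are routed to a named deployment rather than a bare model name.
response = openai.ChatCompletion.create(
    deployment_id="my-deployment-id-for-azure-chat",       # OPENAI_AZURE_CHAT_DEPLOYMENT_ID
    messages=[{"role": "user", "content": "Hello from the Auto-GPT setup."}],
)
print(response.choices[0].message["content"])
```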


@@ -49,6 +49,8 @@ class Config(metaclass=Singleton):
self.openai_api_base = os.getenv("OPENAI_AZURE_API_BASE")
self.openai_api_version = os.getenv("OPENAI_AZURE_API_VERSION")
self.openai_deployment_id = os.getenv("OPENAI_AZURE_DEPLOYMENT_ID")
+self.azure_chat_deployment_id = os.getenv("OPENAI_AZURE_CHAT_DEPLOYMENT_ID")
+self.azure_embeddings_deployment_id = os.getenv("OPENAI_AZURE_EMBEDDINGS_DEPLOYMENT_ID")
openai.api_type = "azure"
openai.api_base = self.openai_api_base
openai.api_version = self.openai_api_version
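The hunk above stores the two new deployment IDs on the Config singleton right where the module-level Azure settings are applied. A quick sanity-check sketch; it assumes `USE_AZURE` is enabled in `.env` and that it runs from the directory containing the `config` module:

```python
# Sanity check (sketch): constructing the Config singleton should apply the module-level
# openai settings shown in the hunk above and expose the two new deployment IDs.
import openai
from config import Config

cfg = Config()

print("api_type:", openai.api_type)      # expected: "azure" when USE_AZURE is set
print("api_base:", openai.api_base)
print("api_version:", openai.api_version)
print("chat deployment:", cfg.azure_chat_deployment_id)
print("embeddings deployment:", cfg.azure_embeddings_deployment_id)
```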


@@ -26,7 +26,7 @@ JSON_SCHEMA = """
"""
-def fix_and_parse_json(
+def fix_and_parse_json(
json_str: str,
try_to_fix_with_gpt: bool = True
) -> Union[str, Dict[Any, Any]]:
@@ -35,8 +35,8 @@ def fix_and_parse_json(
json_str = json_str.replace('\t', '')
return json.loads(json_str)
except json.JSONDecodeError as _: # noqa: F841
-json_str = correct_json(json_str)
try:
+json_str = correct_json(json_str)
return json.loads(json_str)
except json.JSONDecodeError as _: # noqa: F841
pass
@@ -53,6 +53,7 @@ def fix_and_parse_json(
last_brace_index = json_str.rindex("}")
json_str = json_str[:last_brace_index+1]
return json.loads(json_str)
+# Can throw a ValueError if there is no "{" or "}" in the json_str
except (json.JSONDecodeError, ValueError) as e: # noqa: F841
if try_to_fix_with_gpt:
print("Warning: Failed to parse AI output, attempting to fix."


@@ -9,7 +9,7 @@ def create_chat_completion(messages, model=None, temperature=None, max_tokens=No
"""Create a chat completion using the OpenAI API"""
if cfg.use_azure:
response = openai.ChatCompletion.create(
-deployment_id=cfg.openai_deployment_id,
+deployment_id=cfg.azure_chat_deployment_id,
model=model,
messages=messages,
temperature=temperature,
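With the hunk above, `create_chat_completion` targets the chat-specific deployment instead of the single generic one whenever Azure is in use. A hedged usage sketch; the model name, temperature, and token limit are illustrative, and the import path assumes the script runs from the directory containing `llm_utils`:

```python
# Usage sketch; "llm_utils" is the assumed module name for the function shown above.
from llm_utils import create_chat_completion

reply = create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Reply with the single word: pong."},
    ],
    model="gpt-3.5-turbo",  # on Azure, the configured chat deployment serves the request
    temperature=0.0,
    max_tokens=16,
)
print(reply)
```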


@@ -1,12 +1,16 @@
"""Base class for memory providers."""
import abc
-from config import AbstractSingleton
+from config import AbstractSingleton, Config
import openai
+cfg = Config()
def get_ada_embedding(text):
text = text.replace("\n", " ")
-return openai.Embedding.create(input=[text], model="text-embedding-ada-002")["data"][0]["embedding"]
+if cfg.use_azure:
+return openai.Embedding.create(input=[text], engine=cfg.azure_embeddings_deployment_id, model="text-embedding-ada-002")["data"][0]["embedding"]
+else:
+return openai.Embedding.create(input=[text], model="text-embedding-ada-002")["data"][0]["embedding"]
class MemoryProviderSingleton(AbstractSingleton):
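Finally, `get_ada_embedding` branches on `cfg.use_azure` and passes the embeddings deployment as `engine`, while non-Azure calls keep using the public endpoint. A hedged usage sketch (the `memory.base` import path is an assumption based on the module docstring shown above):

```python
# Usage sketch: text-embedding-ada-002 returns a 1536-dimensional vector regardless of
# whether the call goes to the public endpoint or the Azure deployment configured above.
from memory.base import get_ada_embedding  # assumed module path

vector = get_ada_embedding("Auto-GPT stores its long-term memory as embeddings.")
print(len(vector))  # 1536 for text-embedding-ada-002
print(vector[:5])   # first few floats of the embedding
```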