Mirror of https://github.com/khoaliber/khoj.git, synced 2026-03-07 13:23:15 +00:00.
Deprecate unused beta search and answer API endpoints
This commit is contained in:
@@ -1,5 +1,4 @@
|
||||
# Standard Packages
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
@@ -17,28 +16,6 @@ from khoj.processor.conversation.utils import (
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def answer(text, user_query, model, api_key=None, temperature=0.5, max_tokens=500):
    """Answer the user's query with OpenAI's GPT, grounded in *text*.

    Args:
        text: Reference material the answer should be based on.
        user_query: The question posed by the user.
        model: Name of the OpenAI model to query.
        api_key: Optional OpenAI API key.
        temperature: Sampling temperature for generation.
        max_tokens: Upper bound on tokens in the generated answer.

    Returns:
        The model's reply as a string, with double newlines removed.
    """
    # Render the answer prompt template with the reference text and query
    gpt_prompt = prompts.answer.format(text=text, user_query=user_query)

    # Query the model, retrying with backoff on transient failures
    logger.debug(f"Prompt for GPT: {gpt_prompt}")
    gpt_response = completion_with_backoff(
        prompt=gpt_prompt,
        model_name=model,
        temperature=temperature,
        max_tokens=max_tokens,
        model_kwargs={"stop": ['"""']},
        openai_api_key=api_key,
    )

    # Strip paragraph breaks from the reply before returning it
    return str(gpt_response).replace("\n\n", "")
|
||||
|
||||
|
||||
def summarize(text, summary_type, model, user_query=None, api_key=None, temperature=0.5, max_tokens=200):
|
||||
"""
|
||||
Summarize user input using OpenAI's GPT
|
||||
@@ -126,31 +103,6 @@ def extract_questions(
|
||||
return questions
|
||||
|
||||
|
||||
def extract_search_type(text, model, api_key=None, temperature=0.5, max_tokens=100, verbose=0):
    """Infer which Khoj search type the user query targets, using OpenAI's GPT.

    Args:
        text: The user query to classify.
        model: Name of the OpenAI model to query.
        api_key: Optional OpenAI API key.
        temperature: Sampling temperature for generation.
        max_tokens: Upper bound on tokens in the model's reply.
        verbose: Print prompt debugging output when > 1.

    Returns:
        The JSON object parsed from GPT's reply
        (expected to contain a "search-type" key).
    """
    # Append the query to the search-type few-shot prompt
    query_prompt = prompts.search_type + f"{text}\nA:"
    if verbose > 1:
        print(f"Message -> Prompt: {text} -> {query_prompt}")

    # Query the model; stop at newline so we get a single-line JSON answer
    logger.debug(f"Prompt for GPT: {query_prompt}")
    raw_response = completion_with_backoff(
        prompt=query_prompt,
        model_name=model,
        temperature=temperature,
        max_tokens=max_tokens,
        frequency_penalty=0.2,
        model_kwargs={"stop": ["\n"]},
        openai_api_key=api_key,
    )

    # Trim escape-sequence padding and decode the JSON payload
    return json.loads(raw_response.strip(empty_escape_sequences))
|
||||
|
||||
|
||||
def converse(
|
||||
references,
|
||||
user_query,
|
||||
|
||||
@@ -1,64 +1,9 @@
|
||||
# Standard Packages
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
# External Packages
|
||||
from fastapi import APIRouter
|
||||
|
||||
# Internal Packages
|
||||
from khoj.routers.api import search
|
||||
from khoj.processor.conversation.gpt import (
|
||||
answer,
|
||||
extract_search_type,
|
||||
)
|
||||
from khoj.utils.state import SearchType
|
||||
from khoj.utils.helpers import get_from_dict
|
||||
from khoj.utils import state
|
||||
|
||||
|
||||
# Initialize Router
|
||||
api_beta = APIRouter()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Create Routes
@api_beta.get("/search")
def search_beta(q: str, n: Optional[int] = 1):
    """Search for `q` with a GPT-inferred search type, returning top `n` results.

    Returns:
        A dict with keys "status" ("ok" or "error"), "result" (the search
        results, or a list with the error message), and "type" (the inferred
        search type string, or None on error).
    """
    # Initialize Variables
    model = state.processor_config.conversation.model
    api_key = state.processor_config.conversation.openai_api_key

    # Extract Search Type using GPT.
    # NOTE: SearchType(...) is coerced inside the try so that an unrecognized
    # type string from GPT yields the structured error response instead of an
    # unhandled ValueError (HTTP 500).
    try:
        metadata = extract_search_type(q, model=model, api_key=api_key, verbose=state.verbose)
        search_type = get_from_dict(metadata, "search-type")
        validated_type = SearchType(search_type)
    except Exception as e:
        return {"status": "error", "result": [str(e)], "type": None}

    # Search
    search_results = search(q, n=n, t=validated_type)

    # Return response
    return {"status": "ok", "result": search_results, "type": search_type}
|
||||
|
||||
|
||||
@api_beta.get("/answer")
|
||||
def answer_beta(q: str):
|
||||
# Initialize Variables
|
||||
model = state.processor_config.conversation.model
|
||||
api_key = state.processor_config.conversation.openai_api_key
|
||||
|
||||
# Collate context for GPT
|
||||
result_list = search(q, n=2, r=True, score_threshold=0, dedupe=False)
|
||||
collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
|
||||
logger.debug(f"Reference Context:\n{collated_result}")
|
||||
|
||||
# Make GPT respond to user query using provided context
|
||||
try:
|
||||
gpt_response = answer(collated_result, user_query=q, model=model, api_key=api_key)
|
||||
status = "ok"
|
||||
except Exception as e:
|
||||
gpt_response = str(e)
|
||||
status = "error"
|
||||
|
||||
return {"status": status, "response": gpt_response}
|
||||
|
||||
Reference in New Issue
Block a user