Add support for text to speech and speech to text (#863)

- Add support for text to speech, speech to text. Add loading and responsive indicators to reflect state.
- When streaming for speech to text, show incremental transcription in the message input field
- When streaming text to speech, add a pause button in the chat message to allow user to stop playback
This commit is contained in:
sabaimran
2024-07-24 02:06:40 -07:00
committed by GitHub
parent 3e4325edab
commit 694bedc25b
14 changed files with 264 additions and 36 deletions

View File

@@ -18,6 +18,7 @@ export default function RootLayout({
<html lang="en">
<meta httpEquiv="Content-Security-Policy"
content="default-src 'self' https://assets.khoj.dev;
media-src * blob:;
script-src 'self' https://assets.khoj.dev 'unsafe-inline' 'unsafe-eval';
connect-src 'self' https://ipapi.co/json ws://localhost:42110;
style-src 'self' https://assets.khoj.dev 'unsafe-inline' https://fonts.googleapis.com;

View File

@@ -65,7 +65,7 @@ function ChatBodyData(props: ChatBodyDataProps) {
}
}, [props.streamedMessages]);
if(!conversationId) {
if (!conversationId) {
window.location.href = '/';
return;
}

View File

@@ -18,16 +18,31 @@ export function convertColorToTextClass(color: string) {
return `text-gray-500`;
}
export function convertSuggestionColorToTextClass(color: string) {
const colors = ['blue', 'yellow', 'green', 'pink', 'purple'];
if (colors.includes(color)) {
return "" + `bg-gradient-to-b from-[hsl(var(--background))] to-${color}-100/${color == "green" ? "90" : "70"} dark:from-[hsl(var(--background))] dark:to-${color}-950/30 dark:border dark:border-neutral-700`;
}
// Map a color name to the suggestion card's background gradient classes.
// Class strings are kept as full literals (not interpolated) so Tailwind's
// content scanner can detect them without needing a safelist.
function convertToBGGradientClass(color: string) {
    if (color === 'red') return `bg-gradient-to-b from-[hsl(var(--background))] to-red-100/70 dark:from-[hsl(var(--background))] dark:to-red-950/30 `;
    if (color === 'yellow') return `bg-gradient-to-b from-[hsl(var(--background))] to-yellow-100/70 dark:from-[hsl(var(--background))] dark:to-yellow-950/30 `;
    // Green uses a slightly stronger light-mode opacity (90 vs 70).
    if (color === 'green') return `bg-gradient-to-b from-[hsl(var(--background))] to-green-100/90 dark:from-[hsl(var(--background))] dark:to-green-950/30 `;
    if (color === 'blue') return `bg-gradient-to-b from-[hsl(var(--background))] to-blue-100/70 dark:from-[hsl(var(--background))] dark:to-blue-950/30 `;
    if (color === 'orange') return `bg-gradient-to-b from-[hsl(var(--background))] to-orange-100/70 dark:from-[hsl(var(--background))] dark:to-orange-950/30 `;
    if (color === 'purple') return `bg-gradient-to-b from-[hsl(var(--background))] to-purple-100/70 dark:from-[hsl(var(--background))] dark:to-purple-950/30 `;
    if (color === 'pink') return `bg-gradient-to-b from-[hsl(var(--background))] to-pink-100/70 dark:from-[hsl(var(--background))] dark:to-pink-950/30 `;
    if (color === 'teal') return `bg-gradient-to-b from-[hsl(var(--background))] to-teal-100/70 dark:from-[hsl(var(--background))] dark:to-teal-950/30 `;
    if (color === 'cyan') return `bg-gradient-to-b from-[hsl(var(--background))] to-cyan-100/70 dark:from-[hsl(var(--background))] dark:to-cyan-950/30 `;
    if (color === 'lime') return `bg-gradient-to-b from-[hsl(var(--background))] to-lime-100/70 dark:from-[hsl(var(--background))] dark:to-lime-950/30 `;
    if (color === 'indigo') return `bg-gradient-to-b from-[hsl(var(--background))] to-indigo-100/70 dark:from-[hsl(var(--background))] dark:to-indigo-950/30 `;
    // Fix: Tailwind's color is spelled 'fuchsia'; the previous 'fuschia' classes
    // do not exist and rendered no gradient. The misspelled input is still
    // accepted for backward compatibility with existing callers.
    if (color === 'fuchsia' || color === 'fuschia') return `bg-gradient-to-b from-[hsl(var(--background))] to-fuchsia-100/70 dark:from-[hsl(var(--background))] dark:to-fuchsia-950/30 `;
    if (color === 'rose') return `bg-gradient-to-b from-[hsl(var(--background))] to-rose-100/70 dark:from-[hsl(var(--background))] dark:to-rose-950/30 `;
    if (color === 'sky') return `bg-gradient-to-b from-[hsl(var(--background))] to-sky-100/70 dark:from-[hsl(var(--background))] dark:to-sky-950/30 `;
    if (color === 'amber') return `bg-gradient-to-b from-[hsl(var(--background))] to-amber-100/70 dark:from-[hsl(var(--background))] dark:to-amber-950/30 `;
    if (color === 'emerald') return `bg-gradient-to-b from-[hsl(var(--background))] to-emerald-100/70 dark:from-[hsl(var(--background))] dark:to-emerald-950/30 `;
    // Unknown colors fall back to a neutral light gradient.
    return `bg-gradient-to-b from-white to-orange-50`;
}
// Build the full suggestion-card class string: the color's background
// gradient plus a subtle neutral border in dark mode.
export function convertSuggestionColorToTextClass(color: string) {
    const gradientClasses = convertToBGGradientClass(color);
    const darkModeBorder = 'dark:border dark:border-neutral-700';
    return `${gradientClasses} ${darkModeBorder}`;
}
export function convertColorToBorderClass(color: string) {
console.log("Color:", color);
if (color === 'red') return `border-red-500`;
if (color === 'yellow') return `border-yellow-500`;
if (color === 'green') return `border-green-500`;

View File

@@ -19,7 +19,10 @@ import {
Notebook,
Question,
Robot,
Shapes
Shapes,
Stop,
Waveform,
WaveSine
} from '@phosphor-icons/react';
import {
@@ -48,6 +51,8 @@ import { PopoverTrigger } from '@radix-ui/react-popover';
import Link from 'next/link';
import { AlertDialogCancel } from '@radix-ui/react-alert-dialog';
import LoginPrompt from '../loginPrompt/loginPrompt';
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';
import { InlineLoading } from '../loading/loading';
export interface ChatOptions {
[key: string]: string
@@ -96,6 +101,9 @@ export default function ChatInputArea(props: ChatInputProps) {
const [loginRedirectMessage, setLoginRedirectMessage] = useState<string | null>(null);
const [showLoginPrompt, setShowLoginPrompt] = useState(false);
const [recording, setRecording] = useState(false);
const [mediaRecorder, setMediaRecorder] = useState<MediaRecorder | null>(null);
const [progressValue, setProgressValue] = useState(0);
useEffect(() => {
@@ -195,6 +203,83 @@ export default function ChatInputArea(props: ChatInputProps) {
return <ArrowRight className={className} />
}
// Capture microphone audio with a MediaRecorder and stream it to the
// transcription API, showing incremental transcriptions in the message input
// while recording is in progress.
async function startRecordingAndTranscribe() {
    try {
        const microphone = await navigator.mediaDevices.getUserMedia({ audio: true });
        const mediaRecorder = new MediaRecorder(microphone, { mimeType: 'audio/webm' });

        // All audio captured so far; each data event appends a new segment.
        const audioChunks: Blob[] = [];

        // Fires every timeslice (1.5s, see start() below). Re-sends the full
        // accumulated audio so the server can return a progressively refined
        // transcription of everything spoken so far.
        mediaRecorder.ondataavailable = async (event) => {
            audioChunks.push(event.data);
            const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
            const formData = new FormData();
            formData.append('file', audioBlob);

            // Send the incremental audio blob to the server
            try {
                const response = await fetch('/api/transcribe', {
                    method: 'POST',
                    body: formData,
                });

                if (!response.ok) {
                    throw new Error('Network response was not ok');
                }

                const transcription = await response.json();
                // Show the partial transcription in the message input field.
                setMessage(transcription.text.trim());
            } catch (error) {
                // Best-effort: a failed incremental upload only skips one
                // refresh; the next data event retries with the full audio.
                console.error('Error sending audio to server:', error);
            }
        };

        // Send an audio blob every 1.5 seconds
        mediaRecorder.start(1500);

        // On stop: send the complete recording once more for the final
        // transcription, then release the microphone and clear recorder state.
        mediaRecorder.onstop = async () => {
            const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
            const formData = new FormData();
            formData.append('file', audioBlob);

            // Send the audio blob to the server
            try {
                const response = await fetch('/api/transcribe', {
                    method: 'POST',
                    body: formData,
                });

                if (!response.ok) {
                    throw new Error('Network response was not ok');
                }

                const transcription = await response.json();
                // Stop all microphone tracks so the browser's recording
                // indicator turns off, then publish the final transcription.
                mediaRecorder.stream.getTracks().forEach(track => track.stop());
                setMediaRecorder(null);
                setMessage(transcription.text.trim());
            } catch (error) {
                // NOTE(review): on failure the microphone tracks are left
                // running and mediaRecorder state is not cleared — confirm
                // whether cleanup should also happen in this path.
                console.error('Error sending audio to server:', error);
            }
        };

        // Expose the recorder so the stop-effect can end the session.
        setMediaRecorder(mediaRecorder);
    } catch (error) {
        // getUserMedia rejected (permission denied or no microphone).
        console.error("Error getting microphone", error);
    }
}
// Keep the MediaRecorder in sync with the `recording` flag: stop an active
// recorder when recording is toggled off, and start a new recording session
// when toggled on with no recorder active.
// NOTE(review): `mediaRecorder` is read but deliberately absent from the
// dependency array — the effect should only fire on `recording` transitions,
// not when the recorder itself is set; confirm this is intended.
useEffect(() => {
    if (!recording && mediaRecorder) {
        mediaRecorder.stop();
    }

    if (recording && !mediaRecorder) {
        startRecordingAndTranscribe();
    }
}, [recording]);
return (
<>
{
@@ -321,21 +406,58 @@ export default function ChatInputArea(props: ChatInputProps) {
}
}}
onChange={(e) => setMessage(e.target.value)}
disabled={props.sendDisabled} />
disabled={props.sendDisabled || recording} />
</div>
<Button
variant={'ghost'}
className="!bg-none p-1 h-auto text-3xl rounded-full text-gray-300 hover:text-gray-500"
disabled={props.sendDisabled}>
<Microphone weight='fill' className={`${props.isMobileWidth ? 'w-6 h-6' : 'w-8 h-8'}`} />
</Button>
{
recording ?
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant={'ghost'}
className="!bg-none p-1 h-auto text-3xl rounded-full text-gray-300 hover:text-gray-500"
onClick={() => setRecording(!recording)}
disabled={props.sendDisabled}
>
<Stop weight='fill' className={`${props.isMobileWidth ? 'w-6 h-6' : 'w-8 h-8'}`} />
</Button>
</TooltipTrigger>
<TooltipContent>
Click to stop recording and transcribe your voice.
</TooltipContent>
</Tooltip>
</TooltipProvider>
:
(
mediaRecorder ?
<InlineLoading />
:
< TooltipProvider >
<Tooltip>
<TooltipTrigger asChild>
<Button
variant={'ghost'}
className="!bg-none p-1 h-auto text-3xl rounded-full text-gray-300 hover:text-gray-500"
onClick={() => setRecording(!recording)}
disabled={props.sendDisabled}
>
<Microphone weight='fill' className={`${props.isMobileWidth ? 'w-6 h-6' : 'w-8 h-8'}`} />
</Button>
</TooltipTrigger>
<TooltipContent>
Click to start recording and transcribe your voice.
</TooltipContent>
</Tooltip>
</TooltipProvider>
)
}
<Button
className="bg-orange-300 hover:bg-orange-500 rounded-full p-0 h-auto text-3xl transition transform hover:-translate-y-1"
onClick={onSendMessage}
disabled={props.sendDisabled}>
<ArrowCircleUp className={`${props.isMobileWidth ? 'w-6 h-6' : 'w-8 h-8'}`} />
</Button>
</div>
</div >
</>
)
}

View File

@@ -10,9 +10,10 @@ import 'katex/dist/katex.min.css';
import { TeaserReferencesSection, constructAllReferences } from '../referencePanel/referencePanel';
import { ThumbsUp, ThumbsDown, Copy, Brain, Cloud, Folder, Book, Aperture, SpeakerHigh, MagnifyingGlass } from '@phosphor-icons/react';
import { ThumbsUp, ThumbsDown, Copy, Brain, Cloud, Folder, Book, Aperture, SpeakerHigh, MagnifyingGlass, Pause } from '@phosphor-icons/react';
import * as DomPurify from 'dompurify';
import { InlineLoading } from '../loading/loading';
const md = new markdownIt({
html: true,
@@ -206,8 +207,16 @@ export default function ChatMessage(props: ChatMessageProps) {
const [copySuccess, setCopySuccess] = useState<boolean>(false);
const [isHovering, setIsHovering] = useState<boolean>(false);
const [markdownRendered, setMarkdownRendered] = useState<string>('');
const [isPlaying, setIsPlaying] = useState<boolean>(false);
const [interrupted, setInterrupted] = useState<boolean>(false);
const interruptedRef = useRef<boolean>(false);
const messageRef = useRef<HTMLDivElement>(null);
useEffect(() => {
interruptedRef.current = interrupted;
}, [interrupted]);
useEffect(() => {
let message = props.chatMessage.message;
@@ -278,8 +287,8 @@ export default function ChatMessage(props: ChatMessageProps) {
function formatDate(timestamp: string) {
// Format date in HH:MM, DD MMM YYYY format
let date = new Date(timestamp + "Z");
let time_string = date.toLocaleTimeString('en-IN', { hour: '2-digit', minute: '2-digit', hour12: true }).toUpperCase();
let date_string = date.toLocaleString('en-IN', { year: 'numeric', month: 'short', day: '2-digit'}).replaceAll('-', ' ');
let time_string = date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit', hour12: true }).toUpperCase();
let date_string = date.toLocaleString('en-US', { year: 'numeric', month: 'short', day: '2-digit' }).replaceAll('-', ' ');
return `${time_string} on ${date_string}`;
}
@@ -330,6 +339,79 @@ export default function ChatMessage(props: ChatMessageProps) {
return classes.join(' ');
}
// Synthesize and play the chat message sentence-by-sentence via the Khoj
// speech API. Pre-fetches the next sentence's audio while the current one
// plays, and stops between sentences when the user presses pause.
async function playTextToSpeech() {
    // Browser native speech API
    // const utterance = new SpeechSynthesisUtterance(props.chatMessage.message);
    // speechSynthesis.speak(utterance);

    // Using the Khoj speech API
    // Break the message up into chunks of sentences
    const sentenceRegex = /[^.!?]+[.!?]*/g;
    const chunks = props.chatMessage.message.match(sentenceRegex) ?? [];

    // Nothing to speak (empty message or no sentence-like content).
    if (chunks.length === 0 || !chunks[0]) {
        return;
    }

    setIsPlaying(true);

    // Pipeline: keep one fetch in flight for the next chunk while the
    // current chunk is playing.
    let nextBlobPromise = fetchBlob(chunks[0]);

    for (let i = 0; i < chunks.length; i++) {
        if (interruptedRef.current) {
            break; // Exit the loop if interrupted
        }

        const currentBlobPromise = nextBlobPromise;
        if (i < chunks.length - 1) {
            nextBlobPromise = fetchBlob(chunks[i + 1]);
        }

        // Track the object URL so it can be released after playback.
        let url: string | null = null;
        try {
            const blob = await currentBlobPromise;
            url = URL.createObjectURL(blob);
            await playAudio(url);
        } catch (error) {
            console.error('Error:', error);
            break; // Exit the loop on error
        } finally {
            // Fix: revoke the object URL so each sentence's audio blob can be
            // garbage collected; previously every URL leaked for the page's
            // lifetime.
            if (url) {
                URL.revokeObjectURL(url);
            }
        }
    }

    setIsPlaying(false);
    setInterrupted(false); // Reset interrupted state after playback
}
// Request synthesized speech for the given text from the Khoj speech API
// and return the audio payload as a Blob.
async function fetchBlob(text: string) {
    const speechEndpoint = `/api/chat/speech?text=${encodeURIComponent(text)}`;
    const response = await fetch(speechEndpoint, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
    });

    if (!response.ok) {
        throw new Error('Network response was not ok');
    }

    return response.blob();
}
// Play the audio at the given object URL, resolving when playback finishes.
// Rejects if the audio fails to load or playback cannot start.
function playAudio(url: string) {
    return new Promise((resolve, reject) => {
        const audio = new Audio(url);
        audio.onended = resolve;
        audio.onerror = reject;
        // Fix: play() returns a Promise that rejects when playback is blocked
        // (e.g. the browser's autoplay policy). Previously that rejection was
        // unhandled and this Promise would never settle, hanging the caller.
        audio.play().catch(reject);
    });
}
const allReferences = constructAllReferences(props.chatMessage.context, props.chatMessage.onlineContext);
return (
@@ -349,7 +431,7 @@ export default function ChatMessage(props: ChatMessageProps) {
</div>
<div className={styles.chatFooter}>
{
(isHovering || props.isMobileWidth || props.isLastMessage) &&
(isHovering || props.isMobileWidth || props.isLastMessage || isPlaying) &&
(
<>
<div title={formatDate(props.chatMessage.created)} className={`text-gray-400 relative top-0 left-4`}>
@@ -359,9 +441,17 @@ export default function ChatMessage(props: ChatMessageProps) {
{
(props.chatMessage.by === "khoj") &&
(
<button title="Speak" onClick={(event) => console.log("speaker")}>
<SpeakerHigh alt="Speak Message" color='hsl(var(--muted-foreground))' />
</button>
isPlaying ?
(
interrupted ?
<InlineLoading iconClassName='p-0' className='m-0' />
: <button title="Pause Speech" onClick={(event) => setInterrupted(true)}>
<Pause alt="Pause Message" color='hsl(var(--muted-foreground))' />
</button>
)
: <button title="Speak" onClick={(event) => playTextToSpeech()}>
<SpeakerHigh alt="Speak Message" color='hsl(var(--muted-foreground))' />
</button>
)
}
<button title="Copy" className={`${styles.copyButton}`} onClick={() => {

View File

@@ -2,6 +2,7 @@ import { CircleNotch } from '@phosphor-icons/react';
interface LoadingProps {
className?: string;
iconClassName?: string;
message?: string;
}
@@ -17,7 +18,7 @@ export default function Loading(props: LoadingProps) {
// Inline spinner with an optional leading message. Icon size/margins default
// to 'h-5 w-5 mx-3' and can be overridden via props.iconClassName.
// Fix: removed the stale pre-change duplicate of the <span> line left over
// from the diff extraction; only the current version is kept.
export function InlineLoading(props: LoadingProps) {
    return (
        <button className={`${props.className}`}>
            <span>{props.message} <CircleNotch className={`inline animate-spin ${props.iconClassName ? props.iconClassName : 'h-5 w-5 mx-3'}`}/></span>
        </button>
    )
}

View File

@@ -9,6 +9,7 @@ import {
import styles from "./suggestions.module.css";
import { getIconFromIconName } from "@/app/common/iconUtils";

View File

@@ -18,6 +18,7 @@ export default function RootLayout({
<html lang="en">
<meta httpEquiv="Content-Security-Policy"
content="default-src 'self' https://assets.khoj.dev;
media-src * blob:;
script-src 'self' https://assets.khoj.dev 'unsafe-inline' 'unsafe-eval';
connect-src 'self' https://ipapi.co/json ws://localhost:42110;
style-src 'self' https://assets.khoj.dev 'unsafe-inline' https://fonts.googleapis.com;

View File

@@ -88,7 +88,7 @@ function ChatBodyData(props: ChatBodyDataProps) {
}
}, [props.chatOptionsData]);
function onButtonClick() {
function shuffleSuggestionsCards() {
shuffleAndSetOptions();
}
@@ -224,7 +224,7 @@ function ChatBodyData(props: ChatBodyDataProps) {
}
<div className={`suggestions ${styles.suggestions} w-full ${props.isMobileWidth ? 'flex flex-col' : 'flex flex-row'} justify-center items-center`}>
{shuffledOptions.map(([key, styleClass, value, link], index) => (
<div key={key} onClick={() => fillArea(link, key, value)}>
<div key={`${key} ${value}`} onClick={() => fillArea(link, key, value)}>
<SuggestionCard
key={key + Math.random()}
title={key}
@@ -238,7 +238,7 @@ function ChatBodyData(props: ChatBodyDataProps) {
</div>
<div className="flex items-center justify-center margin-auto">
<button
onClick={onButtonClick}
onClick={shuffleSuggestionsCards}
className="m-2 p-1.5 rounded-lg dark:hover:bg-[var(--background-color)] hover:bg-stone-100 border border-stone-100 text-sm text-stone-500 dark:text-stone-300 dark:border-neutral-700">
More Examples <ClockCounterClockwise className='h-4 w-4 inline' />
</button>

View File

@@ -2,10 +2,6 @@ import type { Config } from "tailwindcss"
const config = {
safelist: [
{
pattern: /to-(blue|yellow|green|pink|purple)-(50|100|200|950)/,
variants: ['dark'],
},
],
darkMode: ["class"],
content: [

View File

@@ -4363,6 +4363,7 @@ string-argv@~0.3.2:
integrity sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==
"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0:
name string-width-cjs
version "4.2.3"
resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==

View File

@@ -222,10 +222,10 @@ async def transcribe(
common: CommonQueryParams,
file: UploadFile = File(...),
rate_limiter_per_minute=Depends(
ApiUserRateLimiter(requests=1, subscribed_requests=10, window=60, slug="transcribe_minute")
ApiUserRateLimiter(requests=20, subscribed_requests=20, window=60, slug="transcribe_minute")
),
rate_limiter_per_day=Depends(
ApiUserRateLimiter(requests=10, subscribed_requests=600, window=60 * 60 * 24, slug="transcribe_day")
ApiUserRateLimiter(requests=60, subscribed_requests=600, window=60 * 60 * 24, slug="transcribe_day")
),
):
user: KhojUser = request.user.object

View File

@@ -155,10 +155,10 @@ async def text_to_speech(
common: CommonQueryParams,
text: str,
rate_limiter_per_minute=Depends(
ApiUserRateLimiter(requests=5, subscribed_requests=20, window=60, slug="chat_minute")
ApiUserRateLimiter(requests=20, subscribed_requests=20, window=60, slug="chat_minute")
),
rate_limiter_per_day=Depends(
ApiUserRateLimiter(requests=5, subscribed_requests=300, window=60 * 60 * 24, slug="chat_day")
ApiUserRateLimiter(requests=50, subscribed_requests=300, window=60 * 60 * 24, slug="chat_day")
),
) -> Response:
voice_model = await ConversationAdapters.aget_voice_model_config(request.user.object)

View File

@@ -804,7 +804,7 @@ async def text_to_image(
with timer("Improve the original user query", logger):
if send_status_func:
await send_status_func("**✍🏽 Enhancing the Painting Prompt**")
await send_status_func("**Enhancing the Painting Prompt**")
improved_image_prompt = await generate_better_image_prompt(
message,
chat_history,