Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Chat Settings to Generic Langchain Provider #622

Merged
merged 8 commits into the base branch on Jan 3, 2024
12 changes: 9 additions & 3 deletions backend/chainlit/playground/providers/langchain.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from typing import Union
from typing import List, Union

from chainlit.input_widget import InputWidget
from chainlit.playground.provider import BaseProvider
from chainlit.sync import make_async
from chainlit_client import GenerationMessage
Expand All @@ -18,13 +19,14 @@ def __init__(
id: str,
name: str,
llm: Union[LLM, BaseChatModel],
inputs: List[InputWidget] = [],
is_chat: bool = False,
):
super().__init__(
id=id,
name=name,
env_vars={},
inputs=[],
inputs=inputs,
is_chat=is_chat,
)
self.llm = llm
Expand Down Expand Up @@ -65,7 +67,11 @@ async def create_completion(self, request):

messages = self.create_generation(request)

stream = make_async(self.llm.stream)
# /~https://github.com/langchain-ai/langchain/issues/14980
stream = make_async(self.llm.stream)(
input=messages,
**request.generation.settings
)

result = await stream(
input=messages,
Expand Down
Loading