Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/mlot/langchain
Browse files Browse the repository at this point in the history
  • Loading branch information
mlot committed Jul 21, 2023
1 parent c38965f commit 4682dd7
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,11 @@
" history=[[\"我将从美国到中国来旅游,出行前希望了解中国的城市\", \"欢迎问我任何问题。\"]],\n",
" top_p=0.9,\n",
" model_kwargs={\"sample_model_args\": False},\n",
")"
")\n",
"\n",
"# turn on with_history only when you want the LLM object to keep track of the conversation history \n",
"# and send the accumulated context to the backend model api, which makes it stateful. By default, it is stateless.\n",
"# llm.with_history = True"
]
},
{
Expand Down Expand Up @@ -95,22 +99,6 @@
"\n",
"llm_chain.run(question)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"By default, ChatGLM is stateful: it keeps track of the conversation history and sends the accumulated context to the model. To enable stateless mode, set ChatGLM.with_history to `False` explicitly."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"llm.with_history = False"
]
}
],
"metadata": {
Expand Down
2 changes: 1 addition & 1 deletion langchain/llms/chatglm.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ class ChatGLM(LLM):
"""History of the conversation"""
top_p: float = 0.7
"""Top P for nucleus sampling from 0 to 1"""
with_history: bool = True
with_history: bool = False
"""Whether to use history or not"""

@property
Expand Down

0 comments on commit 4682dd7

Please sign in to comment.