From 3c24b5a1a6109a699754d588a865cd29e3809726 Mon Sep 17 00:00:00 2001
From: yanqiangmiffy <1185918903@qq.com>
Date: Wed, 19 Apr 2023 01:15:22 +0800
Subject: [PATCH] =?UTF-8?q?feature@=E6=B7=BB=E5=8A=A0websearch?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                                     |   5 +-
 clc/__pycache__/__init__.cpython-310.pyc      | Bin 0 -> 310 bytes
 clc/__pycache__/gpt_service.cpython-310.pyc   | Bin 0 -> 1958 bytes
 .../langchain_application.cpython-310.pyc     | Bin 0 -> 3214 bytes
 .../source_service.cpython-310.pyc            | Bin 0 -> 2365 bytes
 clc/langchain_application.py                  |  25 ++-
 clc/source_service.py                         |  14 ++
 main.py                                       |  42 +++--
 requirements.txt                              | 160 +-----------------
 tests/test_duckduckgo_search.py               |  16 +-
 10 files changed, 83 insertions(+), 179 deletions(-)
 create mode 100644 clc/__pycache__/__init__.cpython-310.pyc
 create mode 100644 clc/__pycache__/gpt_service.cpython-310.pyc
 create mode 100644 clc/__pycache__/langchain_application.cpython-310.pyc
 create mode 100644 clc/__pycache__/source_service.cpython-310.pyc

diff --git a/README.md b/README.md
index 5062a25..f376739 100644
--- a/README.md
+++ b/README.md
@@ -2,12 +2,15 @@
 
 > Chinese-LangChain:中文langchain项目,基于ChatGLM-6b+langchain实现本地化知识库检索与智能答案生成
 
+俗称:小必应,Q.Talk,强聊,QiangTalk
+
 ## 🔥 效果演示
 
 ![](https://github.com/yanqiangmiffy/Chinese-LangChain/blob/master/images/web_demo.png)
 
 ## 🚀 特性
 
+- 🚀 2023/04/19 增加web search功能,需要确保网络畅通!
 - 🚀 2023/04/18 webui增加知识库选择功能
 - 🚀 2023/04/18 修复推理预测超时5s报错问题
 - 🎉 2023/04/17 支持多种文档上传与内容解析:pdf、docx,ppt等
@@ -29,7 +32,7 @@
 * [x] 支持检索结果与LLM生成结果对比
 * [ ] 支持检索生成结果与原始LLM生成结果对比
 * [ ] 检索结果过滤与排序
-* [ ] 互联网检索结果接入
+* [x] 互联网检索结果接入
 * [ ] 模型初始化有问题
 * [ ] 增加非LangChain策略
 
diff --git a/clc/__pycache__/__init__.cpython-310.pyc b/clc/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b74d3c894e8ed1224a99d27eacc6b64cd74e6faf
GIT binary patch
literal 310
[compiled-bytecode literal omitted]

diff --git a/clc/__pycache__/gpt_service.cpython-310.pyc b/clc/__pycache__/gpt_service.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..99343ef93cac14611c2e29ec7fa316efe063de53
GIT binary patch
literal 1958
[compiled-bytecode literal omitted]
[GIT binary patches for clc/__pycache__/langchain_application.cpython-310.pyc (3214 bytes) and clc/__pycache__/source_service.cpython-310.pyc (2365 bytes) omitted]

diff --git a/clc/langchain_application.py b/clc/langchain_application.py
index ca4ab3f..9090938 100644
--- a/clc/langchain_application.py
+++ b/clc/langchain_application.py
@@ -37,13 +37,24 @@ class LangChainApplication(object):
                                    history_len=5,
                                    temperature=0.1,
                                    top_p=0.9,
+                                   top_k=4,
+                                   web_content='',
                                    chat_history=[]):
-        prompt_template = """基于以下已知信息,简洁和专业的来回答用户的问题。
-        如果无法从中得到答案,请说 "根据已知信息无法回答该问题" 或 "没有提供足够的相关信息",不允许在答案中添加编造成分,答案请使用中文。
-        已知内容:
-        {context}
-        问题:
-        {question}"""
+        if web_content:
+            prompt_template = f"""基于以下已知信息,简洁和专业的来回答用户的问题。
+                                    如果无法从中得到答案,请说 "根据已知信息无法回答该问题" 或 "没有提供足够的相关信息",不允许在答案中添加编造成分,答案请使用中文。
+                                    已知网络检索内容:{web_content}""" + """
+                                    已知内容:
+                                    {context}
+                                    问题:
+                                    {question}"""
+        else:
+            prompt_template = """基于以下已知信息,简洁和专业的来回答用户的问题。
+            如果无法从中得到答案,请说 "根据已知信息无法回答该问题" 或 "没有提供足够的相关信息",不允许在答案中添加编造成分,答案请使用中文。
+            已知内容:
+            {context}
+            问题:
+            {question}"""
         prompt = PromptTemplate(template=prompt_template,
                                 input_variables=["context", "question"])
         self.llm_service.history = chat_history[-history_len:] if history_len > 0 else []
@@ -54,7 +65,7 @@ class LangChainApplication(object):
         knowledge_chain = RetrievalQA.from_llm(
             llm=self.llm_service,
             retriever=self.source_service.vector_store.as_retriever(
-                search_kwargs={"k": 4}),
+                search_kwargs={"k": top_k}),
             prompt=prompt)
         knowledge_chain.combine_documents_chain.document_prompt = PromptTemplate(
             input_variables=["page_content"], template="{page_content}")
diff --git a/clc/source_service.py b/clc/source_service.py
index c30cb40..87ebc05 100644
--- a/clc/source_service.py
+++ b/clc/source_service.py
@@ -12,6 +12,8 @@
 
 import os
 
+from duckduckgo_search import ddg
+from duckduckgo_search.utils import SESSION
 from langchain.document_loaders import UnstructuredFileLoader
 from langchain.embeddings.huggingface import HuggingFaceEmbeddings
 from langchain.vectorstores import FAISS
@@ -53,6 +55,18 @@ class SourceService(object):
         self.vector_store = FAISS.load_local(path, self.embeddings)
         return self.vector_store
 
+    def search_web(self, query):
+
+        SESSION.proxies = {
+            "http": f"socks5h://localhost:7890",
+            "https": f"socks5h://localhost:7890"
+        }
+        results = ddg(query)
+        web_content = ''
+        if results:
+            for result in results:
+                web_content += result['body']
+        return web_content
 # if __name__ == '__main__':
 #     config = LangChainCFG()
 #     source_service = SourceService(config)
diff --git a/main.py b/main.py
index 0bd68a2..93760c0 100644
--- a/main.py
+++ b/main.py
@@ -5,19 +5,19 @@
 import gradio as gr
 
 from clc.langchain_application import LangChainApplication
 
-os.environ["CUDA_VISIBLE_DEVICES"] = '1'
+os.environ["CUDA_VISIBLE_DEVICES"] = '0'
 
 # 修改成自己的配置!!!
 class LangChainCFG:
-    llm_model_name = '../../pretrained_models/chatglm-6b-int4-qe'  # 本地模型文件 or huggingface远程仓库
-    embedding_model_name = '../../pretrained_models/text2vec-large-chinese'  # 检索模型文件 or huggingface远程仓库
+    llm_model_name = 'THUDM/chatglm-6b-int4-qe'  # 本地模型文件 or huggingface远程仓库
+    embedding_model_name = 'GanymedeNil/text2vec-large-chinese'  # 检索模型文件 or huggingface远程仓库
     vector_store_path = './cache'
     docs_path = './docs'
     kg_vector_stores = {
-        '中文维基百科': '/root/GoMall/Knowledge-ChatGLM/cache/zh_wikipedia',
-        '大规模金融研报知识图谱': '/root/GoMall/Knowledge-ChatGLM/cache/financial_research_reports',
-        '初始化知识库': '/root/GoMall/Knowledge-ChatGLM/cache',
+        '中文维基百科': './cache/zh_wikipedia',
+        '大规模金融研报知识图谱': './cache/financial_research_reports',
+        '初始化知识库': './cache',
     }  # 可以替换成自己的知识库,如果没有需要设置为None
 
     # kg_vector_stores=None
@@ -62,24 +62,35 @@ def clear_session():
 
 
 def predict(input,
            large_language_model,
            embedding_model,
+           top_k,
+           use_web,
            history=None):
     # print(large_language_model, embedding_model)
     print(input)
     if history == None:
         history = []
+
+    if use_web == '使用':
+        web_content = application.source_service.search_web(query=input)
+    else:
+        web_content = ''
     resp = application.get_knowledge_based_answer(
         query=input,
         history_len=1,
         temperature=0.1,
        top_p=0.9,
+        top_k=top_k,
+        web_content=web_content,
         chat_history=history
     )
     history.append((input, resp['result']))
     search_text = ''
     for idx, source in enumerate(resp['source_documents'][:4]):
-        sep = f'----------【搜索结果{idx+1}:】---------------\n'
+        sep = f'----------【搜索结果{idx + 1}:】---------------\n'
         search_text += f'{sep}\n{source.page_content}\n\n'
     print(search_text)
+    search_text += "----------【网络检索内容】-----------\n"
+    search_text += web_content
     return '', history, history, search_text
@@ -108,20 +119,22 @@
 
                 top_k = gr.Slider(1,
                                   20,
-                                  value=2,
+                                  value=4,
                                   step=1,
-                                  label="向量匹配 top k",
+                                  label="检索top-k文档",
                                   interactive=True)
                 kg_name = gr.Radio(['中文维基百科',
                                     '大规模金融研报知识图谱',
                                     '初始化知识库'
                                     ],
                                    label="知识库",
-                                   value='中文维基百科',
+                                   value='初始化知识库',
                                    interactive=True)
                 set_kg_btn = gr.Button("重新加载知识库")
-                file = gr.File(label="将文件上传到数据库",
+                use_web = gr.Radio(["使用", "不使用"], label="web search", info="是否使用网络搜索,使用时确保网络畅通")
+
+                file = gr.File(label="将文件上传到知识库,内容要尽量匹配",
                               visible=True,
                               file_types=['.txt', '.md', '.docx', '.pdf']
                               )
 
@@ -149,7 +162,9 @@
         send.click(predict,
                    inputs=[
                        message, large_language_model,
-                       embedding_model, state
+                       embedding_model, top_k, use_web,
+
+                       state
                    ],
                    outputs=[message, chatbot, state, search])
 
@@ -163,7 +178,8 @@
         message.submit(predict,
                        inputs=[
                            message, large_language_model,
-                           embedding_model, state
+                           embedding_model, top_k, use_web,
+                           state
                        ],
                        outputs=[message, chatbot, state, search])
     gr.Markdown("""提醒:
diff --git a/requirements.txt b/requirements.txt
index 18f1e2b..e132ab1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,153 +1,7 @@
-aiofiles==23.1.0
-aiohttp==3.8.4
-aiosignal==1.3.1
-altair==4.2.2
-antlr4-python3-runtime==4.9.3
-anyio==3.6.2
-argilla==1.6.0
-async-timeout==4.0.2
-attrs==23.1.0
-backoff==2.2.1
-beautifulsoup4==4.12.2
-brotlipy==0.7.0
-cachetools==5.3.0
-cchardet==2.1.7
-certifi
-cffi
-chardet==5.1.0
-charset-normalizer==3.1.0
-click==8.1.3
-coloredlogs==15.0.1
-commonmark==0.9.1
-contourpy==1.0.7
-cpm-kernels==1.0.11
-cryptography
-cycler==0.11.0
-dataclasses-json==0.5.7
-Deprecated==1.2.13
-effdet==0.3.0
-entrypoints==0.4
-et-xmlfile==1.1.0
-faiss-gpu==1.7.2
-fastapi==0.95.1
-ffmpy==0.3.0
-filelock==3.11.0
-flatbuffers==23.3.3
-flit_core
-fonttools==4.39.3
-frozenlist==1.3.3
-fsspec==2023.4.0
-gmpy2
-gptcache==0.1.14
-gradio==3.27.0
-gradio_client==0.1.3
-greenlet==2.0.2
-h11==0.14.0
-httpcore==0.16.3
-httpx==0.23.3
-huggingface-hub==0.13.4
-humanfriendly==10.0
-icetk==0.0.7
-idna
-iopath==0.1.10
-Jinja2
-joblib==1.2.0
-jsonschema==4.17.3
-kiwisolver==1.4.4
-langchain==0.0.142
-layoutparser==0.3.4
-linkify-it-py==2.0.0
-lxml==4.9.2
-Markdown==3.4.3
-markdown-it-py==2.2.0
-MarkupSafe==2.1.2
-marshmallow==3.19.0
-marshmallow-enum==1.5.1
-matplotlib==3.7.1
-mdit-py-plugins==0.3.3
-mdurl==0.1.2
-mkl-fft==1.3.1
-mkl-random
-mkl-service==2.4.0
-monotonic==1.6
-mpmath==1.2.1
-msg-parser==1.2.0
-multidict==6.0.4
-mypy-extensions==1.0.0
-networkx
-nltk==3.8.1
-numexpr==2.8.4
-numpy
-olefile==0.46
-omegaconf==2.3.0
-onnxruntime==1.14.1
-openai==0.27.4
-openapi-schema-pydantic==1.2.4
-opencv-python==4.6.0.66
-openpyxl==3.1.2
-orjson==3.8.10
-packaging==23.1
-pandas==1.5.3
-pdf2image==1.16.3
-pdfminer.six==20221105
-pdfplumber==0.9.0
-Pillow==9.5.0
-portalocker==2.7.0
-protobuf==3.18.3
-pycocotools==2.0.6
-pycparser
-pydantic==1.10.7
-pydub==0.25.1
-Pygments==2.15.0
-pyOpenSSL
-pypandoc==1.11
-pyparsing==3.0.9
-pyrsistent==0.19.3
-PySocks
-pytesseract==0.3.10
-python-dateutil==2.8.2
-python-docx==0.8.11
-python-magic==0.4.27
-python-multipart==0.0.6
-python-pptx==0.6.21
-pytz==2023.3
-PyYAML==6.0
-regex==2023.3.23
-requests==2.28.2
-rfc3986==1.5.0
-rich==13.0.1
-scikit-learn==1.2.2
-scipy==1.10.1
-semantic-version==2.10.0
-sentence-transformers==2.2.2
-sentencepiece==0.1.98
-six
-sniffio==1.3.0
-soupsieve==2.4.1
-SQLAlchemy==1.4.47
-starlette==0.26.1
-sympy
-tenacity==8.2.2
-threadpoolctl==3.1.0
-timm==0.6.13
-tokenizers==0.13.3
-toolz==0.12.0
-torch==2.0.0
-torchaudio==2.0.0
-torchvision==0.15.0
-tqdm==4.65.0
-transformers==4.28.1
-triton==2.0.0
-typing-inspect==0.8.0
-typing_extensions==4.5.0
-tzdata==2023.3
-uc-micro-py==1.0.1
-unstructured==0.5.12
-unstructured-inference==0.3.2
-urllib3
-uvicorn==0.21.1
-Wand==0.6.11
-websockets==11.0.2
-wrapt==1.14.1
-XlsxWriter==3.1.0
-yarl==1.8.2
+langchain
+gradio
+transformers
+sentence_transformers
+faiss-cpu
+unstructured
+duckduckgo_search
\ No newline at end of file
diff --git a/tests/test_duckduckgo_search.py b/tests/test_duckduckgo_search.py
index 7707eb9..b3a69dd 100644
--- a/tests/test_duckduckgo_search.py
+++ b/tests/test_duckduckgo_search.py
@@ -2,9 +2,15 @@
 from duckduckgo_search import ddg
 from duckduckgo_search.utils import SESSION
 
-# SESSION.proxies = {
-#     "http": f"socks5h://localhost:7890",
-#     "https": f"socks5h://localhost:7890"
-# }
+SESSION.proxies = {
+    "http": f"socks5h://localhost:7890",
+    "https": f"socks5h://localhost:7890"
+}
 r = ddg("马保国")
-print(r)
\ No newline at end of file
+print(r[:2])
+"""
+[{'title': '马保国 - 维基百科,自由的百科全书', 'href': 'https://zh.wikipedia.org/wiki/%E9%A9%AC%E4%BF%9D%E5%9B%BD', 'body': '马保国(1951年 — ) ,男,籍贯 山东 临沂,出生及长大于河南,中国大陆太极拳师,自称"浑元形意太极门掌门人" 。 马保国因2017年约战mma格斗家徐晓冬首次出现
+大众视野中。 2020年5月,马保国在对阵民间武术爱好者王庆民的比赛中,30秒内被连续高速击倒三次,此事件成为了持续多日的社交 ...'}, {'title': '馬保國的主页 - 抖音', 'href': 'https://www.douyin.com/user/MS4wLjABAAAAW0E1ziOvxgUh3VVv5FE6xmoo3w5WtZalfphYZKj4mCg', 'body': '6.3万. #马马国教扛打功 最近有几个人模芳我动作,很危险啊,不可以的,朋友们不要受伤了。. 5.3万. #马保国直播带货榜第一 朋友们周末愉快,本周六早上湿点,我本人在此号进行第一次带货直播,活到老,学到老,越活越年轻。. 7.0万. #马保国击破红牛罐 昨天 ...'}]
+
+
+"""
\ No newline at end of file
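
A minimal usage sketch of the new web-search path (not part of the diff): it assumes an `application` object has been built from `LangChainCFG` exactly as in main.py, and that the SOCKS proxy on localhost:7890 hard-coded in clc/source_service.py is reachable.

```python
# Sketch only: exercises the two parameters this patch adds to get_knowledge_based_answer().
query = '马保国是谁?'  # hypothetical example query

# search_web() routes DuckDuckGo through the proxy set in SourceService and
# concatenates the 'body' field of each result; it returns '' when nothing comes back.
web_content = application.source_service.search_web(query)

resp = application.get_knowledge_based_answer(
    query=query,
    top_k=4,                  # new: how many documents the FAISS retriever returns
    web_content=web_content,  # new: DuckDuckGo snippets injected into the prompt
    chat_history=[],
)
print(resp['result'])
```

When web_content is non-empty, get_knowledge_based_answer() interpolates it into the prompt through an f-string while {context} and {question} remain LangChain template variables; when it is empty, the original knowledge-base-only prompt is used unchanged.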