add
Build-Deploy-Actions
Details
Build-Deploy-Actions
Details
This commit is contained in:
parent
146e57e369
commit
af1cf821da
|
@ -0,0 +1,47 @@
|
||||||
|
# CI workflow: on every push, check out the repo (with Git LFS content),
# build the Docker image tagged with the short commit SHA, push it to the
# private registry, then remove the local copy to free runner disk space.
name: Build

run-name: ${{ github.actor }} is upgrade release 🚀

on: [push]

env:
  # Owner/name of this repository; lowercased later to form the image name.
  REPOSITORY: ${{ github.repository }}
  # Full commit SHA; truncated to 10 chars for the image tag.
  COMMIT_ID: ${{ github.sha }}

jobs:
  Build-Deploy-Actions:
    runs-on: ubuntu-latest
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
      - name: Check out repository code
        uses: actions/checkout@v3
      - name: Setup Git LFS
        # checkout@v3 does not fetch LFS objects by default; pull them explicitly
        # so large model/data files are present for the Docker build context.
        run: |
          git lfs install
          git lfs fetch
          git lfs checkout
      - name: List files in the repository
        run: |
          ls ${{ github.workspace }}
      - name: Docker Image Info
        id: image-info
        # FIX: the `::set-output` workflow command is deprecated and disabled on
        # current runners; write step outputs to $GITHUB_OUTPUT instead.
        # Output names (image_name / image_tag) are unchanged, so the
        # `steps.image-info.outputs.*` references below keep working.
        run: |
          echo "image_name=$(echo $REPOSITORY | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_OUTPUT"
          echo "image_tag=${COMMIT_ID:0:10}" >> "$GITHUB_OUTPUT"
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: artifacts.iflytek.com
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and push
        # --load imports the buildx result into the local docker daemon so the
        # subsequent `docker push` / `docker rmi` can see it.
        run: |
          docker version
          docker buildx build -t artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }} . --file ${{ github.workspace }}/Dockerfile --load
          docker push artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
          docker rmi artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
      - run: echo "🍏 This job's status is ${{ job.status }}."
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Runtime image for the Qwen-7B-Chat Gradio demo (app.py).
FROM python:3.10-slim-buster

WORKDIR /app

COPY . /app

# Use the USTC PyPI mirror for faster installs in-region, and install the
# dependencies in the same layer with --no-cache-dir to keep the image small.
RUN pip config set global.index-url https://pypi.mirrors.ustc.edu.cn/simple \
    && pip install --no-cache-dir -r requirements.txt

CMD ["python", "app.py"]
|
|
@ -0,0 +1,70 @@
|
||||||
|
import os

from gradio.themes.utils import sizes


# Hide Gradio's default footer in the rendered page.
css = "footer {visibility: hidden}"

# NOTE(review): installing packages at runtime with os.system is fragile and
# slows every container start; these belong in requirements.txt / the Docker
# image instead. Kept as-is to preserve current behavior.
os.system('pip install tiktoken')
os.system('pip install "modelscope" --upgrade -f https://pypi.org/project/modelscope/')
os.system('pip install transformers_stream_generator')

# These imports must come AFTER the os.system installs above, because they
# depend on the packages installed at runtime.
import gradio as gr
from modelscope.pipelines import pipeline
from modelscope.utils.constant import Tasks
from modelscope import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
import torch
# os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'
|
||||||
|
|
||||||
|
|
||||||
|
# Branded Gradio theme: square corners, and the #4D63FF accent color applied
# to labels, titles, and primary buttons (white fill, tinted hover).
theme = gr.themes.Default(radius_size=sizes.radius_none).set(
    block_label_text_color = '#4D63FF',
    block_title_text_color = '#4D63FF',
    button_primary_text_color = '#4D63FF',
    button_primary_background_fill='#FFFFFF',
    button_primary_border_color='#4D63FF',
    button_primary_background_fill_hover='#EDEFFF',
)
|
||||||
|
|
||||||
|
def clear_session():
    """Reset the chat UI: blank the input textbox and drop the chat history.

    Returns:
        tuple: ('', None) — an empty string for the message box and None for
        the chatbot component, which clears its displayed history.
    """
    empty_message, empty_history = '', None
    return empty_message, empty_history
|
||||||
|
|
||||||
|
# ModelScope checkpoint id; revision pinned to v1.0.1 for reproducible loads.
model_id = 'qwen/Qwen-7B-Chat'
tokenizer = AutoTokenizer.from_pretrained(model_id, revision='v1.0.1', trust_remote_code=True)
# device_map="auto" lets the loader place weights across available devices;
# fp16=True halves memory; .eval() disables training-mode layers like dropout.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", revision='v1.0.1',
                                             trust_remote_code=True, fp16=True).eval()
# Attach the model's published generation defaults (sampling params, etc.).
model.generation_config = GenerationConfig.from_pretrained(model_id, trust_remote_code=True)
|
||||||
|
|
||||||
|
def generate_chat(input: str, history = None):
    """Stream a chat reply from the model, yielding partial UI updates.

    Args:
        input: The user's message; None is treated as an empty string.
            (Name shadows the builtin, but is kept for interface stability.)
        history: List of (user, assistant) message pairs, or None for a
            fresh conversation.

    Yields:
        (None, history) pairs — None clears the input textbox, and history
        includes the in-progress assistant response so the chatbot updates
        live as tokens stream in.
    """
    if input is None:
        input = ''
    if history is None:
        history = []
    # Bound the prompt: keep only the 5 most recent turns as context.
    history = history[-5:]
    # FIX: the original appended `(input, x)` after the loop, which raised
    # NameError when the stream produced no chunks. Track the latest partial
    # response explicitly and build the yielded history without mutating the
    # context list mid-stream (the original did append/yield/pop each chunk).
    response = ''
    for response in model.chat(tokenizer, input, history=history, stream=True):
        yield None, history + [(input, response)]
    # Record the final (possibly empty) response in the conversation history.
    history.append((input, response))
    return None, history
|
||||||
|
|
||||||
|
# Assemble the chat UI: a title, the chatbot transcript, an input box, and a
# row with "clear history" / "send" buttons wired to the handlers above.
block = gr.Blocks(theme=theme, css=css)
with block as demo:
    gr.Markdown("""<center><font size=8>Qwen-7B-Chat Bot</center>""")

    # NOTE(review): `lines` is a Textbox parameter, not a documented
    # gr.Chatbot one — confirm the pinned gradio version accepts it.
    chatbot = gr.Chatbot(lines=10, label='Qwen-7B-Chat', elem_classes="control-height")
    message = gr.Textbox(lines=2, label='Input')

    with gr.Row():
        clear_history = gr.Button("🧹 清除历史对话")
        submit = gr.Button("🚀 发送")  # FIX: local name was misspelled `sumbit`

    # Send: stream generate_chat's yields into (textbox, chatbot).
    submit.click(generate_chat,
                 inputs=[message, chatbot],
                 outputs=[message, chatbot])
    # Clear: reset both components immediately (queue=False skips the queue).
    clear_history.click(fn=clear_session,
                        inputs=[],
                        outputs=[message, chatbot],
                        queue=False)

# queue() is required for streaming (generator) handlers; bind to all
# interfaces so the app is reachable from outside the container.
demo.queue().launch(server_name="0.0.0.0")
|
|
@ -0,0 +1,7 @@
|
||||||
|
# Python dependencies for the Qwen-7B-Chat Gradio demo.
# transformers is pinned to the version validated with Qwen-7B-Chat v1.0.1.
transformers==4.31.0
accelerate
tiktoken
einops
transformers_stream_generator==0.0.4
scipy
gradio
|
Loading…
Reference in New Issue