Build-Deploy-Actions
commit d9f8d092ec

@@ -0,0 +1,48 @@
name: Build
run-name: ${{ github.actor }} is upgrading the release 🚀
on: [push]

env:
  REPOSITORY: ${{ github.repository }}
  COMMIT_ID: ${{ github.sha }}

jobs:
  Build-Deploy-Actions:
    runs-on: ubuntu-latest
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
      - name: Check out repository code
        uses: actions/checkout@v3
      - name: Setup Git LFS
        run: |
          git lfs install
          git lfs fetch
          git lfs checkout
      - name: List files in the repository
        run: |
          ls ${{ github.workspace }}
      - name: Docker Image Info
        id: image-info
        run: |
          # legacy ::set-output syntax; the image name is the lowercased repository
          # and the tag is the first 10 characters of the commit SHA
          echo "::set-output name=image_name::$(echo $REPOSITORY | tr '[:upper:]' '[:lower:]')"
          echo "::set-output name=image_tag::${COMMIT_ID:0:10}"
      - name: Login to container registry
        uses: docker/login-action@v2
        with:
          registry: artifacts.iflytek.com
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and push
        run: |
          docker version
          docker buildx build -t artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }} . --file ${{ github.workspace }}/Dockerfile --load
          docker push artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
          docker rmi artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
      - run: echo "🍏 This job's status is ${{ job.status }}."
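The Docker Image Info step uses the ::set-output workflow command, which GitHub has deprecated in favour of the $GITHUB_OUTPUT file. Assuming the runner in use supports $GITHUB_OUTPUT as well, an equivalent step body would be a sketch like:

    # same two outputs, written to the $GITHUB_OUTPUT file instead of ::set-output
    echo "image_name=$(echo "$REPOSITORY" | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_OUTPUT"
    echo "image_tag=${COMMIT_ID:0:10}" >> "$GITHUB_OUTPUT"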
Dockerfile
@@ -0,0 +1,10 @@
FROM python:3.8.13

WORKDIR /app

COPY . /app

# install dependencies through the USTC PyPI mirror
RUN pip config set global.index-url https://pypi.mirrors.ustc.edu.cn/simple
RUN pip install -r requirements.txt

CMD ["python", "app.py"]
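Before relying on the workflow, the image can be sanity-checked locally. A sketch, where summarizer-demo is a placeholder tag and 7860 is Gradio's default port:

    # build from the repository root and run, publishing Gradio's default port
    docker build -t summarizer-demo .
    docker run --rm -p 7860:7860 summarizer-demo

The published port only answers if the app binds to 0.0.0.0 inside the container; see the launch() note in app.py below.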
app.py
@@ -0,0 +1,35 @@
import gradio as gr
from simplet5 import SimpleT5
from gradio.themes.utils import sizes

# theme: square corners, brand blue (#4D63FF) for labels and primary buttons
theme = gr.themes.Default(radius_size=sizes.radius_none).set(
    block_label_text_color='#4D63FF',
    block_title_text_color='#4D63FF',
    button_primary_text_color='#4D63FF',
    button_primary_background_fill='#FFFFFF',
    button_primary_border_color='#4D63FF',
    button_primary_background_fill_hover='#EDEFFF',
)


model = SimpleT5()
model.load_model("t5", "snrspeaks/t5-one-line-summary")


def sentiment_analysis(text):
    # despite the name, this generates a one-line summary of the input text;
    # predict() returns a list of candidates, so return the first one
    result = model.predict(text)
    return result[0]


demo = gr.Interface(fn=sentiment_analysis,
                    inputs='text',
                    outputs='text',
                    examples=[["We describe a system called Overton, whose main design goal is to support engineers in building, monitoring, and improving production machine learning systems. Key challenges engineers face are monitoring fine-grained quality, diagnosing errors in sophisticated applications, and handling contradictory or incomplete supervision data. Overton automates the life cycle of model construction, deployment, and monitoring by providing a set of novel high-level, declarative abstractions. Overton's vision is to shift developers to these higher-level tasks instead of lower-level machine learning tasks. In fact, using Overton, engineers can build deep-learning-based applications without writing any code in frameworks like TensorFlow. For over a year, Overton has been used in production to support multiple applications in both near-real-time applications and back-of-house processing. In that time, Overton-based applications have answered billions of queries in multiple languages and processed trillions of records reducing errors 1.7-2.9 times versus production systems."]],
                    theme=theme,
                    title="摘要")  # "摘要" = "Summary"


if __name__ == "__main__":
    # demo.queue(concurrency_count=3)
    # demo.launch(server_name="0.0.0.0", server_port=7028)
    demo.launch()  # binds to 127.0.0.1 by default; use server_name="0.0.0.0" inside the container
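The commented-out launch() call shows the container-friendly settings (bind to all interfaces, fixed port 7028). Assuming Gradio's standard GRADIO_SERVER_NAME and GRADIO_SERVER_PORT environment variables, the same effect can be sketched from the shell without editing the script:

    # bind to all interfaces on port 7028, matching the commented-out launch() call
    GRADIO_SERVER_NAME=0.0.0.0 GRADIO_SERVER_PORT=7028 python app.py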
requirements.txt
@@ -0,0 +1,3 @@
gradio
transformers
torch
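requirements.txt lists Gradio, Transformers and Torch, but app.py also imports simplet5, which is not pinned here. A sketch of a local smoke test (the .venv name is arbitrary; simplet5 is installed explicitly because it is missing from the list):

    # isolated environment, listed packages plus simplet5, then an import check
    python -m venv .venv && . .venv/bin/activate
    pip install -r requirements.txt simplet5
    python -c "import gradio, transformers, torch, simplet5"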