git remote add origin http://172.16.59.16:3000/jianjiang/DeepDanbooru.git
This commit is contained in:
commit
a2bd16f347
|
@ -0,0 +1,27 @@
|
||||||
|
*.7z filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.arrow filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.bin filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.bin.* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.ftz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.gz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.h5 filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.joblib filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.model filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.onnx filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.ot filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.parquet filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pb filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pt filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pth filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.rar filter=lfs diff=lfs merge=lfs -text
|
||||||
|
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.tflite filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.tgz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.xz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.zip filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.zstandard filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
@ -0,0 +1,47 @@
|
||||||
|
name: Build
run-name: ${{ github.actor }} is upgrade release 🚀
on: [push]
env:
  REPOSITORY: ${{ github.repository }}
  COMMIT_ID: ${{ github.sha }}
jobs:
  Build-Deploy-Actions:
    runs-on: ubuntu-latest
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
      - name: Check out repository code
        uses: actions/checkout@v3
      - name: Setup Git LFS
        run: |
          git lfs install
          git lfs fetch
          git lfs checkout
      - name: List files in the repository
        run: |
          ls ${{ github.workspace }}
      - name: Docker Image Info
        id: image-info
        # ::set-output is deprecated and disabled on current runners; write
        # key=value pairs to $GITHUB_OUTPUT instead.
        run: |
          echo "image_name=$(echo $REPOSITORY | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_OUTPUT"
          echo "image_tag=${COMMIT_ID:0:10}" >> "$GITHUB_OUTPUT"
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          registry: artifacts.iflytek.com
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and push
        run: |
          docker version
          docker buildx build -t artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }} . --file ${{ github.workspace }}/Dockerfile --load
          docker push artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
          docker rmi artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
      - run: echo "🍏 This job's status is ${{ job.status }}."
|
|
@ -0,0 +1 @@
|
||||||
|
images
|
|
@ -0,0 +1,5 @@
|
||||||
|
[style]
|
||||||
|
based_on_style = pep8
|
||||||
|
blank_line_before_nested_class_or_def = false
|
||||||
|
spaces_before_comment = 2
|
||||||
|
split_before_logical_operator = true
|
|
@ -0,0 +1,14 @@
|
||||||
|
# Image for the DeepDanbooru Gradio demo (app.py).
# The public python base is kept commented for reference; the build uses the
# internal ailab base image instead.
#FROM python:3.8.13
FROM artifacts.iflytek.com/docker-private/atp/base_image_for_ailab:0.0.1

WORKDIR /app

# Copy requirements alone first so dependency layers are cached across
# source-only changes.
COPY requirements.txt /app

# Use the USTC PyPI mirror (faster from the internal network).
RUN pip config set global.index-url https://pypi.mirrors.ustc.edu.cn/simple
RUN pip install -r requirements.txt

COPY . /app
# SECURITY(review): a real Hugging Face token is baked into the image layer
# here. It should be rotated and supplied at runtime (docker run -e HF_TOKEN=…)
# or via build secrets instead of being committed.
ENV HF_TOKEN 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

CMD ["python", "app.py"]
|
|
@ -0,0 +1,38 @@
|
||||||
|
---
|
||||||
|
title: DeepDanbooru
|
||||||
|
emoji: 🏃
|
||||||
|
colorFrom: gray
|
||||||
|
colorTo: purple
|
||||||
|
sdk: gradio
|
||||||
|
sdk_version: 3.6
|
||||||
|
app_file: app.py
|
||||||
|
pinned: false
|
||||||
|
---
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
|
||||||
|
`title`: _string_
|
||||||
|
Display title for the Space
|
||||||
|
|
||||||
|
`emoji`: _string_
|
||||||
|
Space emoji (emoji-only character allowed)
|
||||||
|
|
||||||
|
`colorFrom`: _string_
|
||||||
|
Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
|
||||||
|
|
||||||
|
`colorTo`: _string_
|
||||||
|
Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
|
||||||
|
|
||||||
|
`sdk`: _string_
|
||||||
|
Can be either `gradio`, `streamlit`, or `static`
|
||||||
|
|
||||||
|
`sdk_version` : _string_
|
||||||
|
Only applicable for `streamlit` SDK.
|
||||||
|
See [doc](https://hf.co/docs/hub/spaces) for more info on supported versions.
|
||||||
|
|
||||||
|
`app_file`: _string_
|
||||||
|
Path to your main application file (which contains either `gradio` or `streamlit` Python code, or `static` html code).
|
||||||
|
Path is relative to the root of the repository.
|
||||||
|
|
||||||
|
`pinned`: _boolean_
|
||||||
|
Whether the Space stays on top of your list.
|
|
@ -0,0 +1,124 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import functools
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import tarfile
|
||||||
|
|
||||||
|
import deepdanbooru as dd
|
||||||
|
import gradio as gr
|
||||||
|
import huggingface_hub
|
||||||
|
import numpy as np
|
||||||
|
import PIL.Image
|
||||||
|
import tensorflow as tf
|
||||||
|
|
||||||
|
# UI strings for the Gradio interface (currently passed nowhere in main();
# the corresponding Interface kwargs are commented out).
TITLE = 'KichangKim/DeepDanbooru'
DESCRIPTION = 'This is an unofficial demo for https://github.com/KichangKim/DeepDanbooru.'
ARTICLE = '<center><img src="https://visitor-badge.glitch.me/badge?page_id=hysts.deepdanbooru" alt="visitor badge"/></center>'

# Hugging Face access token; raises KeyError at import time if HF_TOKEN is
# not set in the environment.
HF_TOKEN = os.environ['HF_TOKEN']
# Remote repo/filenames for model and labels; the hub downloads are currently
# commented out in favor of local files shipped with the image.
MODEL_REPO = 'hysts/DeepDanbooru'
MODEL_FILENAME = 'model-resnet_custom_v3.h5'
LABEL_FILENAME = 'tags.txt'
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args() -> argparse.Namespace:
    """Parse the demo's command-line options.

    Options: --score-slider-step (float, 0.05), --score-threshold
    (float, 0.5) and the --share flag.
    """
    cli = argparse.ArgumentParser()
    for flag, options in (
        ('--score-slider-step', {'type': float, 'default': 0.05}),
        ('--score-threshold', {'type': float, 'default': 0.5}),
        ('--share', {'action': 'store_true'}),
    ):
        cli.add_argument(flag, **options)
    return cli.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def load_sample_image_paths() -> list[pathlib.Path]:
    """Return sorted paths of the sample images, downloading on first use.

    If the local ``images`` directory is missing, fetch ``images.tar.gz``
    from the sample-image dataset repo and unpack it into the CWD.
    """
    image_dir = pathlib.Path('images')
    if image_dir.exists():
        return sorted(image_dir.glob('*'))
    archive = huggingface_hub.hf_hub_download('hysts/sample-images-TADNE',
                                              'images.tar.gz',
                                              repo_type='dataset',
                                              use_auth_token=HF_TOKEN)
    # NOTE(review): extractall() trusts member paths inside the downloaded
    # archive; consider tarfile's extraction filter (Python 3.12+) if the
    # source is not fully trusted.
    with tarfile.open(archive) as tar:
        tar.extractall()
    return sorted(image_dir.glob('*'))
|
||||||
|
|
||||||
|
|
||||||
|
def load_model() -> tf.keras.Model:
    """Load the DeepDanbooru network from the local HDF5 checkpoint.

    The hub download (MODEL_REPO / MODEL_FILENAME) was disabled in favor of
    a checkpoint bundled alongside the app; dead commented-out code removed.
    """
    return tf.keras.models.load_model("./model-resnet_custom_v3.h5")
|
||||||
|
|
||||||
|
|
||||||
|
def load_labels() -> list[str]:
    """Read the tag vocabulary (one tag per line, whitespace-stripped).

    The hub download (MODEL_REPO / LABEL_FILENAME) was disabled in favor of
    a local ``tags.txt`` bundled alongside the app; dead commented-out code
    removed, and the redundant ``readlines()`` dropped (a file iterates its
    lines directly).
    """
    with open("./tags.txt") as f:
        return [line.strip() for line in f]
|
||||||
|
|
||||||
|
|
||||||
|
def predict(image: PIL.Image.Image, score_threshold: float,
            model: tf.keras.Model, labels: list[str]) -> dict[str, float]:
    """Score one image and return {tag: probability} above the threshold.

    The image is resized (AREA resampling, aspect ratio preserved) and
    padded to the model's square input resolution, scaled to [0, 1], and
    run through the network; ``labels`` is assumed to be index-aligned with
    the model's output vector.
    """
    _, height, width, _ = model.input_shape
    image = np.asarray(image)
    image = tf.image.resize(image,
                            size=(height, width),
                            method=tf.image.ResizeMethod.AREA,
                            preserve_aspect_ratio=True)
    image = image.numpy()
    image = dd.image.transform_and_pad_image(image, width, height)
    image = image / 255.
    probs = model.predict(image[None, ...])[0]
    probs = probs.astype(float)
    # Manual filter loop replaced with the equivalent dict comprehension.
    return {
        label: prob
        for prob, label in zip(probs.tolist(), labels)
        if prob >= score_threshold
    }
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Build the Gradio demo and serve it on all interfaces.

    Dead commented-out code (sample-image examples, title/description
    kwargs) removed; PEP 8 spacing fixed on the ``server_name`` keyword.
    """
    args = parse_args()
    # NOTE(review): --share is parsed but never forwarded to launch();
    # confirm whether share=args.share was intended.

    model = load_model()
    labels = load_labels()

    func = functools.partial(predict, model=model, labels=labels)

    gr.Interface(
        func,
        [
            gr.Image(type='pil', label='Input'),
            gr.Slider(0,
                      1,
                      step=args.score_slider_step,
                      value=args.score_threshold,
                      label='Score Threshold'),
        ],
        gr.Label(label='Output'),
        allow_flagging='never',
    ).launch(
        enable_queue=True,
        # Bind on 0.0.0.0 so the app is reachable from outside the container
        # (see the Dockerfile CMD).
        server_name="0.0.0.0",
    )


if __name__ == '__main__':
    main()
|
|
@ -0,0 +1,14 @@
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "General",
|
||||||
|
"start_index": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Character",
|
||||||
|
"start_index": 6891
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "System",
|
||||||
|
"start_index": 9173
|
||||||
|
}
|
||||||
|
]
|
Binary file not shown.
|
@ -0,0 +1,32 @@
|
||||||
|
{
|
||||||
|
"image_width": 512,
|
||||||
|
"image_height": 512,
|
||||||
|
"database_path": "F:/Dataset/danbooru-family/danbooru-training-20211112.sqlite",
|
||||||
|
"minimum_tag_count": 20,
|
||||||
|
"model": "resnet_custom_v3",
|
||||||
|
"minibatch_size": 4,
|
||||||
|
"epoch_count": 32,
|
||||||
|
"export_model_per_epoch": 4,
|
||||||
|
"checkpoint_frequency_mb": 1000,
|
||||||
|
"console_logging_frequency_mb": 50,
|
||||||
|
"optimizer": "sgd",
|
||||||
|
"learning_rates": [
|
||||||
|
{
|
||||||
|
"used_epoch": 0,
|
||||||
|
"learning_rate": 5.0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"rotation_range": [
|
||||||
|
0.0,
|
||||||
|
360.0
|
||||||
|
],
|
||||||
|
"scale_range": [
|
||||||
|
0.9,
|
||||||
|
1.1
|
||||||
|
],
|
||||||
|
"shift_range": [
|
||||||
|
-0.1,
|
||||||
|
0.1
|
||||||
|
],
|
||||||
|
"mixed_precision": false
|
||||||
|
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
pillow>=9.0.0
|
||||||
|
tensorflow>=2.7.0
|
||||||
|
git+https://github.com/KichangKim/DeepDanbooru@v3-20200915-sgd-e30#egg=deepdanbooru
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,5 @@
|
||||||
|
{
|
||||||
|
"date": "2021/11/12 22:30:46",
|
||||||
|
"limit": 10000,
|
||||||
|
"minimum_post_count": 500
|
||||||
|
}
|
Loading…
Reference in New Issue