RemBG
Build-Deploy-Actions
Details
Build-Deploy-Actions
Details
This commit is contained in:
commit
049b836f04
|
@ -0,0 +1,27 @@
|
|||
*.7z filter=lfs diff=lfs merge=lfs -text
|
||||
*.arrow filter=lfs diff=lfs merge=lfs -text
|
||||
*.bin filter=lfs diff=lfs merge=lfs -text
|
||||
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
||||
*.ftz filter=lfs diff=lfs merge=lfs -text
|
||||
*.gz filter=lfs diff=lfs merge=lfs -text
|
||||
*.h5 filter=lfs diff=lfs merge=lfs -text
|
||||
*.joblib filter=lfs diff=lfs merge=lfs -text
|
||||
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
||||
*.model filter=lfs diff=lfs merge=lfs -text
|
||||
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
||||
*.onnx filter=lfs diff=lfs merge=lfs -text
|
||||
*.ot filter=lfs diff=lfs merge=lfs -text
|
||||
*.parquet filter=lfs diff=lfs merge=lfs -text
|
||||
*.pb filter=lfs diff=lfs merge=lfs -text
|
||||
*.pt filter=lfs diff=lfs merge=lfs -text
|
||||
*.pth filter=lfs diff=lfs merge=lfs -text
|
||||
*.rar filter=lfs diff=lfs merge=lfs -text
|
||||
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
||||
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
||||
*.tflite filter=lfs diff=lfs merge=lfs -text
|
||||
*.tgz filter=lfs diff=lfs merge=lfs -text
|
||||
*.wasm filter=lfs diff=lfs merge=lfs -text
|
||||
*.xz filter=lfs diff=lfs merge=lfs -text
|
||||
*.zip filter=lfs diff=lfs merge=lfs -text
|
||||
*.zstandard filter=lfs diff=lfs merge=lfs -text
|
||||
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
@ -0,0 +1,47 @@
|
|||
name: Build
|
||||
run-name: ${{ github.actor }} is upgrading the release 🚀
|
||||
on: [push]
|
||||
env:
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
COMMIT_ID: ${{ github.sha }}
|
||||
jobs:
|
||||
Build-Deploy-Actions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
|
||||
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by Gitea!"
|
||||
- run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: Setup Git LFS
|
||||
run: |
|
||||
git lfs install
|
||||
git lfs fetch
|
||||
git lfs checkout
|
||||
- name: List files in the repository
|
||||
run: |
|
||||
ls ${{ github.workspace }}
|
||||
-
|
||||
name: Docker Image Info
|
||||
id: image-info
|
||||
run: |
|
||||
echo "::set-output name=image_name::$(echo $REPOSITORY | tr '[:upper:]' '[:lower:]')"
|
||||
echo "::set-output name=image_tag::${COMMIT_ID:0:10}"
|
||||
-
|
||||
name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: artifacts.iflytek.com
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
-
|
||||
name: Build and push
|
||||
run: |
|
||||
docker version
|
||||
docker buildx build -t artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }} . --file ${{ github.workspace }}/Dockerfile --load
|
||||
docker push artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
|
||||
docker rmi artifacts.iflytek.com/docker-private/atp/${{ steps.image-info.outputs.image_name }}:${{ steps.image-info.outputs.image_tag }}
|
||||
- run: echo "🍏 This job's status is ${{ job.status }}."
|
|
@ -0,0 +1,13 @@
|
|||
#FROM python:3.8.13
|
||||
FROM artifacts.iflytek.com/docker-private/atp/base_image_for_ailab:0.0.1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY . /app
|
||||
|
||||
RUN pip config set global.index-url https://pypi.mirrors.ustc.edu.cn/simple
|
||||
RUN pip install -r requirements.txt
|
||||
|
||||
RUN mkdir -p /root/.u2net && mv u2net.onnx /root/.u2net/
|
||||
|
||||
CMD ["python", "app.py"]
|
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2020 Daniel Gatis
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -0,0 +1,12 @@
|
|||
---
|
||||
title: Rembg
|
||||
emoji: 👀
|
||||
colorFrom: pink
|
||||
colorTo: indigo
|
||||
sdk: gradio
|
||||
sdk_version: 3.0.20
|
||||
app_file: app.py
|
||||
pinned: false
|
||||
---
|
||||
|
||||
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
@ -0,0 +1,67 @@
|
|||
## Modified from Akhaliq Hugging Face Demo
|
||||
## https://huggingface.co/akhaliq
|
||||
|
||||
import gradio as gr
|
||||
import os
|
||||
import cv2
|
||||
|
||||
def inference(file, af, mask):
    """Remove the background from an uploaded image.

    Parameters:
        file: path to the uploaded image (gradio "filepath" input).
        af: alpha-matting erode size, taken from the slider.
        mask: "Mask only" to return just the mask; any other value
            returns the composited cutout.

    Returns:
        The path of the written result image ("output.png").
    """
    # Normalize the upload to a known on-disk location/format first.
    im = cv2.imread(file, cv2.IMREAD_COLOR)
    cv2.imwrite("input.png", im)

    model = "u2net"
    # Imported lazily so the heavy model machinery only loads on first use.
    from rembg import remove
    from rembg.session_base import BaseSession
    from rembg.session_factory import new_session

    input_path = "input.png"
    output_path = "output.png"

    # NOTE(review): this dict is rebuilt on every call, so setdefault never
    # hits and a new ONNX session is created each time; hoist it to module
    # scope to actually cache sessions.
    sessions: dict[str, BaseSession] = {}
    with open(input_path, "rb") as i, open(output_path, "wb") as o:
        output = remove(
            i.read(),
            session=sessions.setdefault(model, new_session(model)),
            alpha_matting_erode_size=af,
            only_mask=(mask == "Mask only"),
        )
        o.write(output)
    return output_path
|
||||
|
||||
|
||||
|
||||
# Gradio UI wiring.  The inputs list must stay in sync with
# inference(file, af, mask): one input component per parameter.
gr.Interface(
    inference,
    [
        gr.inputs.Image(type="filepath", label="Input"),
        gr.inputs.Slider(10, 25, default=10, label="Alpha matting"),
        gr.inputs.Radio(
            [
                "Default",
                "Mask only"
            ],
            type="value",
            default="Default",
            label="Choices"
        ),
    ],
    gr.Image(),
    # Three values per example row, matching the three inputs above.  The
    # original rows carried a stray fourth value ("u2net") left over from a
    # commented-out model dropdown, which breaks example loading.
    examples=[["lion.png", 10, "Default"], ["girl.jpg", 10, "Default"]],
    enable_queue=True
).launch(server_name="0.0.0.0", share=True)
|
|
@ -0,0 +1,6 @@
|
|||
# Package public API: resolve the installed version via versioneer, then
# re-export the primary entry points.
from . import _version

# Version string computed by versioneer (see _version.get_versions()).
__version__ = _version.get_versions()["version"]

from .bg import remove
from .session_factory import new_session
|
|
@ -0,0 +1,677 @@
|
|||
# This file helps to compute a version number in source trees obtained from
|
||||
# git-archive tarball (such as those provided by githubs download-from-tag
|
||||
# feature). Distribution tarballs (built by setup.py sdist) and build
|
||||
# directories (produced by setup.py build) will contain a much shorter file
|
||||
# that just contains the computed version number.
|
||||
|
||||
# This file is released into the public domain. Generated by
|
||||
# versioneer-0.21 (https://github.com/python-versioneer/python-versioneer)
|
||||
|
||||
"""Git implementation of _version.py."""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Callable, Dict
|
||||
|
||||
|
||||
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These strings are substituted by git during `git archive`.  The
    # setup.py/versioneer machinery greps for the variable names, so each
    # assignment must stay on a line of its own.
    git_refnames = " (HEAD -> main)"
    git_full = "d62227d5866e2178e88f06074917484a4424082e"
    git_date = "2022-12-10 11:51:49 -0300"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
|
||||
|
||||
|
||||
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Attributes (VCS, style, tag_prefix, parentdir_prefix,
    versionfile_source, verbose) are assigned dynamically by get_config().
    """
|
||||
|
||||
|
||||
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # These settings are baked in when 'setup.py versioneer' generates
    # _version.py.
    cfg = VersioneerConfig()
    for attr, value in (
        ("VCS", "git"),
        ("style", "pep440"),
        ("tag_prefix", "v"),
        ("parentdir_prefix", "rembg-"),
        ("versionfile_source", "rembg/_version.py"),
        ("verbose", False),
    ):
        setattr(cfg, attr, value)
    return cfg
|
||||
|
||||
|
||||
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Used internally to fall through to the next version-discovery strategy
    in get_versions().
    """
|
||||
|
||||
|
||||
LONG_VERSION_PY: Dict[str, str] = {}
|
||||
HANDLERS: Dict[str, Dict[str, Callable]] = {}
|
||||
|
||||
|
||||
def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""

    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f

    return decorate
|
||||
|
||||
|
||||
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
    """Call the given command(s), returning (stdout, returncode).

    Each name in *commands* is tried in order until one can be spawned
    (this is how versioneer copes with "git" vs "git.cmd" on Windows).

    Returns (None, None) when no command could be started at all, and
    (None, returncode) when the command ran but exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args,
                cwd=cwd,
                env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None),
            )
            break
        except OSError as e:
            # Command not found: try the next candidate name.
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
|
||||
|
||||
|
||||
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.  Walks up to two extra
    directory levels looking for a suitably named parent directory;
    raises NotThisMethod when none matches.
    """
    tried = []
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {
                "version": dirname[len(parentdir_prefix) :],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(root)
        root = os.path.dirname(root)  # up a level

    if verbose:
        print(
            "Tried directories %s but none started with prefix %s"
            % (str(tried), parentdir_prefix)
        )
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
|
||||
|
||||
|
||||
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file."""
    # The code embedded in _version.py can just fetch the value of these
    # keywords.  When used from setup.py we don't want to import
    # _version.py, so the values are recovered with a regexp instead.
    # This function is not used from _version.py.
    prefix_to_key = {
        "git_refnames =": "refnames",
        "git_full =": "full",
        "git_date =": "date",
    }
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for prefix, key in prefix_to_key.items():
                    if stripped.startswith(prefix):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except OSError:
        # Missing/unreadable file simply yields no keywords.
        pass
    return keywords
|
||||
|
||||
|
||||
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    *keywords* is the dict produced by get_keywords()/git_get_keywords().
    Raises NotThisMethod when the keywords are absent or unexpanded.
    """
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Use only the last line. Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]

        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # Unexpanded $Format$ placeholder: not a git-archive tarball.
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {r for r in refs if re.search(r"\d", r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix) :]
            # Filter out refs that exactly match prefix or that don't start
            # with a number once the prefix is stripped (mostly a concern
            # when prefix is '')
            if not re.match(r"\d", r):
                continue
            if verbose:
                print("picking %s" % r)
            return {
                "version": r,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False,
                "error": None,
                "date": date,
            }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {
        "version": "0+unknown",
        "full-revisionid": keywords["full"].strip(),
        "dirty": False,
        "error": "no suitable tags",
        "date": None,
    }
|
||||
|
||||
|
||||
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys: long, short, branch, dirty,
    closest-tag, distance, date, error.
    """
    GITS = ["git"]
    TAG_PREFIX_REGEX = "*"
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
        TAG_PREFIX_REGEX = r"\*"

    # Bail out early when `root` is not a git work tree at all.
    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(
        GITS,
        [
            "describe",
            "--tags",
            "--dirty",
            "--always",
            "--long",
            "--match",
            "%s%s" % (tag_prefix, TAG_PREFIX_REGEX),
        ],
        cwd=root,
    )
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[: git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
                full_tag,
                tag_prefix,
            )
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
|
||||
|
||||
|
||||
def plus_or_dot(pieces):
    """Return a "+" unless the closest tag already has one, then a ".".

    PEP 440 allows only one "+" (the local-version separator); subsequent
    segments are joined with dots.
    """
    return "." if "+" in pieces.get("closest-tag", "") else "+"
|
||||
|
||||
|
||||
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] .  Note that if you get a tagged
    build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: nothing tagged anywhere in history
        version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version

    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
|
||||
|
||||
|
||||
def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch.  Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    off_master = pieces["branch"] != "master"

    if not pieces["closest-tag"]:
        # exception #1
        version = "0"
        if off_master:
            version += ".dev0"
        version += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version

    version = pieces["closest-tag"]
    if pieces["distance"] or pieces["dirty"]:
        if off_master:
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
|
||||
|
||||
|
||||
def pep440_split_post(ver):
    """Split pep440 version string at the post-release segment.

    Returns the release segments before the post-release and the
    post-release version number (or None if no single post-release
    segment is present; an empty ".post" counts as 0).
    """
    parts = ver.split(".post")
    if len(parts) == 2:
        return parts[0], int(parts[1] or 0)
    return parts[0], None
|
||||
|
||||
|
||||
def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # sitting exactly on the tag: the tag is the version
        return tag

    # Bump any existing post-release segment and append a .dev marker.
    tag_version, post_version = pep440_split_post(tag)
    if post_version is None:
        return tag_version + ".post0.dev%d" % (pieces["distance"])
    return tag_version + ".post%d.dev%d" % (post_version + 1, pieces["distance"])
|
||||
|
||||
|
||||
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty.  Note that .dev0 sorts backwards (a dirty
    tree will appear "older" than the corresponding clean one), but you
    shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if not pieces["closest-tag"]:
        # exception #1
        version = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        return version + "+g%s" % pieces["short"]

    version = pieces["closest-tag"]
    if pieces["distance"] or pieces["dirty"]:
        version += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "g%s" % pieces["short"]
    return version
|
||||
|
||||
|
||||
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    off_master = pieces["branch"] != "master"

    if not pieces["closest-tag"]:
        # exception #1
        version = "0.post%d" % pieces["distance"]
        if off_master:
            version += ".dev0"
        version += "+g%s" % pieces["short"]
        if pieces["dirty"]:
            version += ".dirty"
        return version

    version = pieces["closest-tag"]
    if pieces["distance"] or pieces["dirty"]:
        version += ".post%d" % pieces["distance"]
        if off_master:
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "g%s" % pieces["short"]
        if pieces["dirty"]:
            version += ".dirty"
    return version
|
||||
|
||||
|
||||
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        version = pieces["closest-tag"]
        if not (pieces["distance"] or pieces["dirty"]):
            # exactly on a clean tag: the tag is the version
            return version
    else:
        # exception #1: no tag anywhere; distance is the total commit count
        version = "0"
    version += ".post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    return version
|
||||
|
||||
|
||||
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        version = pieces["closest-tag"]
        if pieces["distance"]:
            version += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: no tag in history, bare short hex
        version = pieces["short"]
    suffix = "-dirty" if pieces["dirty"] else ""
    return version + suffix
|
||||
|
||||
|
||||
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    suffix = "-dirty" if pieces["dirty"] else ""
    if not pieces["closest-tag"]:
        # exception #1: no tag in history, bare short hex
        return pieces["short"] + suffix
    return (
        pieces["closest-tag"]
        + "-%d-g%s" % (pieces["distance"], pieces["short"])
        + suffix
    )
|
||||
|
||||
|
||||
def render(pieces, style):
|
||||
"""Render the given version pieces into the requested style."""
|
||||
if pieces["error"]:
|
||||
return {
|
||||
"version": "unknown",
|
||||
"full-revisionid": pieces.get("long"),
|
||||
"dirty": None,
|
||||
"error": pieces["error"],
|
||||
"date": None,
|
||||
}
|
||||
|
||||
if not style or style == "default":
|
||||
style = "pep440" # the default
|
||||
|
||||
if style == "pep440":
|
||||
rendered = render_pep440(pieces)
|
||||
elif style == "pep440-branch":
|
||||
rendered = render_pep440_branch(pieces)
|
||||
elif style == "pep440-pre":
|
||||
rendered = render_pep440_pre(pieces)
|
||||
elif style == "pep440-post":
|
||||
rendered = render_pep440_post(pieces)
|
||||
elif style == "pep440-post-branch":
|
||||
rendered = render_pep440_post_branch(pieces)
|
||||
elif style == "pep440-old":
|
||||
rendered = render_pep440_old(pieces)
|
||||
elif style == "git-describe":
|
||||
rendered = render_git_describe(pieces)
|
||||
elif style == "git-describe-long":
|
||||
rendered = render_git_describe_long(pieces)
|
||||
else:
|
||||
raise ValueError("unknown style '%s'" % style)
|
||||
|
||||
return {
|
||||
"version": rendered,
|
||||
"full-revisionid": pieces["long"],
|
||||
"dirty": pieces["dirty"],
|
||||
"error": None,
|
||||
"date": pieces.get("date"),
|
||||
}
|
||||
|
||||
|
||||
def get_versions():
    """Get version information or return default if unable to do so.

    Tries, in order: expanded git-archive keywords, `git describe` on the
    working tree, and the parent-directory-name heuristic; finally falls
    back to "0+unknown" with an explanatory error message.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    # Strategy 1: git-archive keyword expansion baked into this file.
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for _ in cfg.versionfile_source.split("/"):
            root = os.path.dirname(root)
    except NameError:
        # No __file__ (e.g. frozen interpreter): cannot locate the tree.
        return {
            "version": "0+unknown",
            "full-revisionid": None,
            "dirty": None,
            "error": "unable to find root of source tree",
            "date": None,
        }

    # Strategy 2: ask git directly via `git describe`.
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    # Strategy 3: infer the version from the unpacked tarball directory name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {
        "version": "0+unknown",
        "full-revisionid": None,
        "dirty": None,
        "error": "unable to compute version",
        "date": None,
    }
|
|
@ -0,0 +1,176 @@
|
|||
import io
|
||||
from enum import Enum
|
||||
from typing import List, Optional, Union
|
||||
|
||||
import numpy as np
|
||||
from cv2 import (
|
||||
BORDER_DEFAULT,
|
||||
MORPH_ELLIPSE,
|
||||
MORPH_OPEN,
|
||||
GaussianBlur,
|
||||
getStructuringElement,
|
||||
morphologyEx,
|
||||
)
|
||||
from PIL import Image
|
||||
from PIL.Image import Image as PILImage
|
||||
from pymatting.alpha.estimate_alpha_cf import estimate_alpha_cf
|
||||
from pymatting.foreground.estimate_foreground_ml import estimate_foreground_ml
|
||||
from pymatting.util.util import stack_images
|
||||
from scipy.ndimage import binary_erosion
|
||||
|
||||
from .session_base import BaseSession
|
||||
from .session_factory import new_session
|
||||
|
||||
# 3x3 elliptical structuring element shared by post_process() for the
# morphological opening step.
kernel = getStructuringElement(MORPH_ELLIPSE, (3, 3))
|
||||
|
||||
|
||||
class ReturnType(Enum):
    """Output representation requested by a caller of ``remove``.

    ``remove`` mirrors its input type on the output: raw bytes in means
    PNG bytes out, PIL image in means PIL image out, ndarray in means
    ndarray out.
    """

    BYTES = 0
    PILLOW = 1
    NDARRAY = 2
|
||||
|
||||
|
||||
def alpha_matting_cutout(
    img: PILImage,
    mask: PILImage,
    foreground_threshold: int,
    background_threshold: int,
    erode_structure_size: int,
) -> PILImage:
    """Cut out the foreground of ``img`` using closed-form alpha matting.

    A trimap is derived from ``mask``: pixels above ``foreground_threshold``
    are definite foreground (255), pixels below ``background_threshold`` are
    definite background (0), everything else is unknown (128).  Both definite
    regions are eroded so the unknown band clears noisy mask edges, then
    pymatting solves for alpha and foreground colors.

    Args:
        img: Source image; any PIL mode (converted to RGB internally).
        mask: Single-channel segmentation mask aligned with ``img``.
        foreground_threshold: Mask value above which a pixel is foreground.
        background_threshold: Mask value below which a pixel is background.
        erode_structure_size: Side length of the square erosion structure;
            values <= 0 fall back to scipy's default structuring element.

    Returns:
        RGBA cutout with the estimated foreground and alpha channel.
    """
    # pymatting expects a 3-channel image.  The original code converted only
    # RGBA and CMYK; converting every non-RGB mode (L, P, ...) generalizes
    # the function to all PIL modes without changing previously-working inputs.
    if img.mode != "RGB":
        img = img.convert("RGB")

    img = np.asarray(img)
    mask = np.asarray(mask)

    is_foreground = mask > foreground_threshold
    is_background = mask < background_threshold

    structure = None
    if erode_structure_size > 0:
        structure = np.ones(
            (erode_structure_size, erode_structure_size), dtype=np.uint8
        )

    is_foreground = binary_erosion(is_foreground, structure=structure)
    # border_value=1 keeps the image border classified as background.
    is_background = binary_erosion(is_background, structure=structure, border_value=1)

    # Trimap encoding: 0 = background, 255 = foreground, 128 = unknown.
    trimap = np.full(mask.shape, dtype=np.uint8, fill_value=128)
    trimap[is_foreground] = 255
    trimap[is_background] = 0

    # pymatting works on floats in [0, 1].
    img_normalized = img / 255.0
    trimap_normalized = trimap / 255.0

    alpha = estimate_alpha_cf(img_normalized, trimap_normalized)
    foreground = estimate_foreground_ml(img_normalized, alpha)
    cutout = stack_images(foreground, alpha)

    cutout = np.clip(cutout * 255, 0, 255).astype(np.uint8)
    cutout = Image.fromarray(cutout)

    return cutout
|
||||
|
||||
|
||||
def naive_cutout(img: PILImage, mask: PILImage) -> PILImage:
    """Composite ``img`` over a fully transparent canvas using ``mask`` as alpha."""
    transparent = Image.new("RGBA", img.size, 0)
    return Image.composite(img, transparent, mask)
|
||||
|
||||
|
||||
def get_concat_v_multi(imgs: List[PILImage]) -> PILImage:
    """Stack images vertically, in order, into a single RGBA image.

    Args:
        imgs: Non-empty list of images to stack top-to-bottom.

    Returns:
        One image containing all inputs stacked vertically.

    Raises:
        IndexError: If ``imgs`` is empty.
    """
    # Iterate over a slice instead of the original ``imgs.pop(0)``, which
    # mutated the caller's list as a hidden side effect.
    pivot = imgs[0]
    for im in imgs[1:]:
        pivot = get_concat_v(pivot, im)
    return pivot
|
||||
|
||||
|
||||
def get_concat_v(img1: PILImage, img2: PILImage) -> PILImage:
    """Return a new RGBA image with ``img2`` pasted directly below ``img1``.

    The canvas takes its width from ``img1``; ``img2`` is not resized.
    """
    canvas = Image.new("RGBA", (img1.width, img1.height + img2.height))
    canvas.paste(img1, (0, 0))
    canvas.paste(img2, (0, img1.height))
    return canvas
|
||||
|
||||
|
||||
def post_process(mask: np.ndarray) -> np.ndarray:
    """
    Post Process the mask for a smooth boundary by applying Morphological Operations
    Research based on paper: https://www.sciencedirect.com/science/article/pii/S2352914821000757
    args:
        mask: Binary Numpy Mask
    """
    opened = morphologyEx(mask, MORPH_OPEN, kernel)
    blurred = GaussianBlur(
        opened, (5, 5), sigmaX=2, sigmaY=2, borderType=BORDER_DEFAULT
    )
    # Re-threshold after the blur so the result is strictly binary again.
    return np.where(blurred < 127, 0, 255).astype(np.uint8)
|
||||
|
||||
|
||||
def remove(
    data: Union[bytes, PILImage, np.ndarray],
    alpha_matting: bool = False,
    alpha_matting_foreground_threshold: int = 240,
    alpha_matting_background_threshold: int = 10,
    alpha_matting_erode_size: int = 10,
    session: Optional[BaseSession] = None,
    only_mask: bool = False,
    post_process_mask: bool = False,
) -> Union[bytes, PILImage, np.ndarray]:
    """Remove the background from an image.

    Args:
        data: Input image as raw encoded bytes, a PIL image, or a numpy array.
        alpha_matting: If True, refine cutout edges with alpha matting; falls
            back to a plain mask composite if matting raises ``ValueError``.
        alpha_matting_foreground_threshold: Trimap foreground threshold.
        alpha_matting_background_threshold: Trimap background threshold.
        alpha_matting_erode_size: Trimap erosion structure size.
        session: Pre-built inference session; a default "u2net" session is
            created when omitted.
        only_mask: Return the predicted mask(s) instead of the cutout.
        post_process_mask: Smooth each mask with morphological post-processing.

    Returns:
        The result in the same representation as ``data``: PIL image in ->
        PIL image out, bytes in -> PNG-encoded bytes out, ndarray in ->
        ndarray out.

    Raises:
        ValueError: If ``data`` is none of the supported input types.
    """
    # Remember the input representation so the output can mirror it.
    if isinstance(data, PILImage):
        return_type = ReturnType.PILLOW
        img = data
    elif isinstance(data, bytes):
        return_type = ReturnType.BYTES
        img = Image.open(io.BytesIO(data))
    elif isinstance(data, np.ndarray):
        return_type = ReturnType.NDARRAY
        img = Image.fromarray(data)
    else:
        raise ValueError("Input type {} is not supported.".format(type(data)))

    if session is None:
        session = new_session("u2net")

    # A session may emit several masks (e.g. one per clothing class).
    masks = session.predict(img)
    cutouts = []

    for mask in masks:
        if post_process_mask:
            mask = Image.fromarray(post_process(np.array(mask)))

        if only_mask:
            cutout = mask

        elif alpha_matting:
            try:
                cutout = alpha_matting_cutout(
                    img,
                    mask,
                    alpha_matting_foreground_threshold,
                    alpha_matting_background_threshold,
                    alpha_matting_erode_size,
                )
            except ValueError:
                # Matting can fail on degenerate trimaps; degrade gracefully
                # to a simple composite instead of propagating the error.
                cutout = naive_cutout(img, mask)

        else:
            cutout = naive_cutout(img, mask)

        cutouts.append(cutout)

    # Multiple cutouts are stacked vertically into one output image; with no
    # masks at all, the original image is returned unchanged.
    cutout = img
    if len(cutouts) > 0:
        cutout = get_concat_v_multi(cutouts)

    if ReturnType.PILLOW == return_type:
        return cutout

    if ReturnType.NDARRAY == return_type:
        return np.asarray(cutout)

    # ReturnType.BYTES: encode the result as PNG.
    bio = io.BytesIO()
    cutout.save(bio, "PNG")
    bio.seek(0)

    return bio.read()
|
|
@ -0,0 +1,440 @@
|
|||
import pathlib
|
||||
import sys
|
||||
import time
|
||||
from enum import Enum
|
||||
from typing import IO, cast
|
||||
|
||||
import aiohttp
|
||||
import click
|
||||
import filetype
|
||||
import uvicorn
|
||||
from asyncer import asyncify
|
||||
from fastapi import Depends, FastAPI, File, Form, Query
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from starlette.responses import Response
|
||||
from tqdm import tqdm
|
||||
from watchdog.events import FileSystemEvent, FileSystemEventHandler
|
||||
from watchdog.observers import Observer
|
||||
|
||||
from . import _version
|
||||
from .bg import remove
|
||||
from .session_base import BaseSession
|
||||
from .session_factory import new_session
|
||||
|
||||
|
||||
# Root click command group; the i/p/s subcommands below attach to it.
# The version string is supplied by versioneer via _version.get_versions().
# (Comments only: a docstring here would become the group's --help text.)
@click.group()
@click.version_option(version=_version.get_versions()["version"])
def main() -> None:
    pass
|
||||
|
||||
|
||||
# "i" = image: remove the background of a single file.  Both arguments
# default to stdin/stdout ("-") when the process is part of a pipe.
@main.command(help="for a file as input")
@click.option(
    "-m",
    "--model",
    default="u2net",
    type=click.Choice(
        ["u2net", "u2netp", "u2net_human_seg", "u2net_cloth_seg", "silueta"]
    ),
    show_default=True,
    show_choices=True,
    help="model name",
)
@click.option(
    "-a",
    "--alpha-matting",
    is_flag=True,
    show_default=True,
    help="use alpha matting",
)
@click.option(
    "-af",
    "--alpha-matting-foreground-threshold",
    default=240,
    type=int,
    show_default=True,
    help="trimap fg threshold",
)
@click.option(
    "-ab",
    "--alpha-matting-background-threshold",
    default=10,
    type=int,
    show_default=True,
    help="trimap bg threshold",
)
@click.option(
    "-ae",
    "--alpha-matting-erode-size",
    default=10,
    type=int,
    show_default=True,
    help="erode size",
)
@click.option(
    "-om",
    "--only-mask",
    is_flag=True,
    show_default=True,
    help="output only the mask",
)
@click.option(
    "-ppm",
    "--post-process-mask",
    is_flag=True,
    show_default=True,
    help="post process the mask",
)
@click.argument(
    "input", default=(None if sys.stdin.isatty() else "-"), type=click.File("rb")
)
@click.argument(
    "output",
    default=(None if sys.stdin.isatty() else "-"),
    type=click.File("wb", lazy=True),
)
def i(model: str, input: IO, output: IO, **kwargs) -> None:
    # The remaining CLI flags map 1:1 onto remove()'s keyword arguments.
    output.write(remove(input.read(), session=new_session(model), **kwargs))
|
||||
|
||||
|
||||
# "p" = path: batch-process every image in a folder, optionally watching the
# folder for filesystem events and processing new files as they appear.
@main.command(help="for a folder as input")
@click.option(
    "-m",
    "--model",
    default="u2net",
    type=click.Choice(
        ["u2net", "u2netp", "u2net_human_seg", "u2net_cloth_seg", "silueta"]
    ),
    show_default=True,
    show_choices=True,
    help="model name",
)
@click.option(
    "-a",
    "--alpha-matting",
    is_flag=True,
    show_default=True,
    help="use alpha matting",
)
@click.option(
    "-af",
    "--alpha-matting-foreground-threshold",
    default=240,
    type=int,
    show_default=True,
    help="trimap fg threshold",
)
@click.option(
    "-ab",
    "--alpha-matting-background-threshold",
    default=10,
    type=int,
    show_default=True,
    help="trimap bg threshold",
)
@click.option(
    "-ae",
    "--alpha-matting-erode-size",
    default=10,
    type=int,
    show_default=True,
    help="erode size",
)
@click.option(
    "-om",
    "--only-mask",
    is_flag=True,
    show_default=True,
    help="output only the mask",
)
@click.option(
    "-ppm",
    "--post-process-mask",
    is_flag=True,
    show_default=True,
    help="post process the mask",
)
@click.option(
    "-w",
    "--watch",
    default=False,
    is_flag=True,
    show_default=True,
    help="watches a folder for changes",
)
@click.argument(
    "input",
    type=click.Path(
        exists=True,
        path_type=pathlib.Path,
        file_okay=False,
        dir_okay=True,
        readable=True,
    ),
)
@click.argument(
    "output",
    type=click.Path(
        exists=False,
        path_type=pathlib.Path,
        file_okay=False,
        dir_okay=True,
        writable=True,
    ),
)
def p(
    model: str, input: pathlib.Path, output: pathlib.Path, watch: bool, **kwargs
) -> None:
    # One model session is shared across all files of this invocation.
    session = new_session(model)

    def process(each_input: pathlib.Path) -> None:
        # Best-effort per file: any failure is printed and the file skipped,
        # so one bad file does not abort the batch or the watch loop.
        try:
            # Skip anything that is not recognizably an image.
            mimetype = filetype.guess(each_input)
            if mimetype is None:
                return
            if mimetype.mime.find("image") < 0:
                return

            each_output = (output / each_input.name).with_suffix(".png")
            each_output.parents[0].mkdir(parents=True, exist_ok=True)

            # Existing outputs are never overwritten (this also prevents
            # re-processing already-converted files in watch mode).
            if not each_output.exists():
                each_output.write_bytes(
                    cast(
                        bytes,
                        remove(each_input.read_bytes(), session=session, **kwargs),
                    )
                )

                if watch:
                    print(
                        f"processed: {each_input.absolute()} -> {each_output.absolute()}"
                    )
        except Exception as e:
            print(e)

    inputs = list(input.glob("**/*"))
    if not watch:
        # Progress bar only for the one-shot batch mode.
        inputs = tqdm(inputs)

    for each_input in inputs:
        if not each_input.is_dir():
            process(each_input)

    if watch:
        observer = Observer()

        class EventHandler(FileSystemEventHandler):
            def on_any_event(self, event: FileSystemEvent) -> None:
                # Ignore directory events and deletions/closes; everything
                # else (created/modified/moved) triggers processing.
                if not (
                    event.is_directory or event.event_type in ["deleted", "closed"]
                ):
                    process(pathlib.Path(event.src_path))

        event_handler = EventHandler()
        observer.schedule(event_handler, input, recursive=False)
        observer.start()

        try:
            # Block forever; Ctrl-C unwinds through finally to stop cleanly.
            while True:
                time.sleep(1)

        finally:
            observer.stop()
            observer.join()
|
||||
|
||||
|
||||
# "s" = server: run an HTTP API (FastAPI served by uvicorn) exposing GET and
# POST endpoints on "/" that remove backgrounds from a URL or an upload.
@main.command(help="for a http server")
@click.option(
    "-p",
    "--port",
    default=5000,
    type=int,
    show_default=True,
    help="port",
)
@click.option(
    "-l",
    "--log_level",
    default="info",
    type=str,
    show_default=True,
    help="log level",
)
@click.option(
    "-t",
    "--threads",
    default=None,
    type=int,
    show_default=True,
    help="number of worker threads",
)
def s(port: int, log_level: str, threads: int) -> None:
    # Cache of model name -> loaded session so each model is loaded once.
    sessions: dict[str, BaseSession] = {}
    tags_metadata = [
        {
            "name": "Background Removal",
            "description": "Endpoints that perform background removal with different image sources.",
            "externalDocs": {
                "description": "GitHub Source",
                "url": "https://github.com/danielgatis/rembg",
            },
        },
    ]
    app = FastAPI(
        title="Rembg",
        description="Rembg is a tool to remove images background. That is it.",
        version=_version.get_versions()["version"],
        contact={
            "name": "Daniel Gatis",
            "url": "https://github.com/danielgatis",
            "email": "danielgatis@gmail.com",
        },
        license_info={
            "name": "MIT License",
            "url": "https://github.com/danielgatis/rembg/blob/main/LICENSE.txt",
        },
        openapi_tags=tags_metadata,
    )

    # Wide-open CORS so the API can be called directly from any web page.
    app.add_middleware(
        CORSMiddleware,
        allow_credentials=True,
        allow_origins=["*"],
        allow_methods=["*"],
        allow_headers=["*"],
    )

    class ModelType(str, Enum):
        # Mirrors the CLI's click.Choice of supported models.
        u2net = "u2net"
        u2netp = "u2netp"
        u2net_human_seg = "u2net_human_seg"
        u2net_cloth_seg = "u2net_cloth_seg"
        silueta = "silueta"

    class CommonQueryParams:
        # Query-string parameters shared by the GET endpoint.
        def __init__(
            self,
            model: ModelType = Query(
                default=ModelType.u2net,
                description="Model to use when processing image",
            ),
            a: bool = Query(default=False, description="Enable Alpha Matting"),
            af: int = Query(
                default=240,
                ge=0,
                le=255,
                description="Alpha Matting (Foreground Threshold)",
            ),
            ab: int = Query(
                default=10,
                ge=0,
                le=255,
                description="Alpha Matting (Background Threshold)",
            ),
            ae: int = Query(
                default=10, ge=0, description="Alpha Matting (Erode Structure Size)"
            ),
            om: bool = Query(default=False, description="Only Mask"),
            ppm: bool = Query(default=False, description="Post Process Mask"),
        ):
            self.model = model
            self.a = a
            self.af = af
            self.ab = ab
            self.ae = ae
            self.om = om
            self.ppm = ppm

    class CommonQueryPostParams:
        # Same parameters as CommonQueryParams, but read from form fields so
        # they can accompany a multipart file upload on POST.
        def __init__(
            self,
            model: ModelType = Form(
                default=ModelType.u2net,
                description="Model to use when processing image",
            ),
            a: bool = Form(default=False, description="Enable Alpha Matting"),
            af: int = Form(
                default=240,
                ge=0,
                le=255,
                description="Alpha Matting (Foreground Threshold)",
            ),
            ab: int = Form(
                default=10,
                ge=0,
                le=255,
                description="Alpha Matting (Background Threshold)",
            ),
            ae: int = Form(
                default=10, ge=0, description="Alpha Matting (Erode Structure Size)"
            ),
            om: bool = Form(default=False, description="Only Mask"),
            ppm: bool = Form(default=False, description="Post Process Mask"),
        ):
            self.model = model
            self.a = a
            self.af = af
            self.ab = ab
            self.ae = ae
            self.om = om
            self.ppm = ppm

    def im_without_bg(content: bytes, commons: CommonQueryParams) -> Response:
        # Shared worker for both endpoints: look up (or lazily create and
        # cache) the session for the requested model, then return the cutout
        # as a PNG response.
        #
        # Fix: the original used sessions.setdefault(k, new_session(k)),
        # which evaluates the expensive new_session() on EVERY request,
        # even when the session is already cached.
        model_name = commons.model.value
        if model_name not in sessions:
            sessions[model_name] = new_session(model_name)

        return Response(
            remove(
                content,
                session=sessions[model_name],
                alpha_matting=commons.a,
                alpha_matting_foreground_threshold=commons.af,
                alpha_matting_background_threshold=commons.ab,
                alpha_matting_erode_size=commons.ae,
                only_mask=commons.om,
                post_process_mask=commons.ppm,
            ),
            media_type="image/png",
        )

    @app.on_event("startup")
    def startup():
        # Optionally resize anyio's default worker-thread pool, which
        # asyncify() uses to run the blocking inference off the event loop.
        if threads is not None:
            from anyio import CapacityLimiter
            from anyio.lowlevel import RunVar

            RunVar("_default_thread_limiter").set(CapacityLimiter(threads))

    @app.get(
        path="/",
        tags=["Background Removal"],
        summary="Remove from URL",
        description="Removes the background from an image obtained by retrieving an URL.",
    )
    async def get_index(
        url: str = Query(
            default=..., description="URL of the image that has to be processed."
        ),
        commons: CommonQueryParams = Depends(),
    ):
        # Fetch the remote image, then hand the blocking work to a thread.
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                file = await response.read()
                return await asyncify(im_without_bg)(file, commons)

    @app.post(
        path="/",
        tags=["Background Removal"],
        summary="Remove from Stream",
        description="Removes the background from an image sent within the request itself.",
    )
    async def post_index(
        file: bytes = File(
            default=...,
            description="Image file (byte stream) that has to be processed.",
        ),
        commons: CommonQueryPostParams = Depends(),
    ):
        return await asyncify(im_without_bg)(file, commons)

    uvicorn.run(app, host="0.0.0.0", port=port, log_level=log_level)
|
|
@ -0,0 +1,40 @@
|
|||
from typing import Dict, List, Tuple
|
||||
|
||||
import numpy as np
|
||||
import onnxruntime as ort
|
||||
from PIL import Image
|
||||
from PIL.Image import Image as PILImage
|
||||
|
||||
|
||||
class BaseSession:
    """Common wrapper around an ONNX Runtime session for background-removal models.

    Subclasses implement ``predict``; ``normalize`` provides the shared
    preprocessing that turns a PIL image into the model's input feed dict.
    """

    def __init__(self, model_name: str, inner_session: ort.InferenceSession):
        # Name of the model (e.g. "u2net") this session runs.
        self.model_name = model_name
        # The underlying ONNX Runtime inference session.
        self.inner_session = inner_session

    def normalize(
        self,
        img: PILImage,
        mean: Tuple[float, float, float],
        std: Tuple[float, float, float],
        size: Tuple[int, int],
    ) -> Dict[str, np.ndarray]:
        """Resize and normalize ``img`` into the model's input feed dict.

        Args:
            img: Input image (converted to RGB internally).
            mean: Per-channel mean subtracted after scaling to [0, 1].
            std: Per-channel divisor applied after mean subtraction.
            size: Target (width, height) for the model input.

        Returns:
            Mapping from the session's first input name to a float32 NCHW
            array of shape (1, 3, size[1], size[0]).
        """
        im = img.convert("RGB").resize(size, Image.LANCZOS)

        im_ary = np.array(im)
        # Scale by the image's max value.  Guard against an all-black input,
        # where the original ``im_ary / np.max(im_ary)`` divided by zero and
        # produced NaNs.
        max_val = np.max(im_ary)
        im_ary = im_ary / max_val if max_val != 0 else im_ary.astype(np.float64)

        tmpImg = np.zeros((im_ary.shape[0], im_ary.shape[1], 3))
        tmpImg[:, :, 0] = (im_ary[:, :, 0] - mean[0]) / std[0]
        tmpImg[:, :, 1] = (im_ary[:, :, 1] - mean[1]) / std[1]
        tmpImg[:, :, 2] = (im_ary[:, :, 2] - mean[2]) / std[2]

        # HWC -> CHW for ONNX.
        tmpImg = tmpImg.transpose((2, 0, 1))

        return {
            self.inner_session.get_inputs()[0]
            .name: np.expand_dims(tmpImg, 0)
            .astype(np.float32)
        }

    def predict(self, img: PILImage) -> List[PILImage]:
        """Return one or more grayscale masks for ``img``; implemented by subclasses."""
        raise NotImplementedError
|
|
@ -0,0 +1,88 @@
|
|||
from typing import List
|
||||
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
from PIL.Image import Image as PILImage
|
||||
from scipy.special import log_softmax
|
||||
|
||||
from .session_base import BaseSession
|
||||
|
||||
# Per-class palettes for the cloth-segmentation model (see ClothSession):
# each palette maps exactly one of the three predicted garment classes to
# white (255, 255, 255) and every other class to black.
pallete1 = [0, 0, 0, 255, 255, 255, 0, 0, 0, 0, 0, 0]

pallete2 = [0, 0, 0, 0, 0, 0, 255, 255, 255, 0, 0, 0]

pallete3 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255]
|
||||
|
||||
|
||||
class ClothSession(BaseSession):
    """Session for the u2net_cloth_seg model, which predicts three garment classes."""

    def predict(self, img: PILImage) -> List[PILImage]:
        """Run inference and return one grayscale mask per clothing class."""
        ort_outs = self.inner_session.run(
            None, self.normalize(img, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), (768, 768))
        )

        # Per-pixel class index: log-softmax over the channel axis, argmax,
        # then drop the batch and channel dimensions.
        pred = log_softmax(ort_outs[0], 1)
        pred = np.argmax(pred, axis=1, keepdims=True)
        pred = np.squeeze(pred, 0)
        pred = np.squeeze(pred, 0)

        mask = Image.fromarray(pred.astype("uint8"), mode="L")
        mask = mask.resize(img.size, Image.LANCZOS)

        # One binary mask per class: the palette whitens exactly one class,
        # and the RGB -> L round trip flattens it back to grayscale.
        masks = []
        for pallete in (pallete1, pallete2, pallete3):
            class_mask = mask.copy()
            class_mask.putpalette(pallete)
            masks.append(class_mask.convert("RGB").convert("L"))

        return masks
|
|
@ -0,0 +1,71 @@
|
|||
import hashlib
|
||||
import os
|
||||
import sys
|
||||
from contextlib import redirect_stdout
|
||||
from pathlib import Path
|
||||
from typing import Type
|
||||
|
||||
import onnxruntime as ort
|
||||
import pooch
|
||||
|
||||
from .session_base import BaseSession
|
||||
from .session_cloth import ClothSession
|
||||
from .session_simple import SimpleSession
|
||||
|
||||
|
||||
def new_session(model_name: str = "u2net") -> BaseSession:
    """Create an ONNX Runtime session for ``model_name``.

    The model weights are downloaded (md5-verified, cached by pooch) into
    ``$U2NET_HOME`` (default ``$XDG_DATA_HOME/.u2net`` or ``~/.u2net``) on
    first use.

    Args:
        model_name: One of "u2net", "u2netp", "u2net_human_seg",
            "u2net_cloth_seg", "silueta".  An unknown name keeps the
            original fallback behaviour: the default u2net settings are
            used (the download will then fail its checksum).

    Returns:
        A ``SimpleSession`` or ``ClothSession`` wrapping the loaded model.
    """
    # Registry of (md5, url, session class) per known model.  Replaces the
    # original sprawling if/elif chain with identical settings.
    known = {
        "u2net": (
            "60024c5c889badc19c04ad937298a77b",
            "https://github.com/danielgatis/rembg/releases/download/v0.0.0/u2net.onnx",
            SimpleSession,
        ),
        "u2netp": (
            "8e83ca70e441ab06c318d82300c84806",
            "https://github.com/danielgatis/rembg/releases/download/v0.0.0/u2netp.onnx",
            SimpleSession,
        ),
        "u2net_human_seg": (
            "c09ddc2e0104f800e3e1bb4652583d1f",
            "https://github.com/danielgatis/rembg/releases/download/v0.0.0/u2net_human_seg.onnx",
            SimpleSession,
        ),
        "u2net_cloth_seg": (
            "2434d1f3cb744e0e49386c906e5a08bb",
            "https://github.com/danielgatis/rembg/releases/download/v0.0.0/u2net_cloth_seg.onnx",
            ClothSession,
        ),
        "silueta": (
            "55e59e0d8062d2f5d013f4725ee84782",
            "https://github.com/danielgatis/rembg/releases/download/v0.0.0/silueta.onnx",
            SimpleSession,
        ),
    }
    session_class: Type[BaseSession]
    md5, url, session_class = known.get(model_name, known["u2net"])

    u2net_home = os.getenv(
        "U2NET_HOME", os.path.join(os.getenv("XDG_DATA_HOME", "~"), ".u2net")
    )

    fname = f"{model_name}.onnx"
    # Hoisted: the original expanded this path three times and left an
    # unused ``path`` local behind.
    home_path = Path(u2net_home).expanduser()
    full_path = home_path / fname

    pooch.retrieve(
        url,
        f"md5:{md5}",
        fname=fname,
        path=home_path,
        progressbar=True,
    )

    sess_opts = ort.SessionOptions()

    # Honour OMP_NUM_THREADS for inter-op parallelism.
    if "OMP_NUM_THREADS" in os.environ:
        sess_opts.inter_op_num_threads = int(os.environ["OMP_NUM_THREADS"])

    return session_class(
        model_name,
        ort.InferenceSession(
            str(full_path),
            providers=ort.get_available_providers(),
            sess_options=sess_opts,
        ),
    )
|
|
@ -0,0 +1,30 @@
|
|||
from typing import List
|
||||
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
from PIL.Image import Image as PILImage
|
||||
|
||||
from .session_base import BaseSession
|
||||
|
||||
|
||||
class SimpleSession(BaseSession):
    """Session for single-mask saliency models (u2net, u2netp, u2net_human_seg, silueta)."""

    def predict(self, img: PILImage) -> List[PILImage]:
        """Run inference and return the saliency mask resized to ``img``'s size."""
        ort_outs = self.inner_session.run(
            None,
            self.normalize(
                img, (0.485, 0.456, 0.406), (0.229, 0.224, 0.225), (320, 320)
            ),
        )

        # First output channel holds the saliency map.
        pred = ort_outs[0][:, 0, :, :]

        # Min-max normalize to [0, 1].  Guard the constant-output case, which
        # the original ``(pred - mi) / (ma - mi)`` turned into NaNs via a
        # division by zero.
        ma = np.max(pred)
        mi = np.min(pred)
        pred = (pred - mi) / (ma - mi) if ma > mi else np.zeros_like(pred)
        pred = np.squeeze(pred)

        mask = Image.fromarray((pred * 255).astype("uint8"), mode="L")
        mask = mask.resize(img.size, Image.LANCZOS)

        return [mask]
|
|
@ -0,0 +1,18 @@
|
|||
aiohttp==3.8.1
|
||||
asyncer==0.0.2
|
||||
click==8.1.3
|
||||
fastapi==0.87.0
|
||||
filetype==1.2.0
|
||||
pooch==1.6.0
|
||||
imagehash==4.3.1
|
||||
numpy==1.23.5
|
||||
onnxruntime==1.13.1
|
||||
opencv-python-headless==4.6.0.66
|
||||
pillow==9.3.0
|
||||
pymatting==1.1.8
|
||||
python-multipart==0.0.5
|
||||
scikit-image==0.19.3
|
||||
scipy==1.9.3
|
||||
tqdm==4.64.1
|
||||
uvicorn==0.20.0
|
||||
watchdog==2.1.9
|
Binary file not shown.
Loading…
Reference in New Issue