Add venv to dockerignore

Improve readme
Improve dockerization as ipfs cli exec runs not needed anymore
Fix pyproject toml for gpu workers
Add more sections on example config
Drop and simplify many cli commands, try to use config.ini for everything now
Use more dynamic imports on cli to speed up startup
Improve model pipelines to allow low mem cards to run big models
Add upscaler download to `skynet download` cmd
pull/26/head
Guillermo Rodriguez 2023-10-03 12:55:57 -03:00
parent 454545d096
commit 3622c8ea11
No known key found for this signature in database
GPG Key ID: EC3AB66D5D83B392
18 changed files with 4006 additions and 459 deletions

View File

@ -7,3 +7,4 @@ outputs
*.egg-info *.egg-info
**/*.key **/*.key
**/*.cert **/*.cert
.venv

View File

@ -2,29 +2,61 @@
### decentralized compute platform ### decentralized compute platform
To launch a worker: To launch a worker:
### native install
system dependencies:
- `cuda` 11.8
- `llvm` 10
- `python` 3.10+
- `docker` (for ipfs node)
``` ```
# create and edit config from template # create and edit config from template
cp skynet.ini.example skynet.ini cp skynet.ini.example skynet.ini
# create python virtual envoirment 3.10+ # install poetry package manager
python3 -m venv venv curl -sSL https://install.python-poetry.org | python3 -
# enable envoirment # install
source venv/bin/activate poetry install
# install requirements # enable environment
pip install -r requirements.txt poetry shell
pip install -r requirements.cuda.0.txt
pip install -r requirements.cuda.1.txt
pip install -r requirements.cuda.2.txt
# install skynet
pip install -e .
# test you can run this command # test you can run this command
skynet --help skynet --help
# launch ipfs node
skynet run ipfs
# to launch worker # to launch worker
skynet run dgpu skynet run dgpu
``` ```
### dockerized install
system dependencies:
- `docker` with gpu enabled
```
# pull runtime container
docker pull guilledk/skynet:runtime-cuda
# or build it (takes a bit of time)
./build_docker.sh
# launch simple ipfs node
./launch_ipfs.sh
# run worker
docker run \
-it \
--rm \
--gpus all \
--network host \
--mount type=bind,source="$(pwd)",target=/root/target \
guilledk/skynet:runtime-cuda \
skynet run dgpu
```

View File

@ -1,7 +1,3 @@
docker build \ docker build \
-t skynet:runtime-cuda \ -t guilledk/skynet:runtime-cuda \
-f docker/Dockerfile.runtime+cuda . -f docker/Dockerfile.runtime+cuda .
docker build \
-t skynet:runtime \
-f docker/Dockerfile.runtime .

View File

@ -2,15 +2,14 @@ from python:3.11
env DEBIAN_FRONTEND=noninteractive env DEBIAN_FRONTEND=noninteractive
run apt-get update && apt-get install -y git
run curl -sSL https://install.python-poetry.org | python3 -
env PATH "/root/.local/bin:$PATH"
copy . /skynet
workdir /skynet workdir /skynet
copy requirements.txt requirements.txt run poetry install -v
copy pytest.ini ./
copy setup.py ./
copy skynet ./skynet
run pip install \
-e . \
-r requirements.txt
copy tests ./

View File

@ -1,29 +1,46 @@
from nvidia/cuda:11.7.0-devel-ubuntu20.04 from nvidia/cuda:11.8.0-devel-ubuntu20.04
from python:3.11 from python:3.11
env DEBIAN_FRONTEND=noninteractive env DEBIAN_FRONTEND=noninteractive
run apt-get update && \ run apt-get update && apt-get install -y \
apt-get install -y ffmpeg libsm6 libxext6 git \
clang \
cmake \
ffmpeg \
libsm6 \
libxext6 \
ninja-build
env CC /usr/bin/clang
env CXX /usr/bin/clang++
# install llvm10 as required by llvm-lite
run git clone https://github.com/llvm/llvm-project.git -b llvmorg-10.0.1
workdir /llvm-project
# this adds a commit from 12.0.0 that fixes build on newer compilers
run git cherry-pick -n b498303066a63a203d24f739b2d2e0e56dca70d1
run cmake -S llvm -B build -G Ninja -DCMAKE_BUILD_TYPE=Release
run ninja -C build install # -j8
run curl -sSL https://install.python-poetry.org | python3 -
env PATH "/root/.local/bin:$PATH"
copy . /skynet
workdir /skynet workdir /skynet
copy requirements.cuda* ./ env POETRY_VIRTUALENVS_PATH /skynet/.venv
run pip install -U pip ninja run poetry install --with=cuda -v
run pip install -v -r requirements.cuda.0.txt
run pip install -v -r requirements.cuda.1.txt
run pip install -v -r requirements.cuda.2.txt
copy requirements.txt requirements.txt workdir /root/target
copy pytest.ini pytest.ini
copy setup.py setup.py
copy skynet skynet
run pip install -e . -r requirements.txt
env PYTORCH_CUDA_ALLOC_CONF max_split_size_mb:128 env PYTORCH_CUDA_ALLOC_CONF max_split_size_mb:128
env NVIDIA_VISIBLE_DEVICES=all env NVIDIA_VISIBLE_DEVICES=all
env HF_HOME /hf_home
copy tests tests copy docker/entrypoint.sh /entrypoint.sh
entrypoint ["/entrypoint.sh"]
cmd ["skynet", "--help"]

View File

@ -0,0 +1,6 @@
#!/bin/sh
# Container entrypoint: point poetry at the virtualenv baked into the
# image at build time, then hand control to whatever command was given
# (defaults to the image CMD). `exec` replaces this shell so the target
# process becomes PID 1 and receives signals directly.
VIRTUAL_ENV='/skynet/.venv'
export VIRTUAL_ENV
poetry env use "$VIRTUAL_ENV/bin/python"
exec "$@"

36
launch_ipfs.sh 100755
View File

@ -0,0 +1,36 @@
#!/bin/bash
# Launch a local IPFS node in docker and optionally connect it to a set
# of swarm peers passed as positional arguments.

container_name='skynet-ipfs'
bootstrap_peers=("$@")

# Host directory bind-mounted as the node's data dir so the repo
# survives container removal (--rm).
host_data_dir="$(pwd)/ipfs-docker-data"
container_data_dir='/data/ipfs'

mkdir -p "$host_data_dir"

# Start the node detached; ports: 8080 gateway, 4001 swarm,
# 5001 RPC API bound to loopback only.
docker run -d \
    --name "$container_name" \
    -p 8080:8080/tcp \
    -p 4001:4001/tcp \
    -p 127.0.0.1:5001:5001/tcp \
    --mount type=bind,source="$host_data_dir",target="$container_data_dir" \
    --rm \
    ipfs/go-ipfs:latest

# The in-container ipfs user is uid/gid 1000; make the mounted repo
# writable by it.
docker exec "$container_name" chown 1000:1000 -R "$container_data_dir"

# Block until the daemon reports readiness by following the container
# logs; break out as soon as the ready marker appears.
while read -r line; do
    echo "$line"
    case "$line" in
        *'Daemon is ready'*) break ;;
    esac
done < <(docker logs -f "$container_name")

# Dial each requested peer; a failed dial is reported but non-fatal.
for peer in "${bootstrap_peers[@]}"; do
    docker exec "$container_name" ipfs swarm connect "$peer" \
        || echo "Error connecting to peer: $peer"
done

3635
poetry.lock generated 100644

File diff suppressed because it is too large Load Diff

View File

@ -11,10 +11,10 @@ python = '>=3.10,<3.12'
pytz = '^2023.3.post1' pytz = '^2023.3.post1'
trio = '^0.22.2' trio = '^0.22.2'
asks = '^3.0.0' asks = '^3.0.0'
numpy = '^1.26.0'
Pillow = '^10.0.1' Pillow = '^10.0.1'
docker = '^6.1.3' docker = '^6.1.3'
py-leap = {git = 'https://github.com/guilledk/py-leap.git', rev = 'v0.1a14'} py-leap = {git = 'https://github.com/guilledk/py-leap.git', rev = 'v0.1a14'}
xformers = "^0.0.22"
[tool.poetry.group.frontend] [tool.poetry.group.frontend]
optional = true optional = true
@ -37,12 +37,14 @@ pytest = {version = '^7.4.2'}
optional = true optional = true
[tool.poetry.group.cuda.dependencies] [tool.poetry.group.cuda.dependencies]
torch = {version = '1.13.0+cu117', source = 'torch'} torch = {version = '2.0.1+cu118', source = 'torch'}
scipy = {version = '^1.11.2'} scipy = {version = '^1.11.2'}
triton = {version = '^2.1.0'} numba = {version = '0.57.0'}
triton = {version = '2.0.0', source = 'torch'}
basicsr = {version = '^1.4.2'} basicsr = {version = '^1.4.2'}
diffusers = {version = '^0.21.2'} diffusers = {version = '^0.21.2'}
realesrgan = {version = '^0.3.0'} realesrgan = {version = '^0.3.0'}
torchvision = {version = '0.15.2+cu118', source = 'torch'}
accelerate = {version = '^0.23.0'} accelerate = {version = '^0.23.0'}
transformers = {version = '^4.33.2'} transformers = {version = '^4.33.2'}
huggingface-hub = {version = '^0.17.3'} huggingface-hub = {version = '^0.17.3'}
@ -51,9 +53,12 @@ invisible-watermark = {version = '^0.2.0'}
[[tool.poetry.source]] [[tool.poetry.source]]
name = 'torch' name = 'torch'
url = 'https://download.pytorch.org/whl/cu117' url = 'https://download.pytorch.org/whl/cu118'
priority = 'explicit' priority = 'explicit'
[build-system] [build-system]
requires = ['poetry-core'] requires = ['poetry-core', 'cython']
build-backend = 'poetry.core.masonry.api' build-backend = 'poetry.core.masonry.api'
[tool.poetry.scripts]
skynet = 'skynet.cli:skynet'

View File

@ -1,3 +1,6 @@
# config sections are optional, depending on which services
# you wish to run
[skynet.dgpu] [skynet.dgpu]
account = testworkerX account = testworkerX
permission = active permission = active
@ -36,3 +39,13 @@ ipfs_gateway_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Sn
ipfs_url = http://127.0.0.1:5001 ipfs_url = http://127.0.0.1:5001
token = XXXXXXXXXX:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx token = XXXXXXXXXX:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
[skynet.pinner]
hyperion_url = https://testnet.skygpu.net
ipfs_url = http://127.0.0.1:5001
[skynet.user]
account = testuser
permission = active
key = 5Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
node_url = https://testnet.skygpu.net

View File

@ -6,25 +6,12 @@ import random
from functools import partial from functools import partial
import trio
import asks
import click import click
import asyncio
import requests
from leap.cleos import CLEOS from leap.sugar import Name, asset_from_str
from leap.sugar import collect_stdout, Name, asset_from_str
from leap.hyperion import HyperionAPI
from skynet.ipfs import AsyncIPFSHTTP
from .db import open_new_database
from .config import * from .config import *
from .nodeos import open_cleos, open_nodeos
from .constants import * from .constants import *
from .frontend.telegram import SkynetTelegramFrontend
from .frontend.discord import SkynetDiscordFrontend
@click.group() @click.group()
@ -44,7 +31,9 @@ def skynet(*args, **kwargs):
@click.option('--seed', '-S', default=None) @click.option('--seed', '-S', default=None)
def txt2img(*args, **kwargs): def txt2img(*args, **kwargs):
from . import utils from . import utils
_, hf_token, _, _ = init_env_from_config()
config = load_skynet_ini()
hf_token = load_key(config, 'skynet.dgpu', 'hf_token')
utils.txt2img(hf_token, **kwargs) utils.txt2img(hf_token, **kwargs)
@click.command() @click.command()
@ -59,7 +48,8 @@ def txt2img(*args, **kwargs):
@click.option('--seed', '-S', default=None) @click.option('--seed', '-S', default=None)
def img2img(model, prompt, input, output, strength, guidance, steps, seed): def img2img(model, prompt, input, output, strength, guidance, steps, seed):
from . import utils from . import utils
_, hf_token, _, _ = init_env_from_config() config = load_skynet_ini()
hf_token = load_key(config, 'skynet.dgpu', 'hf_token')
utils.img2img( utils.img2img(
hf_token, hf_token,
model=model, model=model,
@ -87,95 +77,83 @@ def upscale(input, output, model):
@skynet.command() @skynet.command()
def download(): def download():
from . import utils from . import utils
_, hf_token, _, _ = init_env_from_config() config = load_skynet_ini()
hf_token = load_key(config, 'skynet.dgpu', 'hf_token')
utils.download_all_models(hf_token) utils.download_all_models(hf_token)
@skynet.command() @skynet.command()
@click.option(
'--account', '-A', default=None)
@click.option(
'--permission', '-P', default=None)
@click.option(
'--key', '-k', default=None)
@click.option(
'--node-url', '-n', default='https://skynet.ancap.tech')
@click.option( @click.option(
'--reward', '-r', default='20.0000 GPU') '--reward', '-r', default='20.0000 GPU')
@click.option('--jobs', '-j', default=1) @click.option('--jobs', '-j', default=1)
@click.option('--model', '-m', default='prompthero/openjourney') @click.option('--model', '-m', default='stabilityai/stable-diffusion-xl-base-1.0')
@click.option( @click.option(
'--prompt', '-p', default='a red old tractor in a sunny wheat field') '--prompt', '-p', default='a red old tractor in a sunny wheat field')
@click.option('--output', '-o', default='output.png') @click.option('--output', '-o', default='output.png')
@click.option('--width', '-w', default=512) @click.option('--width', '-w', default=1024)
@click.option('--height', '-h', default=512) @click.option('--height', '-h', default=1024)
@click.option('--guidance', '-g', default=10) @click.option('--guidance', '-g', default=10)
@click.option('--step', '-s', default=26) @click.option('--step', '-s', default=26)
@click.option('--seed', '-S', default=None) @click.option('--seed', '-S', default=None)
@click.option('--upscaler', '-U', default='x4') @click.option('--upscaler', '-U', default='x4')
@click.option('--binary_data', '-b', default='') @click.option('--binary_data', '-b', default='')
def enqueue( def enqueue(
account: str,
permission: str,
key: str | None,
node_url: str,
reward: str, reward: str,
jobs: int, jobs: int,
**kwargs **kwargs
): ):
key, account, permission = load_account_info( import trio
'user', key, account, permission) from leap.cleos import CLEOS
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url) config = load_skynet_ini()
with open_cleos(node_url, key=key) as cleos: key = load_key(config, 'skynet.user', 'key')
async def enqueue_n_jobs(): account = load_key(config, 'skynet.user', 'account')
for i in range(jobs): permission = load_key(config, 'skynet.user', 'permission')
if not kwargs['seed']: node_url = load_key(config, 'skynet.user', 'node_url')
kwargs['seed'] = random.randint(0, 10e9)
req = json.dumps({ cleos = CLEOS(None, None, url=node_url, remote=node_url)
'method': 'diffuse',
'params': kwargs
})
binary = kwargs['binary_data']
res = await cleos.a_push_action( async def enqueue_n_jobs():
'telos.gpu', for i in range(jobs):
'enqueue', if not kwargs['seed']:
{ kwargs['seed'] = random.randint(0, 10e9)
'user': Name(account),
'request_body': req, req = json.dumps({
'binary_data': binary, 'method': 'diffuse',
'reward': asset_from_str(reward), 'params': kwargs
'min_verification': 1 })
}, binary = kwargs['binary_data']
account, key, permission,
) res = await cleos.a_push_action(
print(res) 'telos.gpu',
trio.run(enqueue_n_jobs) 'enqueue',
{
'user': Name(account),
'request_body': req,
'binary_data': binary,
'reward': asset_from_str(reward),
'min_verification': 1
},
account, key, permission,
)
print(res)
trio.run(enqueue_n_jobs)
@skynet.command() @skynet.command()
@click.option('--loglevel', '-l', default='INFO', help='Logging level') @click.option('--loglevel', '-l', default='INFO', help='Logging level')
@click.option(
'--account', '-A', default='telos.gpu')
@click.option(
'--permission', '-P', default='active')
@click.option(
'--key', '-k', default=None)
@click.option(
'--node-url', '-n', default='https://skynet.ancap.tech')
def clean( def clean(
loglevel: str, loglevel: str,
account: str,
permission: str,
key: str | None,
node_url: str,
): ):
key, account, permission = load_account_info( import trio
'user', key, account, permission) from leap.cleos import CLEOS
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url) config = load_skynet_ini()
key = load_key(config, 'skynet.user', 'key')
account = load_key(config, 'skynet.user', 'account')
permission = load_key(config, 'skynet.user', 'permission')
node_url = load_key(config, 'skynet.user', 'node_url')
logging.basicConfig(level=loglevel) logging.basicConfig(level=loglevel)
cleos = CLEOS(None, None, url=node_url, remote=node_url) cleos = CLEOS(None, None, url=node_url, remote=node_url)
@ -190,10 +168,10 @@ def clean(
) )
@skynet.command() @skynet.command()
@click.option( def queue():
'--node-url', '-n', default='https://skynet.ancap.tech') import requests
def queue(node_url: str): config = load_skynet_ini()
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url) node_url = load_key(config, 'skynet.user', 'node_url')
resp = requests.post( resp = requests.post(
f'{node_url}/v1/chain/get_table_rows', f'{node_url}/v1/chain/get_table_rows',
json={ json={
@ -206,11 +184,11 @@ def queue(node_url: str):
print(json.dumps(resp.json(), indent=4)) print(json.dumps(resp.json(), indent=4))
@skynet.command() @skynet.command()
@click.option(
'--node-url', '-n', default='https://skynet.ancap.tech')
@click.argument('request-id') @click.argument('request-id')
def status(node_url: str, request_id: int): def status(request_id: int):
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url) import requests
config = load_skynet_ini()
node_url = load_key(config, 'skynet.user', 'node_url')
resp = requests.post( resp = requests.post(
f'{node_url}/v1/chain/get_table_rows', f'{node_url}/v1/chain/get_table_rows',
json={ json={
@ -223,26 +201,16 @@ def status(node_url: str, request_id: int):
print(json.dumps(resp.json(), indent=4)) print(json.dumps(resp.json(), indent=4))
@skynet.command() @skynet.command()
@click.option(
'--account', '-a', default='telegram')
@click.option(
'--permission', '-p', default='active')
@click.option(
'--key', '-k', default=None)
@click.option(
'--node-url', '-n', default='https://skynet.ancap.tech')
@click.argument('request-id') @click.argument('request-id')
def dequeue( def dequeue(request_id: int):
account: str, import trio
permission: str, from leap.cleos import CLEOS
key: str | None,
node_url: str,
request_id: int
):
key, account, permission = load_account_info(
'user', key, account, permission)
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url) config = load_skynet_ini()
key = load_key(config, 'skynet.user', 'key')
account = load_key(config, 'skynet.user', 'account')
permission = load_key(config, 'skynet.user', 'permission')
node_url = load_key(config, 'skynet.user', 'node_url')
cleos = CLEOS(None, None, url=node_url, remote=node_url) cleos = CLEOS(None, None, url=node_url, remote=node_url)
res = trio.run( res = trio.run(
@ -261,29 +229,24 @@ def dequeue(
@skynet.command() @skynet.command()
@click.option(
'--account', '-a', default='telos.gpu')
@click.option(
'--permission', '-p', default='active')
@click.option(
'--key', '-k', default=None)
@click.option(
'--node-url', '-n', default='https://skynet.ancap.tech')
@click.option( @click.option(
'--token-contract', '-c', default='eosio.token') '--token-contract', '-c', default='eosio.token')
@click.option( @click.option(
'--token-symbol', '-S', default='4,GPU') '--token-symbol', '-S', default='4,GPU')
def config( def config(
account: str,
permission: str,
key: str | None,
node_url: str,
token_contract: str, token_contract: str,
token_symbol: str token_symbol: str
): ):
key, account, permission = load_account_info( import trio
'user', key, account, permission) from leap.cleos import CLEOS
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)
config = load_skynet_ini()
key = load_key(config, 'skynet.user', 'key')
account = load_key(config, 'skynet.user', 'account')
permission = load_key(config, 'skynet.user', 'permission')
node_url = load_key(config, 'skynet.user', 'node_url')
cleos = CLEOS(None, None, url=node_url, remote=node_url) cleos = CLEOS(None, None, url=node_url, remote=node_url)
res = trio.run( res = trio.run(
partial( partial(
@ -301,26 +264,18 @@ def config(
@skynet.command() @skynet.command()
@click.option(
'--account', '-a', default='telegram')
@click.option(
'--permission', '-p', default='active')
@click.option(
'--key', '-k', default=None)
@click.option(
'--node-url', '-n', default='https://skynet.ancap.tech')
@click.argument('quantity') @click.argument('quantity')
def deposit( def deposit(quantity: str):
account: str, import trio
permission: str, from leap.cleos import CLEOS
key: str | None,
node_url: str,
quantity: str
):
key, account, permission = load_account_info(
'user', key, account, permission)
node_url, _, _, _ = load_endpoint_info('user', node_url=node_url) config = load_skynet_ini()
key = load_key(config, 'skynet.user', 'key')
account = load_key(config, 'skynet.user', 'account')
permission = load_key(config, 'skynet.user', 'permission')
node_url = load_key(config, 'skynet.user', 'node_url')
cleos = CLEOS(None, None, url=node_url, remote=node_url)
res = trio.run( res = trio.run(
partial( partial(
@ -345,6 +300,8 @@ def run(*args, **kwargs):
@run.command() @run.command()
def db(): def db():
from .db import open_new_database
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
with open_new_database(cleanup=False) as db_params: with open_new_database(cleanup=False) as db_params:
container, passwd, host = db_params container, passwd, host = db_params
@ -352,6 +309,8 @@ def db():
@run.command() @run.command()
def nodeos(): def nodeos():
from .nodeos import open_nodeos
logging.basicConfig(filename='skynet-nodeos.log', level=logging.INFO) logging.basicConfig(filename='skynet-nodeos.log', level=logging.INFO)
with open_nodeos(cleanup=False): with open_nodeos(cleanup=False):
... ...
@ -364,6 +323,7 @@ def dgpu(
loglevel: str, loglevel: str,
config_path: str config_path: str
): ):
import trio
from .dgpu import open_dgpu_node from .dgpu import open_dgpu_node
logging.basicConfig(level=loglevel) logging.basicConfig(level=loglevel)
@ -377,20 +337,6 @@ def dgpu(
@run.command() @run.command()
@click.option('--loglevel', '-l', default='INFO', help='logging level') @click.option('--loglevel', '-l', default='INFO', help='logging level')
@click.option(
'--account', '-a', default='telegram')
@click.option(
'--permission', '-p', default='active')
@click.option(
'--key', '-k', default=None)
@click.option(
'--hyperion-url', '-y', default=f'https://testnet.{DEFAULT_DOMAIN}')
@click.option(
'--node-url', '-n', default=f'https://testnet.{DEFAULT_DOMAIN}')
@click.option(
'--ipfs-url', '-i', default=DEFAULT_IPFS_LOCAL)
@click.option(
'--ipfs-gateway-url', '-I', default=None)
@click.option( @click.option(
'--db-host', '-h', default='localhost:5432') '--db-host', '-h', default='localhost:5432')
@click.option( @click.option(
@ -399,26 +345,26 @@ def dgpu(
'--db-pass', '-u', default='password') '--db-pass', '-u', default='password')
def telegram( def telegram(
loglevel: str, loglevel: str,
account: str,
permission: str,
key: str | None,
hyperion_url: str,
ipfs_url: str,
ipfs_gateway_url: str,
node_url: str,
db_host: str, db_host: str,
db_user: str, db_user: str,
db_pass: str db_pass: str
): ):
import asyncio
from .frontend.telegram import SkynetTelegramFrontend
logging.basicConfig(level=loglevel) logging.basicConfig(level=loglevel)
_, _, tg_token, _ = init_env_from_config() config = load_skynet_ini()
tg_token = load_key(config, 'skynet.telegram', 'tg_token')
key, account, permission = load_account_info( key = load_key(config, 'skynet.user', 'key')
'telegram', key, account, permission) account = load_key(config, 'skynet.user', 'account')
permission = load_key(config, 'skynet.user', 'permission')
node_url = load_key(config, 'skynet.user', 'node_url')
hyperion_url = load_key(config, 'skynet.telegram', 'hyperion_url')
node_url, _, ipfs_gateway_url, ipfs_url = load_endpoint_info( ipfs_gateway_url = load_key(config, 'skynet.telegram', 'ipfs_gateway_url')
'telegram', node_url=node_url, ipfs_gateway_url=ipfs_gateway_url) ipfs_url = load_key(config, 'skynet.telegram', 'ipfs_url')
async def _async_main(): async def _async_main():
frontend = SkynetTelegramFrontend( frontend = SkynetTelegramFrontend(
@ -442,20 +388,6 @@ def telegram(
@run.command() @run.command()
@click.option('--loglevel', '-l', default='INFO', help='logging level') @click.option('--loglevel', '-l', default='INFO', help='logging level')
@click.option(
'--account', '-a', default='discord')
@click.option(
'--permission', '-p', default='active')
@click.option(
'--key', '-k', default=None)
@click.option(
'--hyperion-url', '-y', default=f'https://testnet.{DEFAULT_DOMAIN}')
@click.option(
'--node-url', '-n', default=f'https://testnet.{DEFAULT_DOMAIN}')
@click.option(
'--ipfs-url', '-i', default=DEFAULT_IPFS_LOCAL)
@click.option(
'--ipfs-gateway-url', '-I', default=DEFAULT_IPFS_REMOTE)
@click.option( @click.option(
'--db-host', '-h', default='localhost:5432') '--db-host', '-h', default='localhost:5432')
@click.option( @click.option(
@ -464,26 +396,26 @@ def telegram(
'--db-pass', '-u', default='password') '--db-pass', '-u', default='password')
def discord( def discord(
loglevel: str, loglevel: str,
account: str,
permission: str,
key: str | None,
hyperion_url: str,
ipfs_url: str,
ipfs_gateway_url: str,
node_url: str,
db_host: str, db_host: str,
db_user: str, db_user: str,
db_pass: str db_pass: str
): ):
import asyncio
from .frontend.discord import SkynetDiscordFrontend
logging.basicConfig(level=loglevel) logging.basicConfig(level=loglevel)
_, _, _, dc_token = init_env_from_config() config = load_skynet_ini()
dc_token = load_key(config, 'skynet.discord', 'dc_token')
key, account, permission = load_account_info( key = load_key(config, 'skynet.discord', 'key')
'discord', key, account, permission) account = load_key(config, 'skynet.discord', 'account')
permission = load_key(config, 'skynet.discord', 'permission')
node_url = load_key(config, 'skynet.discord', 'node_url')
hyperion_url = load_key(config, 'skynet.discord', 'hyperion_url')
node_url, _, ipfs_gateway_url, ipfs_url = load_endpoint_info( ipfs_gateway_url = load_key(config, 'skynet.discord', 'ipfs_gateway_url')
'discord', node_url=node_url, ipfs_gateway_url=ipfs_gateway_url) ipfs_url = load_key(config, 'skynet.discord', 'ipfs_url')
async def _async_main(): async def _async_main():
frontend = SkynetDiscordFrontend( frontend = SkynetDiscordFrontend(
@ -507,24 +439,28 @@ def discord(
@run.command() @run.command()
@click.option('--loglevel', '-l', default='INFO', help='logging level') @click.option('--loglevel', '-l', default='INFO', help='logging level')
@click.option('--name', '-n', default='skynet-ipfs', help='container name') @click.option('--name', '-n', default='skynet-ipfs', help='container name')
def ipfs(loglevel, name): @click.option('--peer', '-p', default=(), help='connect to peer', multiple=True, type=str)
def ipfs(loglevel, name, peer):
from skynet.ipfs.docker import open_ipfs_node from skynet.ipfs.docker import open_ipfs_node
logging.basicConfig(level=loglevel) logging.basicConfig(level=loglevel)
with open_ipfs_node(name=name): with open_ipfs_node(name=name, peers=peer):
... ...
@run.command() @run.command()
@click.option('--loglevel', '-l', default='INFO', help='logging level') @click.option('--loglevel', '-l', default='INFO', help='logging level')
@click.option( def pinner(loglevel):
'--ipfs-rpc', '-i', default='http://127.0.0.1:5001') import trio
@click.option( from leap.hyperion import HyperionAPI
'--hyperion-url', '-y', default='http://127.0.0.1:42001') from .ipfs import AsyncIPFSHTTP
def pinner(loglevel, ipfs_rpc, hyperion_url):
from .ipfs.pinner import SkynetPinner from .ipfs.pinner import SkynetPinner
config = load_skynet_ini()
hyperion_url = load_key(config, 'skynet.pinner', 'hyperion_url')
ipfs_url = load_key(config, 'skynet.pinner', 'ipfs_url')
logging.basicConfig(level=loglevel) logging.basicConfig(level=loglevel)
ipfs_node = AsyncIPFSHTTP(ipfs_rpc) ipfs_node = AsyncIPFSHTTP(ipfs_url)
hyperion = HyperionAPI(hyperion_url) hyperion = HyperionAPI(hyperion_url)
pinner = SkynetPinner(hyperion, ipfs_node) pinner = SkynetPinner(hyperion, ipfs_node)

View File

@ -1,117 +1,30 @@
#!/usr/bin/python #!/usr/bin/python
import os
import json
from pathlib import Path
from configparser import ConfigParser from configparser import ConfigParser
from re import sub
from .constants import DEFAULT_CONFIG_PATH from .constants import DEFAULT_CONFIG_PATH
class ConfigParsingError(BaseException):
...
def load_skynet_ini( def load_skynet_ini(
file_path=DEFAULT_CONFIG_PATH file_path=DEFAULT_CONFIG_PATH
): ) -> ConfigParser:
config = ConfigParser() config = ConfigParser()
config.read(file_path) config.read(file_path)
return config return config
def init_env_from_config( def load_key(config: ConfigParser, section: str, key: str) -> str:
hf_token: str | None = None, if section not in config:
hf_home: str | None = None, conf_sections = [s for s in config]
tg_token: str | None = None, raise ConfigParsingError(f'section \"{section}\" not in {conf_sections}')
dc_token: str | None = None,
file_path=DEFAULT_CONFIG_PATH
):
config = load_skynet_ini(file_path=file_path)
if 'HF_TOKEN' in os.environ: if key not in config[section]:
hf_token = os.environ['HF_TOKEN'] conf_keys = [k for k in config[section]]
raise ConfigParsingError(f'key \"{key}\" not in {conf_keys}')
elif 'skynet.dgpu' in config: return str(config[section][key])
sub_config = config['skynet.dgpu']
if 'hf_token' in sub_config:
hf_token = sub_config['hf_token']
os.environ['HF_TOKEN'] = hf_token
if 'HF_HOME' in os.environ:
hf_home = os.environ['HF_HOME']
elif 'skynet.dgpu' in config:
sub_config = config['skynet.dgpu']
if 'hf_home' in sub_config:
hf_home = sub_config['hf_home']
os.environ['HF_HOME'] = hf_home
if 'TG_TOKEN' in os.environ:
tg_token = os.environ['TG_TOKEN']
elif 'skynet.telegram' in config:
sub_config = config['skynet.telegram']
if 'token' in sub_config:
tg_token = sub_config['token']
if 'DC_TOKEN' in os.environ:
dc_token = os.environ['DC_TOKEN']
elif 'skynet.discord' in config:
sub_config = config['skynet.discord']
if 'token' in sub_config:
dc_token = sub_config['token']
return hf_home, hf_token, tg_token, dc_token
def load_account_info(
_type: str,
key: str | None = None,
account: str | None = None,
permission: str | None = None,
file_path=DEFAULT_CONFIG_PATH
):
config = load_skynet_ini(file_path=file_path)
type_key = f'skynet.{_type}'
if type_key in config:
sub_config = config[type_key]
if not key and 'key' in sub_config:
key = sub_config['key']
if not account and 'account' in sub_config:
account = sub_config['account']
if not permission and 'permission' in sub_config:
permission = sub_config['permission']
return key, account, permission
def load_endpoint_info(
_type: str,
node_url: str | None = None,
hyperion_url: str | None = None,
ipfs_url: str | None = None,
ipfs_gateway_url: str | None = None,
file_path=DEFAULT_CONFIG_PATH
):
config = load_skynet_ini(file_path=file_path)
type_key = f'skynet.{_type}'
if type_key in config:
sub_config = config[type_key]
if not node_url and 'node_url' in sub_config:
node_url = sub_config['node_url']
if not hyperion_url and 'hyperion_url' in sub_config:
hyperion_url = sub_config['hyperion_url']
if not ipfs_url and 'ipfs_url' in sub_config:
ipfs_url = sub_config['ipfs_url']
if not ipfs_gateway_url and 'ipfs_gateway_url' in sub_config:
ipfs_gateway_url = sub_config['ipfs_gateway_url']
return node_url, hyperion_url, ipfs_gateway_url, ipfs_url

View File

@ -5,18 +5,18 @@ VERSION = '0.1a11'
DOCKER_RUNTIME_CUDA = 'skynet:runtime-cuda' DOCKER_RUNTIME_CUDA = 'skynet:runtime-cuda'
MODELS = { MODELS = {
'prompthero/openjourney': { 'short': 'midj'}, 'prompthero/openjourney': {'short': 'midj', 'mem': 8},
'runwayml/stable-diffusion-v1-5': { 'short': 'stable'}, 'runwayml/stable-diffusion-v1-5': {'short': 'stable', 'mem': 8},
'stabilityai/stable-diffusion-2-1-base': { 'short': 'stable2'}, 'stabilityai/stable-diffusion-2-1-base': {'short': 'stable2', 'mem': 8},
'snowkidy/stable-diffusion-xl-base-0.9': { 'short': 'stablexl0.9'}, 'snowkidy/stable-diffusion-xl-base-0.9': {'short': 'stablexl0.9', 'mem': 24},
'stabilityai/stable-diffusion-xl-base-1.0': { 'short': 'stablexl'}, 'stabilityai/stable-diffusion-xl-base-1.0': {'short': 'stablexl', 'mem': 24},
'Linaqruf/anything-v3.0': { 'short': 'hdanime'}, 'Linaqruf/anything-v3.0': {'short': 'hdanime', 'mem': 8},
'hakurei/waifu-diffusion': { 'short': 'waifu'}, 'hakurei/waifu-diffusion': {'short': 'waifu', 'mem': 8},
'nitrosocke/Ghibli-Diffusion': { 'short': 'ghibli'}, 'nitrosocke/Ghibli-Diffusion': {'short': 'ghibli', 'mem': 8},
'dallinmackay/Van-Gogh-diffusion': { 'short': 'van-gogh'}, 'dallinmackay/Van-Gogh-diffusion': {'short': 'van-gogh', 'mem': 8},
'lambdalabs/sd-pokemon-diffusers': { 'short': 'pokemon'}, 'lambdalabs/sd-pokemon-diffusers': {'short': 'pokemon', 'mem': 8},
'Envvi/Inkpunk-Diffusion': { 'short': 'ink'}, 'Envvi/Inkpunk-Diffusion': {'short': 'ink', 'mem': 8},
'nousr/robo-diffusion': { 'short': 'robot'} 'nousr/robo-diffusion': {'short': 'robot', 'mem': 8}
} }
SHORT_NAMES = [ SHORT_NAMES = [
@ -165,8 +165,7 @@ DEFAULT_UPSCALER = None
DEFAULT_CONFIG_PATH = 'skynet.ini' DEFAULT_CONFIG_PATH = 'skynet.ini'
DEFAULT_INITAL_MODELS = [ DEFAULT_INITAL_MODELS = [
'prompthero/openjourney', 'stabilityai/stable-diffusion-xl-base-1.0'
'runwayml/stable-diffusion-v1-5'
] ]
DATE_FORMAT = '%B the %dth %Y, %H:%M:%S' DATE_FORMAT = '%B the %dth %Y, %H:%M:%S'
@ -189,3 +188,5 @@ DEFAULT_IPFS_LOCAL = 'http://127.0.0.1:5001'
TG_MAX_WIDTH = 1280 TG_MAX_WIDTH = 1280
TG_MAX_HEIGHT = 1280 TG_MAX_HEIGHT = 1280
DEFAULT_SINGLE_CARD_MAP = 'cuda:0'

View File

@ -1,7 +1,5 @@
#!/usr/bin/python #!/usr/bin/python
import trio
from skynet.dgpu.compute import SkynetMM from skynet.dgpu.compute import SkynetMM
from skynet.dgpu.daemon import SkynetDGPUDaemon from skynet.dgpu.daemon import SkynetDGPUDaemon
from skynet.dgpu.network import SkynetGPUConnector from skynet.dgpu.network import SkynetGPUConnector
@ -11,6 +9,4 @@ async def open_dgpu_node(config: dict):
conn = SkynetGPUConnector(config) conn = SkynetGPUConnector(config)
mm = SkynetMM(config) mm = SkynetMM(config)
async with conn.open() as conn: await SkynetDGPUDaemon(mm, conn, config).serve_forever()
await (SkynetDGPUDaemon(mm, conn, config)
.serve_forever())

View File

@ -10,8 +10,6 @@ import logging
import asks import asks
from PIL import Image from PIL import Image
from contextlib import asynccontextmanager as acm
from leap.cleos import CLEOS from leap.cleos import CLEOS
from leap.sugar import Checksum256, Name, asset_from_str from leap.sugar import Checksum256, Name, asset_from_str
from skynet.constants import DEFAULT_DOMAIN from skynet.constants import DEFAULT_DOMAIN
@ -26,7 +24,9 @@ async def failable(fn: partial, ret_fail=None):
except ( except (
asks.errors.RequestTimeout, asks.errors.RequestTimeout,
json.JSONDecodeError asks.errors.BadHttpResponse,
json.JSONDecodeError,
OSError
): ):
return ret_fail return ret_fail
@ -193,11 +193,11 @@ class SkynetGPUConnector:
) )
# IPFS helpers # IPFS helpers
async def publish_on_ipfs(self, raw_img: bytes): async def publish_on_ipfs(self, raw_img: bytes):
Path('ipfs-staging').mkdir(exist_ok=True)
logging.info('publish_on_ipfs') logging.info('publish_on_ipfs')
img = Image.open(io.BytesIO(raw_img)) img = Image.open(io.BytesIO(raw_img))
img.save('ipfs-docker-staging/image.png') img.save('ipfs-staging/image.png')
# check peer connections, reconnect to skynet gateway if not # check peer connections, reconnect to skynet gateway if not
gateway_id = Path(self.ipfs_gateway_url).name gateway_id = Path(self.ipfs_gateway_url).name
@ -205,7 +205,7 @@ class SkynetGPUConnector:
if gateway_id not in [p['Peer'] for p in peers]: if gateway_id not in [p['Peer'] for p in peers]:
await self.ipfs_client.connect(self.ipfs_gateway_url) await self.ipfs_client.connect(self.ipfs_gateway_url)
file_info = await self.ipfs_client.add(Path('ipfs-docker-staging/image.png')) file_info = await self.ipfs_client.add(Path('ipfs-staging/image.png'))
file_cid = file_info['Hash'] file_cid = file_info['Hash']
await self.ipfs_client.pin(file_cid) await self.ipfs_client.pin(file_cid)

View File

@ -1,6 +1,5 @@
#!/usr/bin/python #!/usr/bin/python
import os
import sys import sys
import logging import logging
@ -10,48 +9,14 @@ from contextlib import contextmanager as cm
import docker import docker
from docker.types import Mount from docker.types import Mount
from docker.models.containers import Container
class IPFSDocker:
def __init__(self, container: Container):
self._container = container
def add(self, file: str) -> str:
ec, out = self._container.exec_run(
['ipfs', 'add', '-w', f'/export/{file}', '-Q'])
if ec != 0:
logging.error(out)
assert ec == 0
return out.decode().rstrip()
def pin(self, ipfs_hash: str):
ec, _ = self._container.exec_run(
['ipfs', 'pin', 'add', ipfs_hash])
assert ec == 0
def connect(self, remote_node: str):
ec, out = self._container.exec_run(
['ipfs', 'swarm', 'connect', remote_node])
if ec != 0:
logging.error(out)
assert ec == 0
def check_connect(self):
ec, out = self._container.exec_run(
['ipfs', 'swarm', 'peers'])
if ec != 0:
logging.error(out)
assert ec == 0
return out.splitlines()
@cm @cm
def open_ipfs_node(name='skynet-ipfs', teardown=False): def open_ipfs_node(
name: str = 'skynet-ipfs',
teardown: bool = False,
peers: list[str] = []
):
dclient = docker.from_env() dclient = docker.from_env()
container = None container = None
@ -59,13 +24,9 @@ def open_ipfs_node(name='skynet-ipfs', teardown=False):
container = dclient.containers.get(name) container = dclient.containers.get(name)
except docker.errors.NotFound: except docker.errors.NotFound:
staging_dir = Path().resolve() / 'ipfs-docker-staging'
staging_dir.mkdir(parents=True, exist_ok=True)
data_dir = Path().resolve() / 'ipfs-docker-data' data_dir = Path().resolve() / 'ipfs-docker-data'
data_dir.mkdir(parents=True, exist_ok=True) data_dir.mkdir(parents=True, exist_ok=True)
export_target = '/export'
data_target = '/data/ipfs' data_target = '/data/ipfs'
container = dclient.containers.run( container = dclient.containers.run(
@ -77,19 +38,15 @@ def open_ipfs_node(name='skynet-ipfs', teardown=False):
'5001/tcp': ('127.0.0.1', 5001) '5001/tcp': ('127.0.0.1', 5001)
}, },
mounts=[ mounts=[
Mount(export_target, str(staging_dir), 'bind'),
Mount(data_target, str(data_dir), 'bind') Mount(data_target, str(data_dir), 'bind')
], ],
detach=True, detach=True,
remove=True remove=True
) )
uid, gid = 1000, 1000
if sys.platform != 'win32': if sys.platform != 'win32':
uid = os.getuid()
gid = os.getgid()
ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', export_target])
logging.info(out)
assert ec == 0
ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', data_target]) ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', data_target])
logging.info(out) logging.info(out)
assert ec == 0 assert ec == 0
@ -100,7 +57,13 @@ def open_ipfs_node(name='skynet-ipfs', teardown=False):
if 'Daemon is ready' in log: if 'Daemon is ready' in log:
break break
yield IPFSDocker(container) for peer in peers:
ec, out = container.exec_run(
['ipfs', 'swarm', 'connect', peer])
if ec != 0:
logging.error(out)
yield
if teardown and container: if teardown and container:
container.stop() container.stop()

View File

@ -4,44 +4,12 @@ import json
import time import time
import logging import logging
from datetime import datetime
from contextlib import contextmanager as cm from contextlib import contextmanager as cm
import docker import docker
from pytz import timezone from leap.cleos import CLEOS
from leap.cleos import CLEOS, default_nodeos_image from leap.sugar import get_container, Symbol
from leap.sugar import get_container, Symbol, random_string
@cm
def open_cleos(
node_url: str,
key: str | None
):
vtestnet = None
try:
dclient = docker.from_env()
vtestnet = get_container(
dclient,
default_nodeos_image(),
name=f'skynet-wallet-{random_string(size=8)}',
force_unique=True,
detach=True,
network='host',
remove=True)
cleos = CLEOS(dclient, vtestnet, url=node_url, remote=node_url)
if key:
cleos.setup_wallet(key)
yield cleos
finally:
if vtestnet:
vtestnet.stop()
@cm @cm

View File

@ -1,12 +1,14 @@
#!/usr/bin/python #!/usr/bin/python
import io import io
import logging
import os import os
import time import time
import random import random
from typing import Optional from typing import Optional
from pathlib import Path from pathlib import Path
import asks
import torch import torch
import numpy as np import numpy as np
@ -23,6 +25,8 @@ from diffusers import (
) )
from realesrgan import RealESRGANer from realesrgan import RealESRGANer
from huggingface_hub import login from huggingface_hub import login
from torch.distributions import weibull
import trio
from .constants import MODELS from .constants import MODELS
@ -74,16 +78,25 @@ def pipeline_for(model: str, mem_fraction: float = 1.0, image=False) -> Diffusio
torch.backends.cudnn.benchmark = False torch.backends.cudnn.benchmark = False
torch.use_deterministic_algorithms(True) torch.use_deterministic_algorithms(True)
model_info = MODELS[model]
req_mem = model_info['mem']
mem_gb = torch.cuda.mem_get_info()[1] / (10**9)
over_mem = mem_gb < req_mem
if over_mem:
logging.warn(f'model requires {req_mem} but card has {mem_gb}, model will run slower..')
shortname = model_info['short']
params = { params = {
'torch_dtype': torch.float16, 'torch_dtype': torch.float16,
'safety_checker': None 'safety_checker': None
} }
if model == 'runwayml/stable-diffusion-v1-5': if shortname == 'stable':
params['revision'] = 'fp16' params['revision'] = 'fp16'
if (model == 'stabilityai/stable-diffusion-xl-base-1.0' or if 'xl' in shortname:
model == 'snowkidy/stable-diffusion-xl-base-0.9'):
if image: if image:
pipe_class = StableDiffusionXLImg2ImgPipeline pipe_class = StableDiffusionXLImg2ImgPipeline
else: else:
@ -100,10 +113,16 @@ def pipeline_for(model: str, mem_fraction: float = 1.0, image=False) -> Diffusio
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config( pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(
pipe.scheduler.config) pipe.scheduler.config)
if not image: if over_mem:
pipe.enable_vae_slicing() if not image:
pipe.enable_vae_slicing()
pipe.enable_vae_tiling()
return pipe.to('cuda') pipe.enable_model_cpu_offload()
pipe.enable_xformers_memory_efficient_attention()
return pipe
def txt2img( def txt2img(
@ -209,14 +228,25 @@ def upscale(
convert_from_image_to_cv2(input_img), outscale=4) convert_from_image_to_cv2(input_img), outscale=4)
image = convert_from_cv2_to_image(up_img) image = convert_from_cv2_to_image(up_img)
image.save(output) image.save(output)
async def download_upscaler():
print('downloading upscaler...')
weights_path = Path('weights')
weights_path.mkdir(exist_ok=True)
upscaler_url = 'https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth'
save_path = weights_path / 'RealESRGAN_x4plus.pth'
response = await asks.get(upscaler_url)
with open(save_path, 'wb') as f:
f.write(response.content)
print('done')
def download_all_models(hf_token: str): def download_all_models(hf_token: str):
assert torch.cuda.is_available() assert torch.cuda.is_available()
trio.run(download_upscaler)
login(token=hf_token) login(token=hf_token)
for model in MODELS: for model in MODELS:
print(f'DOWNLOADING {model.upper()}') print(f'DOWNLOADING {model.upper()}')