mirror of https://github.com/skygpu/skynet.git
Add wait time on pinner and autocreate ipfs node directories on startup
parent a85518152a
commit 303ed7b24f

@@ -1,6 +1,7 @@
 #!/usr/bin/python
 
 import os
+import time
 import json
 import logging
 import random
@@ -376,21 +377,23 @@ def telegram(
 
 
 @run.command()
-@click.option('--loglevel', '-l', default='warning', help='logging level')
+@click.option('--loglevel', '-l', default='INFO', help='logging level')
 @click.option(
     '--container', '-c', default='ipfs_host')
-def pinner(loglevel, container):
+@click.option(
+    '--hyperion-url', '-n', default='http://127.0.0.1:42001')
+def pinner(loglevel, container, hyperion_url):
     logging.basicConfig(level=loglevel)
     dclient = docker.from_env()
 
-    container = dclient.containers.get(conatiner)
+    container = dclient.containers.get(container)
     ipfs_node = IPFSDocker(container)
     hyperion = HyperionAPI(hyperion_url)
 
     last_pinned: dict[str, datetime] = {}
 
     def cleanup_pinned(now: datetime):
-        for cid in last_pinned.keys():
+        for cid in set(last_pinned.keys()):
             ts = last_pinned[cid]
             if now - ts > timedelta(minutes=1):
                 del last_pinned[cid]
@@ -398,7 +401,7 @@ def pinner(loglevel, container):
     try:
         while True:
             # get all submits in the last minute
-            now = dateimte.now()
+            now = datetime.now()
             half_min_ago = now - timedelta(seconds=30)
             submits = hyperion.get_actions(
                 account='telos.gpu',
@@ -422,7 +425,11 @@ def pinner(loglevel, container):
 
                 ipfs_node.pin(cid)
 
                 logging.info(f'pinned {cid}')
 
+            cleanup_pinned(now)
+
+            time.sleep(1)
+
     except KeyboardInterrupt:
         ...
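
For context, a minimal sketch of the polling behaviour the pinner gains with this change: fetch newly submitted CIDs, pin the ones not seen in the last minute, prune the dedup map, then wait a second before the next pass. The names pin_loop, get_new_cids and pin_cid are hypothetical stand-ins for the CLI command, hyperion.get_actions() and ipfs_node.pin() from the diff above, not code from the repo.

# Illustrative sketch only; the real command wires these callables to
# HyperionAPI and IPFSDocker as shown in the diff.
import time
from datetime import datetime, timedelta
from typing import Callable, Iterable


def pin_loop(
    get_new_cids: Callable[[], Iterable[str]],
    pin_cid: Callable[[str], None],
    wait: float = 1.0
):
    last_pinned: dict[str, datetime] = {}

    def cleanup_pinned(now: datetime):
        # iterate over a copy of the keys so entries can be deleted mid-loop
        for cid in set(last_pinned.keys()):
            if now - last_pinned[cid] > timedelta(minutes=1):
                del last_pinned[cid]

    try:
        while True:
            now = datetime.now()
            for cid in get_new_cids():
                if cid not in last_pinned:
                    pin_cid(cid)
                    last_pinned[cid] = now

            cleanup_pinned(now)
            time.sleep(wait)  # the new wait, so the loop does not spin flat out

    except KeyboardInterrupt:
        ...

Iterating over set(last_pinned.keys()) instead of the live keys view matters because the loop deletes entries while iterating, which raises RuntimeError on a dict view in Python 3. The remaining hunks below are in the module that defines IPFSDocker and open_ipfs_node().
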
@@ -42,6 +42,14 @@ class IPFSDocker:
 def open_ipfs_node():
     dclient = docker.from_env()
 
+    staging_dir = Path().resolve() / 'ipfs-docker-staging'
+    staging_dir.mkdir(parents=True, exist_ok=True)
+    data_dir = Path().resolve() / 'ipfs-docker-data'
+    data_dir.mkdir(parents=True, exist_ok=True)
+
+    export_target = '/export'
+    data_target = '/data/ipfs'
+
     container = dclient.containers.run(
         'ipfs/go-ipfs:latest',
         name='skynet-ipfs',
@@ -51,25 +59,17 @@ def open_ipfs_node():
             '5001/tcp': ('127.0.0.1', 5001)
         },
         mounts=[
-            Mount(
-                '/export',
-                str(Path().resolve() / 'tmp/ipfs-docker-staging'),
-                'bind'
-            ),
-            Mount(
-                '/data/ipfs',
-                str(Path().resolve() / 'tmp/ipfs-docker-data'),
-                'bind'
-            )
+            Mount(export_target, str(staging_dir), 'bind'),
+            Mount(data_target, str(data_dir), 'bind')
         ],
         detach=True,
         remove=True
     )
     uid = os.getuid()
     gid = os.getgid()
-    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', '/export'])
+    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', export_target])
     assert ec == 0
-    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', '/data/ipfs'])
+    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', data_target])
     assert ec == 0
 
     try:
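
For reference, a minimal sketch of the startup path these two hunks produce: autocreate the host-side staging and data directories if they are missing, bind-mount them into the IPFS container, then chown the mount targets to the invoking user. The image, container name, port mapping, mount targets and chown step mirror the diff above; the start_ipfs_node wrapper itself is illustrative, not the repo's exact open_ipfs_node().

# Illustrative sketch assuming docker-py and a Unix host; condenses the two
# hunks above into one self-contained function.
import os
from pathlib import Path

import docker
from docker.types import Mount


def start_ipfs_node():
    # autocreate the host directories on startup; Path.mkdir() returns None,
    # so keep the Path objects and create the directories in separate calls
    staging_dir = Path().resolve() / 'ipfs-docker-staging'
    data_dir = Path().resolve() / 'ipfs-docker-data'
    staging_dir.mkdir(parents=True, exist_ok=True)
    data_dir.mkdir(parents=True, exist_ok=True)

    export_target = '/export'
    data_target = '/data/ipfs'

    dclient = docker.from_env()
    container = dclient.containers.run(
        'ipfs/go-ipfs:latest',
        name='skynet-ipfs',
        ports={'5001/tcp': ('127.0.0.1', 5001)},
        mounts=[
            Mount(export_target, str(staging_dir), 'bind'),
            Mount(data_target, str(data_dir), 'bind')
        ],
        detach=True,
        remove=True
    )

    # make the bind-mounted directories writable by the invoking user
    uid, gid = os.getuid(), os.getgid()
    for target in (export_target, data_target):
        ec, _ = container.exec_run(['chown', f'{uid}:{gid}', '-R', target])
        assert ec == 0

    return container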