mirror of https://github.com/skygpu/skynet.git
				
				
				
Add wait time on pinner and autocreate ipfs node directories on startup

parent a85518152a
commit 303ed7b24f

@@ -1,6 +1,7 @@
 #!/usr/bin/python

 import os
+import time
 import json
 import logging
 import random

@@ -376,21 +377,23 @@ def telegram(


 @run.command()
-@click.option('--loglevel', '-l', default='warning', help='logging level')
+@click.option('--loglevel', '-l', default='INFO', help='logging level')
 @click.option(
     '--container', '-c', default='ipfs_host')
 @click.option(
     '--hyperion-url', '-n', default='http://127.0.0.1:42001')
-def pinner(loglevel, container):
+def pinner(loglevel, container, hyperion_url):
+    logging.basicConfig(level=loglevel)
     dclient = docker.from_env()

-    container = dclient.containers.get(conatiner)
+    container = dclient.containers.get(container)
     ipfs_node = IPFSDocker(container)
+    hyperion = HyperionAPI(hyperion_url)

     last_pinned: dict[str, datetime] = {}

     def cleanup_pinned(now: datetime):
-        for cid in last_pinned.keys():
+        for cid in set(last_pinned.keys()):
             ts = last_pinned[cid]
             if now - ts > timedelta(minutes=1):
                 del last_pinned[cid]

@@ -398,7 +401,7 @@ def pinner(loglevel, container):
     try:
         while True:
             # get all submits in the last minute
-            now = dateimte.now()
+            now = datetime.now()
             half_min_ago = now - timedelta(seconds=30)
             submits = hyperion.get_actions(
                 account='telos.gpu',

@@ -422,7 +425,11 @@ def pinner(loglevel, container):

                 ipfs_node.pin(cid)

+                logging.info(f'pinned {cid}')
+
             cleanup_pinned(now)

+            time.sleep(1)
+
     except KeyboardInterrupt:
         ...
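
The added time.sleep(1) is the "wait time" from the commit title: it keeps the while-loop from busy-polling Hyperion between iterations. The switch to set(last_pinned.keys()) matters because cleanup_pinned deletes entries while looping; a minimal standalone sketch of that point, with invented CIDs and nothing else taken from the repo:

    from datetime import datetime, timedelta

    last_pinned = {
        'cid-old': datetime.now() - timedelta(minutes=2),
        'cid-new': datetime.now(),
    }

    now = datetime.now()
    # Deleting from a dict while iterating its live keys view raises
    # "RuntimeError: dictionary changed size during iteration", so the
    # keys are copied into a set first and deletion becomes safe.
    for cid in set(last_pinned.keys()):
        if now - last_pinned[cid] > timedelta(minutes=1):
            del last_pinned[cid]
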
@@ -42,6 +42,14 @@ class IPFSDocker:
 def open_ipfs_node():
     dclient = docker.from_env()

+    staging_dir = (Path().resolve() / 'ipfs-docker-staging').mkdir(
+        parents=True, exist_ok=True)
+    data_dir = (Path().resolve() / 'ipfs-docker-data').mkdir(
+        parents=True, exist_ok=True)
+
+    export_target = '/export'
+    data_target = '/data/ipfs'
+
     container = dclient.containers.run(
         'ipfs/go-ipfs:latest',
         name='skynet-ipfs',

@@ -51,25 +59,17 @@ def open_ipfs_node():
             '5001/tcp': ('127.0.0.1', 5001)
         },
         mounts=[
-            Mount(
-                '/export',
-                str(Path().resolve() / 'tmp/ipfs-docker-staging'),
-                'bind'
-            ),
-            Mount(
-                '/data/ipfs',
-                str(Path().resolve() / 'tmp/ipfs-docker-data'),
-                'bind'
-            )
+            Mount(export_target, str(staging_dir), 'bind'),
+            Mount(data_target, str(data_dir), 'bind')
         ],
         detach=True,
         remove=True
     )
     uid = os.getuid()
     gid = os.getgid()
-    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', '/export'])
+    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', export_target])
     assert ec == 0
-    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', '/data/ipfs'])
+    ec, out = container.exec_run(['chown', f'{uid}:{gid}', '-R', data_target])
     assert ec == 0
     try:

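
One caveat on the new directory setup: pathlib.Path.mkdir() returns None, so as committed staging_dir and data_dir are None and the bind mounts receive the literal source string 'None'. A sketch of the presumably intended shape, reusing the diff's names but not taken from the repo, would create the directories and keep the Path objects for the mounts:

    from pathlib import Path
    from docker.types import Mount

    staging_dir = Path().resolve() / 'ipfs-docker-staging'
    data_dir = Path().resolve() / 'ipfs-docker-data'
    # mkdir() returns None, so call it separately instead of
    # assigning its result to the variable used by the mounts
    staging_dir.mkdir(parents=True, exist_ok=True)
    data_dir.mkdir(parents=True, exist_ok=True)

    export_target = '/export'
    data_target = '/data/ipfs'

    mounts = [
        Mount(export_target, str(staging_dir), 'bind'),
        Mount(data_target, str(data_dir), 'bind'),
    ]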