mirror of https://github.com/skygpu/skynet.git
Merge pull request #23 from guilledk/full_http_ipfs
Async IPFS apis, drop docker on worker & other nice sprites ☠ (tag v0.1a11)
commit 82458bb4c8
@@ -1,4 +1,4 @@
-from python:3.10.0
+from python:3.11

 env DEBIAN_FRONTEND=noninteractive
@@ -1,22 +0,0 @@
FROM ubuntu:22.04

ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update && apt-get install -y wget

# install eosio tools
RUN wget https://github.com/AntelopeIO/leap/releases/download/v4.0.1/leap_4.0.1-ubuntu22.04_amd64.deb

RUN apt-get install -y ./leap_4.0.1-ubuntu22.04_amd64.deb

RUN mkdir -p /root/nodeos
WORKDIR /root/nodeos
COPY config.ini config.ini
COPY contracts contracts
COPY genesis genesis

EXPOSE 42000
EXPOSE 29876
EXPOSE 39999

CMD sleep 9999999999
@@ -1,52 +0,0 @@
agent-name = Telos Skynet Testnet

wasm-runtime = eos-vm-jit
eos-vm-oc-compile-threads = 4
eos-vm-oc-enable = true

chain-state-db-size-mb = 65536
enable-account-queries = true

http-server-address = 0.0.0.0:42000
access-control-allow-origin = *
contracts-console = true
http-validate-host = false
p2p-listen-endpoint = 0.0.0.0:29876
p2p-server-address = 0.0.0.0:29876
verbose-http-errors = true

state-history-endpoint = 0.0.0.0:39999
trace-history = true
chain-state-history = true
trace-history-debug-mode = true
state-history-dir = state-history

sync-fetch-span = 1600
max-clients = 250

signature-provider = EOS5fLreY5Zq5owBhmNJTgQaLqQ4ufzXSTpStQakEyfxNFuUEgNs1=KEY:5JnvSc6pewpHHuUHwvbJopsew6AKwiGnexwDRc2Pj2tbdw6iML9

disable-subjective-billing = true
max-transaction-time = 500
read-only-read-window-time-us = 600000

abi-serializer-max-time-ms = 2000000

p2p-max-nodes-per-host = 1

connection-cleanup-period = 30
allowed-connection = any
http-max-response-time-ms = 100000
max-body-size = 10000000

enable-stale-production = true


plugin = eosio::http_plugin
plugin = eosio::chain_plugin
plugin = eosio::chain_api_plugin
plugin = eosio::net_api_plugin
plugin = eosio::net_plugin
plugin = eosio::producer_plugin
plugin = eosio::producer_api_plugin
plugin = eosio::state_history_plugin
@@ -1,360 +0,0 @@
{
    "____comment": "This file was generated with eosio-abigen. DO NOT EDIT Thu Apr 14 07:49:43 2022",
    "version": "eosio::abi/1.1",
    "structs": [
        {"name": "action", "base": "", "fields": [
            {"name": "account", "type": "name"},
            {"name": "name", "type": "name"},
            {"name": "authorization", "type": "permission_level[]"},
            {"name": "data", "type": "bytes"}
        ]},
        {"name": "approval", "base": "", "fields": [
            {"name": "level", "type": "permission_level"},
            {"name": "time", "type": "time_point"}
        ]},
        {"name": "approvals_info", "base": "", "fields": [
            {"name": "version", "type": "uint8"},
            {"name": "proposal_name", "type": "name"},
            {"name": "requested_approvals", "type": "approval[]"},
            {"name": "provided_approvals", "type": "approval[]"}
        ]},
        {"name": "approve", "base": "", "fields": [
            {"name": "proposer", "type": "name"},
            {"name": "proposal_name", "type": "name"},
            {"name": "level", "type": "permission_level"},
            {"name": "proposal_hash", "type": "checksum256$"}
        ]},
        {"name": "cancel", "base": "", "fields": [
            {"name": "proposer", "type": "name"},
            {"name": "proposal_name", "type": "name"},
            {"name": "canceler", "type": "name"}
        ]},
        {"name": "exec", "base": "", "fields": [
            {"name": "proposer", "type": "name"},
            {"name": "proposal_name", "type": "name"},
            {"name": "executer", "type": "name"}
        ]},
        {"name": "extension", "base": "", "fields": [
            {"name": "type", "type": "uint16"},
            {"name": "data", "type": "bytes"}
        ]},
        {"name": "invalidate", "base": "", "fields": [
            {"name": "account", "type": "name"}
        ]},
        {"name": "invalidation", "base": "", "fields": [
            {"name": "account", "type": "name"},
            {"name": "last_invalidation_time", "type": "time_point"}
        ]},
        {"name": "old_approvals_info", "base": "", "fields": [
            {"name": "proposal_name", "type": "name"},
            {"name": "requested_approvals", "type": "permission_level[]"},
            {"name": "provided_approvals", "type": "permission_level[]"}
        ]},
        {"name": "permission_level", "base": "", "fields": [
            {"name": "actor", "type": "name"},
            {"name": "permission", "type": "name"}
        ]},
        {"name": "proposal", "base": "", "fields": [
            {"name": "proposal_name", "type": "name"},
            {"name": "packed_transaction", "type": "bytes"}
        ]},
        {"name": "propose", "base": "", "fields": [
            {"name": "proposer", "type": "name"},
            {"name": "proposal_name", "type": "name"},
            {"name": "requested", "type": "permission_level[]"},
            {"name": "trx", "type": "transaction"}
        ]},
        {"name": "transaction", "base": "transaction_header", "fields": [
            {"name": "context_free_actions", "type": "action[]"},
            {"name": "actions", "type": "action[]"},
            {"name": "transaction_extensions", "type": "extension[]"}
        ]},
        {"name": "transaction_header", "base": "", "fields": [
            {"name": "expiration", "type": "time_point_sec"},
            {"name": "ref_block_num", "type": "uint16"},
            {"name": "ref_block_prefix", "type": "uint32"},
            {"name": "max_net_usage_words", "type": "varuint32"},
            {"name": "max_cpu_usage_ms", "type": "uint8"},
            {"name": "delay_sec", "type": "varuint32"}
        ]},
        {"name": "unapprove", "base": "", "fields": [
            {"name": "proposer", "type": "name"},
            {"name": "proposal_name", "type": "name"},
            {"name": "level", "type": "permission_level"}
        ]}
    ],
    "types": [],
    "actions": [
        {"name": "approve", "type": "approve", "ricardian_contract": ""},
        {"name": "cancel", "type": "cancel", "ricardian_contract": ""},
        {"name": "exec", "type": "exec", "ricardian_contract": ""},
        {"name": "invalidate", "type": "invalidate", "ricardian_contract": ""},
        {"name": "propose", "type": "propose", "ricardian_contract": ""},
        {"name": "unapprove", "type": "unapprove", "ricardian_contract": ""}
    ],
    "tables": [
        {"name": "approvals", "type": "old_approvals_info", "index_type": "i64", "key_names": [], "key_types": []},
        {"name": "approvals2", "type": "approvals_info", "index_type": "i64", "key_names": [], "key_types": []},
        {"name": "invals", "type": "invalidation", "index_type": "i64", "key_names": [], "key_types": []},
        {"name": "proposal", "type": "proposal", "index_type": "i64", "key_names": [], "key_types": []}
    ],
    "ricardian_clauses": [],
    "variants": [],
    "abi_extensions": []
}
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,185 +0,0 @@
{
    "____comment": "This file was generated with eosio-abigen. DO NOT EDIT ",
    "version": "eosio::abi/1.1",
    "types": [],
    "structs": [
        {"name": "account", "base": "", "fields": [
            {"name": "balance", "type": "asset"}
        ]},
        {"name": "close", "base": "", "fields": [
            {"name": "owner", "type": "name"},
            {"name": "symbol", "type": "symbol"}
        ]},
        {"name": "create", "base": "", "fields": [
            {"name": "issuer", "type": "name"},
            {"name": "maximum_supply", "type": "asset"}
        ]},
        {"name": "currency_stats", "base": "", "fields": [
            {"name": "supply", "type": "asset"},
            {"name": "max_supply", "type": "asset"},
            {"name": "issuer", "type": "name"}
        ]},
        {"name": "issue", "base": "", "fields": [
            {"name": "to", "type": "name"},
            {"name": "quantity", "type": "asset"},
            {"name": "memo", "type": "string"}
        ]},
        {"name": "open", "base": "", "fields": [
            {"name": "owner", "type": "name"},
            {"name": "symbol", "type": "symbol"},
            {"name": "ram_payer", "type": "name"}
        ]},
        {"name": "retire", "base": "", "fields": [
            {"name": "quantity", "type": "asset"},
            {"name": "memo", "type": "string"}
        ]},
        {"name": "transfer", "base": "", "fields": [
            {"name": "from", "type": "name"},
            {"name": "to", "type": "name"},
            {"name": "quantity", "type": "asset"},
            {"name": "memo", "type": "string"}
        ]}
    ],
    "actions": [
        {"name": "close", "type": "close",
         "ricardian_contract": "---\nspec_version: \"0.2.0\"\ntitle: Close Token Balance\nsummary: 'Close {{nowrap owner}}’s zero quantity balance'\nicon: http://127.0.0.1/ricardian_assets/eosio.contracts/icons/token.png#207ff68b0406eaa56618b08bda81d6a0954543f36adc328ab3065f31a5c5d654\n---\n\n{{owner}} agrees to close their zero quantity balance for the {{symbol_to_symbol_code symbol}} token.\n\nRAM will be refunded to the RAM payer of the {{symbol_to_symbol_code symbol}} token balance for {{owner}}."},
        {"name": "create", "type": "create",
         "ricardian_contract": "---\nspec_version: \"0.2.0\"\ntitle: Create New Token\nsummary: 'Create a new token'\nicon: http://127.0.0.1/ricardian_assets/eosio.contracts/icons/token.png#207ff68b0406eaa56618b08bda81d6a0954543f36adc328ab3065f31a5c5d654\n---\n\n{{$action.account}} agrees to create a new token with symbol {{asset_to_symbol_code maximum_supply}} to be managed by {{issuer}}.\n\nThis action will not result any any tokens being issued into circulation.\n\n{{issuer}} will be allowed to issue tokens into circulation, up to a maximum supply of {{maximum_supply}}.\n\nRAM will deducted from {{$action.account}}’s resources to create the necessary records."},
        {"name": "issue", "type": "issue",
         "ricardian_contract": "---\nspec_version: \"0.2.0\"\ntitle: Issue Tokens into Circulation\nsummary: 'Issue {{nowrap quantity}} into circulation and transfer into {{nowrap to}}’s account'\nicon: http://127.0.0.1/ricardian_assets/eosio.contracts/icons/token.png#207ff68b0406eaa56618b08bda81d6a0954543f36adc328ab3065f31a5c5d654\n---\n\nThe token manager agrees to issue {{quantity}} into circulation, and transfer it into {{to}}’s account.\n\n{{#if memo}}There is a memo attached to the transfer stating:\n{{memo}}\n{{/if}}\n\nIf {{to}} does not have a balance for {{asset_to_symbol_code quantity}}, or the token manager does not have a balance for {{asset_to_symbol_code quantity}}, the token manager will be designated as the RAM payer of the {{asset_to_symbol_code quantity}} token balance for {{to}}. As a result, RAM will be deducted from the token manager’s resources to create the necessary records.\n\nThis action does not allow the total quantity to exceed the max allowed supply of the token."},
        {"name": "open", "type": "open",
         "ricardian_contract": "---\nspec_version: \"0.2.0\"\ntitle: Open Token Balance\nsummary: 'Open a zero quantity balance for {{nowrap owner}}'\nicon: http://127.0.0.1/ricardian_assets/eosio.contracts/icons/token.png#207ff68b0406eaa56618b08bda81d6a0954543f36adc328ab3065f31a5c5d654\n---\n\n{{ram_payer}} agrees to establish a zero quantity balance for {{owner}} for the {{symbol_to_symbol_code symbol}} token.\n\nIf {{owner}} does not have a balance for {{symbol_to_symbol_code symbol}}, {{ram_payer}} will be designated as the RAM payer of the {{symbol_to_symbol_code symbol}} token balance for {{owner}}. As a result, RAM will be deducted from {{ram_payer}}’s resources to create the necessary records."},
        {"name": "retire", "type": "retire",
         "ricardian_contract": "---\nspec_version: \"0.2.0\"\ntitle: Remove Tokens from Circulation\nsummary: 'Remove {{nowrap quantity}} from circulation'\nicon: http://127.0.0.1/ricardian_assets/eosio.contracts/icons/token.png#207ff68b0406eaa56618b08bda81d6a0954543f36adc328ab3065f31a5c5d654\n---\n\nThe token manager agrees to remove {{quantity}} from circulation, taken from their own account.\n\n{{#if memo}} There is a memo attached to the action stating:\n{{memo}}\n{{/if}}"},
        {"name": "transfer", "type": "transfer",
         "ricardian_contract": "---\nspec_version: \"0.2.0\"\ntitle: Transfer Tokens\nsummary: 'Send {{nowrap quantity}} from {{nowrap from}} to {{nowrap to}}'\nicon: http://127.0.0.1/ricardian_assets/eosio.contracts/icons/transfer.png#5dfad0df72772ee1ccc155e670c1d124f5c5122f1d5027565df38b418042d1dd\n---\n\n{{from}} agrees to send {{quantity}} to {{to}}.\n\n{{#if memo}}There is a memo attached to the transfer stating:\n{{memo}}\n{{/if}}\n\nIf {{from}} is not already the RAM payer of their {{asset_to_symbol_code quantity}} token balance, {{from}} will be designated as such. As a result, RAM will be deducted from {{from}}’s resources to refund the original RAM payer.\n\nIf {{to}} does not have a balance for {{asset_to_symbol_code quantity}}, {{from}} will be designated as the RAM payer of the {{asset_to_symbol_code quantity}} token balance for {{to}}. As a result, RAM will be deducted from {{from}}’s resources to create the necessary records."}
    ],
    "tables": [
        {"name": "accounts", "type": "account", "index_type": "i64", "key_names": [], "key_types": []},
        {"name": "stat", "type": "currency_stats", "index_type": "i64", "key_names": [], "key_types": []}
    ],
    "ricardian_clauses": [],
    "variants": []
}
Binary file not shown.
@@ -1,130 +0,0 @@
{
    "____comment": "This file was generated with eosio-abigen. DO NOT EDIT Thu Apr 14 07:49:40 2022",
    "version": "eosio::abi/1.1",
    "structs": [
        {"name": "action", "base": "", "fields": [
            {"name": "account", "type": "name"},
            {"name": "name", "type": "name"},
            {"name": "authorization", "type": "permission_level[]"},
            {"name": "data", "type": "bytes"}
        ]},
        {"name": "exec", "base": "", "fields": [
            {"name": "executer", "type": "name"},
            {"name": "trx", "type": "transaction"}
        ]},
        {"name": "extension", "base": "", "fields": [
            {"name": "type", "type": "uint16"},
            {"name": "data", "type": "bytes"}
        ]},
        {"name": "permission_level", "base": "", "fields": [
            {"name": "actor", "type": "name"},
            {"name": "permission", "type": "name"}
        ]},
        {"name": "transaction", "base": "transaction_header", "fields": [
            {"name": "context_free_actions", "type": "action[]"},
            {"name": "actions", "type": "action[]"},
            {"name": "transaction_extensions", "type": "extension[]"}
        ]},
        {"name": "transaction_header", "base": "", "fields": [
            {"name": "expiration", "type": "time_point_sec"},
            {"name": "ref_block_num", "type": "uint16"},
            {"name": "ref_block_prefix", "type": "uint32"},
            {"name": "max_net_usage_words", "type": "varuint32"},
            {"name": "max_cpu_usage_ms", "type": "uint8"},
            {"name": "delay_sec", "type": "varuint32"}
        ]}
    ],
    "types": [],
    "actions": [
        {"name": "exec", "type": "exec", "ricardian_contract": ""}
    ],
    "tables": [],
    "ricardian_clauses": [],
    "variants": [],
    "abi_extensions": []
}
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
@@ -1,25 +0,0 @@
{
    "initial_timestamp": "2023-05-22T00:00:00.000",
    "initial_key": "EOS5fLreY5Zq5owBhmNJTgQaLqQ4ufzXSTpStQakEyfxNFuUEgNs1",
    "initial_configuration": {
        "max_block_net_usage": 1048576,
        "target_block_net_usage_pct": 1000,
        "max_transaction_net_usage": 1048575,
        "base_per_transaction_net_usage": 12,
        "net_usage_leeway": 500,
        "context_free_discount_net_usage_num": 20,
        "context_free_discount_net_usage_den": 100,
        "max_block_cpu_usage": 200000,
        "target_block_cpu_usage_pct": 1000,
        "max_transaction_cpu_usage": 150000,
        "min_transaction_cpu_usage": 100,
        "max_transaction_lifetime": 3600,
        "deferred_trx_expiration_window": 600,
        "max_transaction_delay": 3888000,
        "max_inline_action_size": 4096,
        "max_inline_action_depth": 4,
        "max_authority_depth": 6
    }
}
@@ -1,3 +1,4 @@
 [pytest]
 log_cli = True
 log_level = info
+trio_mode = True
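The added trio_mode = True is the pytest-trio option that lets plain async def tests be collected and run on trio without per-test markers, which is what the new IPFS tests further down rely on (assuming pytest-trio is the plugin in use). A minimal hypothetical test that would now run as-is:

    import trio


    async def test_sleeps_briefly():
        # no @pytest.mark.trio marker needed once trio_mode is enabled
        await trio.sleep(0.01)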
@@ -3,9 +3,10 @@ account = testworkerX
 permission = active
 key = 5Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

-node_url = https://skynet.ancap.tech
-hyperion_url = https://skynet.ancap.tech
-ipfs_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv
+node_url = https://testnet.skygpu.net
+hyperion_url = https://testnet.skygpu.net
+ipfs_gateway_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv
+ipfs_url = http://127.0.0.1:5001

 hf_home = hf_home
 hf_token = hf_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx
@@ -17,9 +18,10 @@ account = telegram
 permission = active
 key = 5Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

-node_url = https://skynet.ancap.tech
-hyperion_url = https://skynet.ancap.tech
-ipfs_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv
+node_url = https://testnet.skygpu.net
+hyperion_url = https://testnet.skygpu.net
+ipfs_gateway_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv
+ipfs_url = http://127.0.0.1:5001

 token = XXXXXXXXXX:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
@@ -28,8 +30,9 @@ account = discord
 permission = active
 key = 5Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

-node_url = https://skynet.ancap.tech
-hyperion_url = https://skynet.ancap.tech
-ipfs_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv
+node_url = https://testnet.skygpu.net
+hyperion_url = https://testnet.skygpu.net
+ipfs_gateway_url = /ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv
+ipfs_url = http://127.0.0.1:5001

 token = XXXXXXXXXX:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
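The point of the endpoint split above: ipfs_url now names the HTTP RPC API of a local IPFS (Kubo) daemon, while ipfs_gateway_url keeps the multiaddr of the remote skynet peer to swarm-connect to. A quick manual check of the two values, sketched with plain requests against the stock Kubo RPC (the values are copied from the example config; this snippet is not part of the repo):

    import requests

    ipfs_url = 'http://127.0.0.1:5001'
    ipfs_gateway_url = (
        '/ip4/169.197.140.154/tcp/4001/p2p/'
        '12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv')

    # ask the local daemon to connect to the remote gateway peer
    resp = requests.post(
        f'{ipfs_url}/api/v0/swarm/connect',
        params={'arg': ipfs_gateway_url})
    print(resp.json())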
@@ -16,7 +16,7 @@ from leap.cleos import CLEOS
 from leap.sugar import collect_stdout, Name, asset_from_str
 from leap.hyperion import HyperionAPI

-from skynet.ipfs import IPFSHTTP
+from skynet.ipfs import AsyncIPFSHTTP


 from .db import open_new_database
@@ -125,8 +125,7 @@ def enqueue(
     key, account, permission = load_account_info(
         'user', key, account, permission)

-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)

     with open_cleos(node_url, key=key) as cleos:
         async def enqueue_n_jobs():
@@ -176,8 +175,7 @@ def clean(
     key, account, permission = load_account_info(
         'user', key, account, permission)

-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)

     logging.basicConfig(level=loglevel)
     cleos = CLEOS(None, None, url=node_url, remote=node_url)
@@ -195,8 +193,7 @@ def clean(
 @click.option(
     '--node-url', '-n', default='https://skynet.ancap.tech')
 def queue(node_url: str):
-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)
     resp = requests.post(
         f'{node_url}/v1/chain/get_table_rows',
         json={
@@ -213,8 +210,7 @@ def queue(node_url: str):
     '--node-url', '-n', default='https://skynet.ancap.tech')
 @click.argument('request-id')
 def status(node_url: str, request_id: int):
-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)
     resp = requests.post(
         f'{node_url}/v1/chain/get_table_rows',
         json={
@@ -246,18 +242,20 @@ def dequeue(
     key, account, permission = load_account_info(
         'user', key, account, permission)

-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)

-    with open_cleos(node_url, key=key) as cleos:
-        res = trio.run(cleos.a_push_action,
+    cleos = CLEOS(None, None, url=node_url, remote=node_url)
+    res = trio.run(
+        partial(
+            cleos.a_push_action,
             'telos.gpu',
             'dequeue',
             {
                 'user': Name(account),
                 'request_id': int(request_id),
             },
-            account, key, permission,
+            account, key, permission=permission
+        )
     )
     print(res)
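The trio.run(partial(...)) shape that the CLI commands now use exists because trio.run only forwards positional arguments to the coroutine function, so keyword arguments such as permission=... have to be pre-bound with functools.partial. A stand-alone sketch of just that pattern (push_action is a hypothetical stand-in, not the real cleos API):

    from functools import partial

    import trio


    async def push_action(contract, action, data, account, key, permission='active'):
        # stand-in that just prints instead of signing and broadcasting
        print(contract, action, data, account, key, permission)


    trio.run(
        partial(
            push_action,
            'telos.gpu', 'dequeue', {'request_id': 1},
            'someaccount', '5Kxxxx', permission='active'))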
@@ -285,18 +283,19 @@ def config(
 ):
     key, account, permission = load_account_info(
         'user', key, account, permission)

-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
-    with open_cleos(node_url, key=key) as cleos:
-        res = trio.run(cleos.a_push_action,
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)
+    cleos = CLEOS(None, None, url=node_url, remote=node_url)
+    res = trio.run(
+        partial(
+            cleos.a_push_action,
             'telos.gpu',
             'config',
             {
                 'token_contract': token_contract,
                 'token_symbol': token_symbol,
             },
-            account, key, permission,
+            account, key, permission=permission
+        )
     )
     print(res)
@@ -321,12 +320,12 @@ def deposit(
     key, account, permission = load_account_info(
         'user', key, account, permission)

-    node_url, _, _ = load_endpoint_info(
-        'user', node_url, None, None)
+    node_url, _, _, _ = load_endpoint_info('user', node_url=node_url)

     with open_cleos(node_url, key=key) as cleos:
-        res = trio.run(cleos.a_push_action,
-            'eosio.token',
+        res = trio.run(
+            partial(
+                cleos.a_push_action,
+                'telos.gpu',
             'transfer',
             {
                 'sender': Name(account),
@@ -334,7 +333,8 @@ def deposit(
                 'amount': asset_from_str(quantity),
                 'memo': f'{account} transferred {quantity} to telos.gpu'
             },
-            account, key, permission,
+            account, key, permission=permission
+        )
         )
         print(res)
@@ -388,7 +388,9 @@ def dgpu(
 @click.option(
     '--node-url', '-n', default=f'https://testnet.{DEFAULT_DOMAIN}')
 @click.option(
-    '--ipfs-url', '-i', default=DEFAULT_IPFS_REMOTE)
+    '--ipfs-url', '-i', default=DEFAULT_IPFS_LOCAL)
+@click.option(
+    '--ipfs-gateway-url', '-I', default=None)
 @click.option(
     '--db-host', '-h', default='localhost:5432')
 @click.option(
@@ -402,6 +404,7 @@ def telegram(
     key: str | None,
     hyperion_url: str,
     ipfs_url: str,
+    ipfs_gateway_url: str,
     node_url: str,
     db_host: str,
     db_user: str,
@@ -414,8 +417,8 @@ def telegram(
     key, account, permission = load_account_info(
         'telegram', key, account, permission)

-    node_url, _, ipfs_url = load_endpoint_info(
-        'telegram', node_url, None, None)
+    node_url, _, ipfs_gateway_url, ipfs_url = load_endpoint_info(
+        'telegram', node_url=node_url, ipfs_gateway_url=ipfs_gateway_url)

     async def _async_main():
         frontend = SkynetTelegramFrontend(
@@ -425,7 +428,8 @@ def telegram(
             node_url,
             hyperion_url,
             db_host, db_user, db_pass,
-            remote_ipfs_node=ipfs_url,
+            ipfs_url,
+            remote_ipfs_node=ipfs_gateway_url,
             key=key
         )
@@ -449,7 +453,9 @@ def telegram(
 @click.option(
     '--node-url', '-n', default=f'https://testnet.{DEFAULT_DOMAIN}')
 @click.option(
-    '--ipfs-url', '-i', default=DEFAULT_IPFS_REMOTE)
+    '--ipfs-url', '-i', default=DEFAULT_IPFS_LOCAL)
+@click.option(
+    '--ipfs-gateway-url', '-I', default=DEFAULT_IPFS_REMOTE)
 @click.option(
     '--db-host', '-h', default='localhost:5432')
 @click.option(
@@ -463,6 +469,7 @@ def discord(
     key: str | None,
     hyperion_url: str,
     ipfs_url: str,
+    ipfs_gateway_url: str,
     node_url: str,
     db_host: str,
     db_user: str,
@@ -475,8 +482,8 @@ def discord(
     key, account, permission = load_account_info(
         'discord', key, account, permission)

-    node_url, _, ipfs_url = load_endpoint_info(
-        'discord', node_url, None, None)
+    node_url, _, ipfs_gateway_url, ipfs_url = load_endpoint_info(
+        'discord', node_url=node_url, ipfs_gateway_url=ipfs_gateway_url)

     async def _async_main():
         frontend = SkynetDiscordFrontend(
@@ -486,7 +493,8 @@ def discord(
             node_url,
             hyperion_url,
             db_host, db_user, db_pass,
-            remote_ipfs_node=ipfs_url,
+            ipfs_url,
+            remote_ipfs_node=ipfs_gateway_url,
             key=key
         )
@@ -516,7 +524,7 @@ def pinner(loglevel, ipfs_rpc, hyperion_url):
     from .ipfs.pinner import SkynetPinner

     logging.basicConfig(level=loglevel)
-    ipfs_node = IPFSHTTP(ipfs_rpc)
+    ipfs_node = AsyncIPFSHTTP(ipfs_rpc)
     hyperion = HyperionAPI(hyperion_url)

     pinner = SkynetPinner(hyperion, ipfs_node)
@@ -93,6 +93,7 @@ def load_endpoint_info(
     node_url: str | None = None,
     hyperion_url: str | None = None,
     ipfs_url: str | None = None,
+    ipfs_gateway_url: str | None = None,
     file_path=DEFAULT_CONFIG_PATH
 ):
     config = load_skynet_ini(file_path=file_path)
@@ -110,4 +111,7 @@
     if not ipfs_url and 'ipfs_url' in sub_config:
         ipfs_url = sub_config['ipfs_url']

-    return node_url, hyperion_url, ipfs_url
+    if not ipfs_gateway_url and 'ipfs_gateway_url' in sub_config:
+        ipfs_gateway_url = sub_config['ipfs_gateway_url']
+
+    return node_url, hyperion_url, ipfs_gateway_url, ipfs_url
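Putting the two config.py hunks together, the helper now hands back a four-tuple instead of three. A hedged, self-contained sketch of the overall shape (the real code goes through load_skynet_ini(); the parameter and section names below are assumptions made only for illustration):

    from configparser import ConfigParser


    def load_endpoint_info(section: str, node_url=None, hyperion_url=None,
                           ipfs_url=None, ipfs_gateway_url=None,
                           file_path='skynet.ini'):
        # explicit CLI values win; otherwise fall back to the matching ini section
        ini = ConfigParser()
        ini.read(file_path)
        sub_config = ini[section]

        node_url = node_url or sub_config.get('node_url')
        hyperion_url = hyperion_url or sub_config.get('hyperion_url')
        ipfs_url = ipfs_url or sub_config.get('ipfs_url')
        ipfs_gateway_url = ipfs_gateway_url or sub_config.get('ipfs_gateway_url')

        return node_url, hyperion_url, ipfs_gateway_url, ipfs_url


    # callers now unpack four values, e.g.:
    # node_url, _, ipfs_gateway_url, ipfs_url = load_endpoint_info('telegram')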
@@ -185,6 +185,7 @@ CONFIG_ATTRS = [
 DEFAULT_DOMAIN = 'skygpu.net'

 DEFAULT_IPFS_REMOTE = '/ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv'
+DEFAULT_IPFS_LOCAL = 'http://127.0.0.1:5001'

 TG_MAX_WIDTH = 1280
 TG_MAX_HEIGHT = 1280
@@ -73,7 +73,7 @@ class SkynetDGPUDaemon:
                 img_sha, img_raw = self.mm.compute_one(
                     body['method'], body['params'], binary=binary)

-                ipfs_hash = self.conn.publish_on_ipfs( img_raw)
+                ipfs_hash = await self.conn.publish_on_ipfs(img_raw)

                 await self.conn.submit_work(rid, request_hash, img_sha, ipfs_hash)
                 break
@@ -3,13 +3,13 @@
 from functools import partial
 import io
 import json
+from pathlib import Path
 import time
 import logging

+import asks
 from PIL import Image

-from contextlib import ExitStack
 from contextlib import asynccontextmanager as acm

 from leap.cleos import CLEOS
@@ -17,8 +17,7 @@ from leap.sugar import Checksum256, Name, asset_from_str
 from skynet.constants import DEFAULT_DOMAIN

 from skynet.dgpu.errors import DGPUComputeError
-from skynet.ipfs import get_ipfs_file
-from skynet.ipfs.docker import open_ipfs_node
+from skynet.ipfs import AsyncIPFSHTTP, get_ipfs_file


 async def failable(fn: partial, ret_fail=None):
@@ -38,28 +37,17 @@ class SkynetGPUConnector:
         self.account = Name(config['account'])
         self.permission = config['permission']
         self.key = config['key']

         self.node_url = config['node_url']
         self.hyperion_url = config['hyperion_url']
-        self.ipfs_url = config['ipfs_url']

         self.cleos = CLEOS(
             None, None, self.node_url, remote=self.node_url)

-        self._exit_stack = ExitStack()
-
-    def connect(self):
-        self.ipfs_node = self._exit_stack.enter_context(
-            open_ipfs_node())
-
-    def disconnect(self):
-        self._exit_stack.close()
-
-    @acm
-    async def open(self):
-        self.connect()
-        yield self
-        self.disconnect()
+        self.ipfs_gateway_url = config['ipfs_gateway_url']
+        self.ipfs_url = config['ipfs_url']
+
+        self.ipfs_client = AsyncIPFSHTTP(self.ipfs_url)

     # blockchain helpers
@@ -206,21 +194,23 @@ class SkynetGPUConnector:

     # IPFS helpers

-    def publish_on_ipfs(self, raw_img: bytes):
+    async def publish_on_ipfs(self, raw_img: bytes):
         logging.info('publish_on_ipfs')
         img = Image.open(io.BytesIO(raw_img))
-        img.save(f'ipfs-docker-staging/image.png')
+        img.save('ipfs-docker-staging/image.png')

         # check peer connections, reconnect to skynet gateway if not
-        peers = self.ipfs_node.check_connect()
-        if self.ipfs_url not in peers:
-            self.ipfs_node.connect(self.ipfs_url)
+        gateway_id = Path(self.ipfs_gateway_url).name
+        peers = await self.ipfs_client.peers()
+        if gateway_id not in [p['Peer'] for p in peers]:
+            await self.ipfs_client.connect(self.ipfs_gateway_url)

-        ipfs_hash = self.ipfs_node.add('image.png')
+        file_info = await self.ipfs_client.add(Path('ipfs-docker-staging/image.png'))
+        file_cid = file_info['Hash']

-        self.ipfs_node.pin(ipfs_hash)
+        await self.ipfs_client.pin(file_cid)

-        return ipfs_hash
+        return file_cid

     async def get_input_data(self, ipfs_hash: str) -> bytes:
         if ipfs_hash == '':
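One detail worth spelling out in the new peer check: the gateway setting is a multiaddr that ends in /p2p/<peer-id>, and pathlib treats it like a POSIX path, so Path(...).name yields exactly the peer id that /api/v0/swarm/peers reports. A tiny self-contained check of that assumption:

    from pathlib import Path

    addr = (
        '/ip4/169.197.140.154/tcp/4001/p2p/'
        '12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv')

    # .name is the last path component, i.e. the peer id
    assert Path(addr).name == '12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv'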
@@ -19,9 +19,8 @@ from leap.hyperion import HyperionAPI
 import discord
 import io

-from skynet.db import open_new_database, open_database_connection
-from skynet.ipfs import get_ipfs_file
-from skynet.ipfs.docker import open_ipfs_node
+from skynet.db import open_database_connection
+from skynet.ipfs import get_ipfs_file, AsyncIPFSHTTP
 from skynet.constants import *

 from . import *
@@ -44,6 +43,7 @@ class SkynetDiscordFrontend:
         db_host: str,
         db_user: str,
         db_pass: str,
+        ipfs_url: str,
         remote_ipfs_node: str,
         key: str
     ):
@@ -55,23 +55,21 @@ class SkynetDiscordFrontend:
         self.db_host = db_host
         self.db_user = db_user
         self.db_pass = db_pass
+        self.ipfs_url = ipfs_url
         self.remote_ipfs_node = remote_ipfs_node
         self.key = key

         self.bot = DiscordBot(self)
         self.cleos = CLEOS(None, None, url=node_url, remote=node_url)
         self.hyperion = HyperionAPI(hyperion_url)
+        self.ipfs_node = AsyncIPFSHTTP(ipfs_node)

-        self._exit_stack = ExitStack()
         self._async_exit_stack = AsyncExitStack()

     async def start(self):
-        self.ipfs_node = self._exit_stack.enter_context(
-            open_ipfs_node())
-
-        self.ipfs_node.connect(self.remote_ipfs_node)
         logging.info(
             f'connected to remote ipfs node: {self.remote_ipfs_node}')
+        if self.remote_ipfs_node:
+            await self.ipfs_node.connect(self.remote_ipfs_node)

         self.db_call = await self._async_exit_stack.enter_async_context(
             open_database_connection(
@@ -217,10 +217,12 @@ def create_handler_context(frontend: 'SkynetDiscordFrontend'):
                 image.thumbnail((512, 512))
                 logging.warning(f'resized it to {image.size}')

-            image.save(f'ipfs-docker-staging/image.png', format='PNG')
+            image_loc = 'ipfs-staging/image.png'
+            image.save(image_loc, format='PNG')

-            ipfs_hash = ipfs_node.add('image.png')
-            ipfs_node.pin(ipfs_hash)
+            ipfs_info = await ipfs_node.add(image_loc)
+            ipfs_hash = ipfs_info['Hash']
+            await ipfs_node.pin(ipfs_hash)

             logging.info(f'published input image {ipfs_hash} on ipfs')
@@ -21,8 +21,7 @@ from telebot.types import InputMediaPhoto
 from telebot.async_telebot import AsyncTeleBot

 from skynet.db import open_new_database, open_database_connection
-from skynet.ipfs import get_ipfs_file
-from skynet.ipfs.docker import open_ipfs_node
+from skynet.ipfs import get_ipfs_file, AsyncIPFSHTTP
 from skynet.constants import *

 from . import *
@@ -43,7 +42,8 @@ class SkynetTelegramFrontend:
         db_host: str,
         db_user: str,
         db_pass: str,
-        remote_ipfs_node: str,
+        ipfs_node: str,
+        remote_ipfs_node: str | None,
         key: str
     ):
         self.token = token
@@ -60,17 +60,13 @@ class SkynetTelegramFrontend:
         self.bot = AsyncTeleBot(token, exception_handler=SKYExceptionHandler)
         self.cleos = CLEOS(None, None, url=node_url, remote=node_url)
         self.hyperion = HyperionAPI(hyperion_url)
+        self.ipfs_node = AsyncIPFSHTTP(ipfs_node)

-        self._exit_stack = ExitStack()
         self._async_exit_stack = AsyncExitStack()

     async def start(self):
-        self.ipfs_node = self._exit_stack.enter_context(
-            open_ipfs_node())
-
-        # self.ipfs_node.connect(self.remote_ipfs_node)
         logging.info(
             f'connected to remote ipfs node: {self.remote_ipfs_node}')
+        if self.remote_ipfs_node:
+            await self.ipfs_node.connect(self.remote_ipfs_node)

         self.db_call = await self._async_exit_stack.enter_async_context(
             open_database_connection(
@@ -80,7 +76,6 @@ class SkynetTelegramFrontend:

     async def stop(self):
         await self._async_exit_stack.aclose()
-        self._exit_stack.close()

     @acm
     async def open(self):
@@ -217,10 +217,12 @@ def create_handler_context(frontend: 'SkynetTelegramFrontend'):
                 image.thumbnail((512, 512))
                 logging.warning(f'resized it to {image.size}')

-            image.save(f'ipfs-docker-staging/image.png', format='PNG')
+            image_loc = 'ipfs-staging/image.png'
+            image.save(image_loc, format='PNG')

-            ipfs_hash = ipfs_node.add('image.png')
-            ipfs_node.pin(ipfs_hash)
+            ipfs_info = await ipfs_node.add(image_loc)
+            ipfs_hash = ipfs_info['Hash']
+            await ipfs_node.pin(ipfs_hash)

             logging.info(f'published input image {ipfs_hash} on ipfs')
@@ -1,28 +1,59 @@
 #!/usr/bin/python

 import logging
+from pathlib import Path

 import asks
 import requests


-class IPFSHTTP:
+class IPFSClientException(BaseException):
+    ...
+
+
+class AsyncIPFSHTTP:

     def __init__(self, endpoint: str):
         self.endpoint = endpoint

-    def pin(self, cid: str):
-        return requests.post(
-            f'{self.endpoint}/api/v0/pin/add',
-            params={'arg': cid}
+    async def _post(self, sub_url: str, *args, **kwargs):
+        resp = await asks.post(
+            self.endpoint + sub_url,
+            *args, **kwargs
         )

-    async def a_pin(self, cid: str):
-        return await asks.post(
-            f'{self.endpoint}/api/v0/pin/add',
-            params={'arg': cid}
-        )
+        if resp.status_code != 200:
+            raise IPFSClientException(resp.text)
+
+        return resp.json()
+
+    async def add(self, file_path: Path, **kwargs):
+        files = {
+            'file': file_path
+        }
+        return await self._post(
+            '/api/v0/add',
+            files=files,
+            params=kwargs
+        )
+
+    async def pin(self, cid: str):
+        return (await self._post(
+            '/api/v0/pin/add',
+            params={'arg': cid}
+        ))['Pins']
+
+    async def connect(self, multi_addr: str):
+        return await self._post(
+            '/api/v0/swarm/connect',
+            params={'arg': multi_addr}
+        )
+
+    async def peers(self, **kwargs):
+        return (await self._post(
+            '/api/v0/swarm/peers',
+            params=kwargs
+        ))['Peers']


 async def get_ipfs_file(ipfs_link: str, timeout: int = 60):
     logging.info(f'attempting to get image at {ipfs_link}')
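A hedged usage sketch of the new client under trio, mirroring how the dgpu connector and the pinner now call it; it assumes a local Kubo daemon with its RPC API on port 5001 and an image.png in the working directory:

    from pathlib import Path

    import trio

    from skynet.ipfs import AsyncIPFSHTTP


    async def main():
        ipfs = AsyncIPFSHTTP('http://127.0.0.1:5001')

        file_info = await ipfs.add(Path('image.png'))  # Kubo returns Name/Hash/Size
        cid = file_info['Hash']

        pins = await ipfs.pin(cid)                     # list of pinned CIDs
        assert cid in pins

        print(len(await ipfs.peers()), 'peers connected')


    trio.run(main)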
@@ -51,9 +51,10 @@ class IPFSDocker:


 @cm
-def open_ipfs_node(name='skynet-ipfs'):
+def open_ipfs_node(name='skynet-ipfs', teardown=False):
     dclient = docker.from_env()

+    container = None
     try:
         container = dclient.containers.get(name)
@@ -100,3 +101,6 @@ def open_ipfs_node(name='skynet-ipfs'):
             break

     yield IPFSDocker(container)
+
+    if teardown and container:
+        container.stop()
@@ -9,7 +9,7 @@ import trio

 from leap.hyperion import HyperionAPI

-from . import IPFSHTTP
+from . import AsyncIPFSHTTP


 MAX_TIME = timedelta(seconds=20)
@@ -20,7 +20,7 @@ class SkynetPinner:
     def __init__(
         self,
         hyperion: HyperionAPI,
-        ipfs_http: IPFSHTTP
+        ipfs_http: AsyncIPFSHTTP
     ):
         self.hyperion = hyperion
         self.ipfs_http = ipfs_http
@@ -85,7 +85,7 @@ class SkynetPinner:
         for _ in range(6):
             try:
                 with trio.move_on_after(5):
-                    resp = await self.ipfs_http.a_pin(cid)
+                    resp = await self.ipfs_http.pin(cid)
                 if resp.status_code != 200:
                     logging.error(f'error pinning {cid}:\n{resp.text}')
                     del self._pinned[cid]
@@ -1,15 +1,18 @@
 #!/usr/bin/python

-import logging
-
-from pathlib import Path
-
 import pytest

 from skynet.db import open_new_database
+from skynet.ipfs import AsyncIPFSHTTP
+from skynet.ipfs.docker import open_ipfs_node
 from skynet.nodeos import open_nodeos


+@pytest.fixture(scope='session')
+def ipfs_client():
+    with open_ipfs_node(teardown=True):
+        yield AsyncIPFSHTTP('http://127.0.0.1:5001')
+
 @pytest.fixture(scope='session')
 def postgres_db():
     with open_new_database() as db_params:
@@ -0,0 +1,26 @@
#!/usr/bin/python


from pathlib import Path


async def test_connection(ipfs_client):
    await ipfs_client.connect(
        '/ip4/169.197.140.154/tcp/4001/p2p/12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv')
    peers = await ipfs_client.peers()
    assert '12D3KooWKWogLFNEcNNMKnzU7Snrnuj84RZdMBg3sLiQSQc51oEv' in [p['Peer'] for p in peers]


async def test_add_and_pin_file(ipfs_client):
    test_file = Path('hello_world.txt')
    with open(test_file, 'w+') as file:
        file.write('Hello Skynet!')

    file_info = await ipfs_client.add(test_file)
    file_cid = file_info['Hash']

    pin_resp = await ipfs_client.pin(file_cid)

    assert file_cid in pin_resp

    test_file.unlink()