
Commit 1a86323

Merge branch 'fix(web)/staking-refetch-and-improving-fetching-timing' into fix(web)/dashboard-data-and-wallet-connection
2 parents 33f3532 + 7338bd5

File tree: 8 files changed (+55 −38 lines)

bot-pinner/README.md

Lines changed: 2 additions & 1 deletion
@@ -15,7 +15,8 @@ $ docker-compose up -d
 
 1. Standard IPFS container which creates a local mount for data
 2. Evidence container that awaits new events and then scrapes the latest hashes and submits it to IPFS.
-
+3. `src/peers.txt` contains a list of peers, by default it will add connects to Estuary & Kleros IPFS nodes. This should make it easier to find content by creating a data network around Kleros Court V2.
+   1. Adding these peers will make it easier to find and replicate content.
 ## DappNode
 
 :warning: For the following steps, you need access to [a DappNode](https://dappnode.io) system with the IPFS service running.
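For context, adding one of these peers amounts to a single call against the IPFS HTTP API, which is what the bot does for every entry in src/peers.txt. A minimal sketch, assuming a local IPFS node exposing its API on the default port 5001 (the multiaddr is the first entry from peers.txt):

# Hedged sketch: ask a local IPFS node to connect to one peer from src/peers.txt.
# The API URL and port are assumptions based on the default IPFS HTTP API.
import requests

peer = "/ip4/139.178.68.217/tcp/6744/p2p/12D3KooWCVXs8P7iq6ao4XhfAmKWrEeuKFWCJgqe9jGDMTqHYBjw"
r = requests.post(f"http://localhost:5001/api/v0/swarm/connect?arg={peer}", timeout=25)
print(r.status_code, r.text)  # a 200 response typically reports "connect <peer id> success"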

bot-pinner/docker-compose.yml

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ services:
       - ./court/:/var/lib/data/
       - ./watchlist/:/var/lib/watchlist/
     environment:
-      RPC: "https://rinkeby.arbitrum.io/rpc"
+      RPC: "https://arb-goerli.g.alchemy.com/v2/<key>"
       IPFS: "http://localhost:5001/api/v0"
       INTERVAL: 300
       RETRY: 2
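The RPC value replaces the Arbitrum Rinkeby testnet endpoint with an Arbitrum Goerli endpoint served through Alchemy; `<key>` stays a placeholder for the operator's own API key. A minimal pre-flight sanity check of both configured endpoints, run on the host before docker-compose up, might look like this (illustrative only, not part of the repo):

# Illustrative pre-flight check; env names and defaults mirror docker-compose.yml.
import os
import requests

RPC = os.environ.get("RPC", "https://arb-goerli.g.alchemy.com/v2/<key>")
IPFS = os.environ.get("IPFS", "http://localhost:5001/api/v0")

# JSON-RPC ping: any healthy endpoint answers eth_blockNumber.
rpc_ok = requests.post(RPC, json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber", "params": []}, timeout=10)
print("RPC:", rpc_ok.json())

# IPFS HTTP API ping, the same call the bot's check_ipfs() helper uses (see bot-pinner/src/tooling.py below).
ipfs_ok = requests.post(f"{IPFS}/version", timeout=10)
print("IPFS:", ipfs_ok.json())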

bot-pinner/src/add_hashes.py

Lines changed: 12 additions & 5 deletions
@@ -2,21 +2,23 @@
 import requests
 import os
 import time
-from tooling import motd, create_logger, port_up
+from tooling import motd, create_logger
 import json
 import glob
 
 
-logger = create_logger()
-motd()
+
+
 
 # Init
 RPC = os.environ.get("RPC", "http://localhost:8545")
 IPFS = os.environ.get("IPFS", "http://ipfs-cluster.dappnode:9094")
 INTERVAL = os.environ.get("INTERVAL", 600)  # Events are not constantly listened to, instead it checks per INTERVAL.
-RETRY = int(os.environ.get("RETRY", 0)) # Retry interval value
+RETRY = int(os.environ.get("RETRY", 0))  # Retry interval value
 attempted_retries = dict()
 
+logger = create_logger(IPFS)
+
 # Contract / RPC
 w3 = Web3(Web3.HTTPProvider(RPC))
 
@@ -36,6 +38,7 @@
 except FileNotFoundError:
     pass
 
+
 def main():
     block_number = block
     tasks = ["Evidence"]
@@ -75,12 +78,14 @@ def retry_hashes():
         attempted_retries[_hash] += 1
         if RETRY == 0 or attempted_retries[_hash] < RETRY:
             add_hash(_hash)
-        elif attempted_retries[_hash] > int(RETRY + 10):
+        elif attempted_retries[_hash] > int(RETRY + 10):
             attempted_retries[_hash] = int(RETRY - 2)  # Reset the search
 
+
 def check_hash(_hash):
     return _hash.rsplit('/', 1)[0]  # Recursive pin // i.e. strip _hash/something.json
 
+
 def add_hash(_hash):
     _hash = check_hash(_hash)
     try:
@@ -91,6 +96,7 @@ def add_hash(_hash):
         logger.warning(f"Time-out: Couldn't find {_hash} on the IPFS network")
         if _hash not in hashes_wanted: hashes_wanted.append(_hash)
 
+
 def get_contracts():
     contracts = []
     for f in glob.glob('/var/lib/watchlist/**/*.json', recursive=True):
@@ -105,5 +111,6 @@ def get_contracts():
             pass
     return contracts
 
+
 if __name__ == '__main__':
     main()
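For context, check_hash() above exists because evidence URIs often point at a file inside an IPFS directory; pinning the parent CID recursively covers the file as well. A small illustration of its behaviour (the CID is made up):

# Behaviour of check_hash() as defined above; the CID below is a made-up example.
def check_hash(_hash):
    return _hash.rsplit('/', 1)[0]  # strip a trailing "/file.json" so the root CID is pinned recursively

print(check_hash("QmExampleCid123/evidence.json"))  # -> "QmExampleCid123"
print(check_hash("QmExampleCid123"))                # -> "QmExampleCid123" (no path part, unchanged)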

bot-pinner/src/peers.txt

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+/ip4/139.178.68.217/tcp/6744/p2p/12D3KooWCVXs8P7iq6ao4XhfAmKWrEeuKFWCJgqe9jGDMTqHYBjw
+/ip4/147.75.49.71/tcp/6745/p2p/12D3KooWGBWx9gyUFTVQcKMTenQMSyE2ad9m7c9fpjS4NMjoDien
+/ip4/147.75.86.255/tcp/6745/p2p/12D3KooWFrnuj5o3tx4fGD2ZVJRyDqTdzGnU3XYXmBbWbc8Hs8Nd
+/ip4/3.134.223.177/tcp/6745/p2p/12D3KooWN8vAoGd6eurUSidcpLYguQiGZwt4eVgDvbgaS7kiGTup

bot-pinner/src/tooling.py

Lines changed: 33 additions & 28 deletions
@@ -1,20 +1,30 @@
 import time
-import socket
 import logging
 import sys
-import glob
-import json
-from web3 import Web3
+import requests
 
-def create_logger():
+
+def check_ipfs(IPFS, logger):
+    try:
+        r = requests.post(f"{IPFS}/version")
+    except:
+        logger.error("Couldn't connect to IPFS API, retrying..")
+        time.sleep(15)
+        check_ipfs(IPFS, logger)
+
+
+def create_logger(IPFS):
     # create a logger object
-    logger = logging.getLogger('kleros-v2-evidence-collector')
+    logger = logging.getLogger('kleros-v2-evidence')
     logger.setLevel(logging.INFO)
     logfile = logging.StreamHandler(sys.stdout)
     fmt = '%(asctime)s %(levelname)s %(message)s'
     formatter = logging.Formatter(fmt)
     logfile.setFormatter(formatter)
     logger.addHandler(logfile)
+    motd()
+    check_ipfs(IPFS, logger)
+    add_peers(IPFS, logger)
     return logger
 
 
@@ -33,27 +43,22 @@ def motd():
             `-_, :!;;;''
                 `-!'
     """
-    print("Booting...")
-    print(kleros)
-    print("Kleros Court V2 Evidence Collector!")
-    time.sleep(10)  # Wait for IPFS to come up
-    print(additional_info())
-
-
-def additional_info():
-    ipfs_api = port_up(8080)
-    ipfs_gw = port_up(5001)
-    if ipfs_api == 0 and ipfs_gw == 0:
-        return "Gateway and API are up. IPFS WebUI: http://127.0.0.1:5001/webui"
-    if ipfs_api == 0:
-        return "API is up"
-    return "API or Gateway unavailable. (If running on different ports, disregard this check)"
-
-
-def port_up(port: int, host="127.0.0.1"):
-    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    result = sock.connect_ex((host, port))
-    sock.close()
-    return result
+    motd = f"""
+    Booting... \
+    {kleros} \
+    Kleros Court V2 Evidence Collector!"""
+    print(motd)
+
+
+def add_peers(ipfs, logger):
+    with open('peers.txt') as f:
+        peers = f.read().splitlines()
+    for peer in peers:
+        r = requests.post(f"{ipfs}/swarm/connect?arg={peer}", timeout=25)
+        if r.status_code == 200:
+            logger.info(f"Succesfully added peer: {[peer]}")
+        if r.status_code != 200:
+            logger.warning(f"Couldn't add {peer} to peerlist {r.content}")
+
 
 
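With these changes, create_logger() becomes the single bootstrap entry point: it prints the MOTD, retries every 15 seconds until the IPFS API answers, and connects the peers from peers.txt before returning the logger. A usage sketch (the API URL is an assumption matching the docker-compose default, and peers.txt is assumed to sit in the working directory):

# Hedged usage sketch of the reworked tooling module.
from tooling import create_logger

logger = create_logger("http://localhost:5001/api/v0")  # also runs motd(), check_ipfs() and add_peers()
logger.info("bot-pinner tooling initialised")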

bot-pinner/watchlist/DisputeKitClassic.json

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 {
-  "address": "0xA2c538AA05BBCc44c213441f6f3777223D2BF9e5",
+  "address": "0x8F1a2B8F9b04320375856580Fc6B1669Cb12a9EE",
   "abi": [
     {
       "inputs": [

contracts/src/arbitration/dispute-kits/DisputeKitClassic.sol

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ import "../interfaces/IEvidence.sol";
 /// @title DisputeKitClassic
 /// Dispute kit implementation of the Kleros v1 features including:
 /// - a drawing system: proportional to staked PNK,
-/// - a vote aggreation system: plurality,
+/// - a vote aggregation system: plurality,
 /// - an incentive system: equal split between coherent votes,
 /// - an appeal system: fund 2 choices only, vote on any choice.
 contract DisputeKitClassic is BaseDisputeKit, IEvidence {

contracts/src/arbitration/dispute-kits/DisputeKitSybilResistant.sol

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ interface IProofOfHumanity {
 /// @title DisputeKitSybilResistant
 /// Dispute kit implementation adapted from DisputeKitClassic
 /// - a drawing system: at most 1 vote per juror registered on Proof of Humanity,
-/// - a vote aggreation system: plurality,
+/// - a vote aggregation system: plurality,
 /// - an incentive system: equal split between coherent votes,
 /// - an appeal system: fund 2 choices only, vote on any choice.
 contract DisputeKitSybilResistant is BaseDisputeKit, IEvidence {
