This repository has been archived by the owner on Aug 2, 2022. It is now read-only.

Minor cleanup to merge conflict #7102

Merged
tests/Cluster.py (2 changes: 1 addition & 1 deletion)
@@ -430,7 +430,7 @@ def connectGroup(group, producerNodes, bridgeNodes) :
if not loadSystemContract:
useBiosBootFile=False #ensure we use Cluster.bootstrap
if onlyBios or not useBiosBootFile:
- self.biosNode=self.bootstrap(biosNode, startedNodes, prodCount, totalProducers, pfSetupPolicy, onlyBios, onlySetProds)
+ self.biosNode=self.bootstrap(biosNode, startedNodes, prodCount, totalProducers, pfSetupPolicy, onlyBios, onlySetProds, loadSystemContract)
if self.biosNode is None:
Utils.Print("ERROR: Bootstrap failed.")
return False
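The fix above forwards the loadSystemContract flag the caller already holds into Cluster.bootstrap. The bootstrap body is not part of this diff, so the following is only a sketch of the assumed signature, with loadSystemContract as a trailing parameter; the parameter names are taken from the call site above, and the default values are assumptions.

    # Sketch only: assumes bootstrap accepts the forwarded loadSystemContract flag.
    def bootstrap(self, biosNode, startedNodes, prodCount, totalProducers,
                  pfSetupPolicy, onlyBios=False, onlySetProds=False,
                  loadSystemContract=True):
        # Boot the bios node and set producers; deploy the system contract
        # only when loadSystemContract is True.
        pass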
tests/Node.py (27 changes: 3 additions & 24 deletions)
@@ -7,10 +7,6 @@
import datetime
import json
import signal
- import urllib.request
- import urllib.parse
- from urllib.error import HTTPError
- import tempfile

from core_symbol import CORE_SYMBOL
from testUtils import Utils
@@ -1455,34 +1451,18 @@ def reportStatus(self):
Utils.Print(" hbn : %s (%s)" % (self.lastRetrievedHeadBlockNum, status))
Utils.Print(" lib : %s (%s)" % (self.lastRetrievedLIB, status))

- def sendRpcApi(self, relativeUrl, data={}):
- url = urllib.parse.urljoin(self.endpointHttp, relativeUrl)
- req = urllib.request.Request(url)
- req.add_header('Content-Type', 'application/json; charset=utf-8')
- reqData = json.dumps(data).encode("utf-8")
- rpcApiResult = None
- try:
- response = urllib.request.urlopen(req, reqData)
- rpcApiResult = json.loads(response.read().decode("utf-8"))
- except HTTPError as e:
- Utils.Print("Fail to send RPC API to {} with data {} ({})".format(url, data, e.read()))
- raise e
- except Exception as e:
- Utils.Print("Fail to send RPC API to {} with data {} ({})".format(url, data, e))
- raise e
- return rpcApiResult

# Require producer_api_plugin
def scheduleProtocolFeatureActivations(self, featureDigests=[]):
self.sendRpcApi("v1/producer/schedule_protocol_feature_activations", {"protocol_features_to_activate": featureDigests})
param = { "protocol_features_to_activate": featureDigests }
self.processCurlCmd("producer", "schedule_protocol_feature_activations", json.dumps(param))

# Require producer_api_plugin
def getSupportedProtocolFeatures(self, excludeDisabled=False, excludeUnactivatable=False):
param = {
"exclude_disabled": excludeDisabled,
"exclude_unactivatable": excludeUnactivatable
}
- res = self.sendRpcApi("v1/producer/get_supported_protocol_features", param)
+ res = self.processCurlCmd("producer", "get_supported_protocol_features", json.dumps(param))
return res

# This will return supported protocol features in a dict (feature codename as the key), i.e.
@@ -1564,7 +1544,6 @@ def getActivatedProtocolFeatures(self):
return latestBlockHeaderState["activated_protocol_features"]["protocol_features"]

def modifyBuiltinPFSubjRestrictions(self, nodeId, featureCodename, subjectiveRestriction={}):
- from Cluster import Cluster
jsonPath = os.path.join(Utils.getNodeConfigDir(nodeId),
"protocol_features",
"BUILTIN-{}.json".format(featureCodename))
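With sendRpcApi gone, producer API calls in Node.py go through the existing processCurlCmd(resource, command, payload) helper, whose implementation is outside this diff. As a rough illustration of what such a curl-based call amounts to, a minimal sketch follows; the helper name, endpoint, and defaults are hypothetical, not the harness API.

    import json
    import subprocess

    # Hypothetical stand-in for processCurlCmd: POST a JSON payload to
    # http://<endpoint>/v1/<resource>/<command> by shelling out to curl.
    def curl_rpc(endpoint_http, resource, command, payload):
        url = "{}/v1/{}/{}".format(endpoint_http.rstrip("/"), resource, command)
        cmd = ["curl", "-s", "-X", "POST",
               "-H", "Content-Type: application/json",
               "--data", payload, url]
        out = subprocess.check_output(cmd).decode("utf-8")
        return json.loads(out) if out.strip() else None

    # Example usage against a local nodeos instance (address is illustrative):
    # curl_rpc("http://127.0.0.1:8888", "producer", "get_supported_protocol_features",
    #          json.dumps({"exclude_disabled": False, "exclude_unactivatable": False}))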
tests/nodeos_multiple_version_protocol_feature_test.py (29 changes: 15 additions & 14 deletions)
@@ -82,6 +82,7 @@ def hasBlockBecomeIrr():
associatedNodeLabels = {
"3": "170"
}
Utils.Print("Alternate Version Labels File is {}".format(alternateVersionLabelsFile))
assert exists(alternateVersionLabelsFile), "Alternate version labels file does not exist"
assert cluster.launch(pnodes=4, totalNodes=4, prodCount=1, totalProducers=4,
extraNodeosArgs=" --plugin eosio::producer_api_plugin ",
@@ -91,31 +92,31 @@ def hasBlockBecomeIrr():
alternateVersionLabelsFile=alternateVersionLabelsFile,
associatedNodeLabels=associatedNodeLabels), "Unable to launch cluster"

- def pauseBlockProduction(nodes:[Node]):
- for node in nodes:
- node.sendRpcApi("v1/producer/pause")
+ newNodeIds = [0, 1, 2]
+ oldNodeId = 3
+ newNodes = list(map(lambda id: cluster.getNode(id), newNodeIds))
+ oldNode = cluster.getNode(oldNodeId)
+ allNodes = [*newNodes, oldNode]

- def resumeBlockProduction(nodes:[Node]):
- for node in nodes:
- node.sendRpcApi("v1/producer/resume")
+ def pauseBlockProductions():
+ for node in allNodes:
+ if not node.killed: node.processCurlCmd("producer", "pause", "")

+ def resumeBlockProductions():
+ for node in allNodes:
+ if not node.killed: node.processCurlCmd("producer", "resume", "")

def shouldNodesBeInSync(nodes:[Node]):
# Pause all block production to ensure the head is not moving
- pauseBlockProduction(nodes)
+ pauseBlockProductions()
time.sleep(1) # Wait for some time to ensure all blocks are propagated
headBlockIds = []
for node in nodes:
headBlockId = node.getInfo()["head_block_id"]
headBlockIds.append(headBlockId)
- resumeBlockProduction(nodes)
+ resumeBlockProductions()
return len(set(headBlockIds)) == 1

- newNodeIds = [0, 1, 2]
- oldNodeId = 3
- newNodes = list(map(lambda id: cluster.getNode(id), newNodeIds))
- oldNode = cluster.getNode(oldNodeId)
- allNodes = [*newNodes, oldNode]

# Before everything starts, all nodes (new version and old version) should be in sync
assert shouldNodesBeInSync(allNodes), "Nodes are not in sync before preactivation"

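The reworked in-sync check pauses production on every live node so the head stops advancing, waits a second for in-flight blocks to propagate, compares head block IDs, and then resumes production. Distilled into a standalone sketch for clarity; it assumes node objects exposing killed, getInfo(), and processCurlCmd() that behave as in the harness.

    import time

    def nodes_in_sync(nodes):
        live = [n for n in nodes if not n.killed]
        # Stop block production so the comparison is not racing a moving head.
        for n in live:
            n.processCurlCmd("producer", "pause", "")
        time.sleep(1)  # let already-produced blocks propagate
        head_ids = {n.getInfo()["head_block_id"] for n in live}
        for n in live:
            n.processCurlCmd("producer", "resume", "")
        # All live nodes share a single head block id when they are in sync.
        return len(head_ids) == 1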