From 51715aa44651a34135c3fa38ff26422872e0ee36 Mon Sep 17 00:00:00 2001 From: Anirudh Warrier <12178754+anirudhwarrier@users.noreply.github.com> Date: Mon, 23 Sep 2024 12:43:55 +0400 Subject: [PATCH 01/14] [DEVSVCS-547] option to use pre-deployed contracts in Automation tests (#14497) * rename to ctftestenv * automation test config - add contract addresses * add multicall contract in pre-deployed contracts * support benchmark test contract in pre-deployed contracts * load chainmodule contract from address * fix registrar load, add more logs * reduce test setup time on live network * fix lint issues --- .../actions/automation_ocr_helpers.go | 75 ++- .../actions/automationv2/actions.go | 163 ++++--- integration-tests/actions/keeper_helpers.go | 2 +- .../benchmark/automation_test.go | 35 +- .../chaos/automation_chaos_test.go | 6 +- .../ethereum_contracts_automation.go | 166 ++++--- .../automationv2_1/automationv2_1_test.go | 2 +- .../reorg/automation_reorg_test.go | 3 +- integration-tests/smoke/automation_test.go | 127 +----- integration-tests/smoke/log_poller_test.go | 15 +- .../testconfig/automation/config.go | 430 ++++++++++++++++++ ...r_benchmark.go => automation_benchmark.go} | 38 +- 12 files changed, 784 insertions(+), 278 deletions(-) rename integration-tests/testsetups/{keeper_benchmark.go => automation_benchmark.go} (94%) diff --git a/integration-tests/actions/automation_ocr_helpers.go b/integration-tests/actions/automation_ocr_helpers.go index f1845804677..8159427699c 100644 --- a/integration-tests/actions/automation_ocr_helpers.go +++ b/integration-tests/actions/automation_ocr_helpers.go @@ -6,6 +6,10 @@ import ( "math/big" "testing" + "github.com/ethereum/go-ethereum/common" + + tt "github.com/smartcontractkit/chainlink/integration-tests/types" + "github.com/pkg/errors" "github.com/smartcontractkit/chainlink-testing-framework/seth" @@ -32,9 +36,9 @@ func DeployAutoOCRRegistryAndRegistrar( return registry, registrar } -// DeployConsumers deploys and registers keeper consumers. If ephemeral addresses are enabled, it will deploy and register the consumers from ephemeral addresses, but each upkpeep will be registered with root key address as the admin. Which means +// DeployLegacyConsumers deploys and registers keeper consumers. If ephemeral addresses are enabled, it will deploy and register the consumers from ephemeral addresses, but each upkeep will be registered with root key address as the admin. Which means // that functions like setting upkeep configuration, pausing, unpausing, etc. will be done by the root key address. It deploys multicall contract and sends link funds to each deployment address. 
-func DeployConsumers(t *testing.T, chainClient *seth.Client, registry contracts.KeeperRegistry, registrar contracts.KeeperRegistrar, linkToken contracts.LinkToken, numberOfUpkeeps int, linkFundsForEachUpkeep *big.Int, upkeepGasLimit uint32, isLogTrigger bool, isMercury bool, isBillingTokenNative bool, wethToken contracts.WETHToken) ([]contracts.KeeperConsumer, []*big.Int) { +func DeployLegacyConsumers(t *testing.T, chainClient *seth.Client, registry contracts.KeeperRegistry, registrar contracts.KeeperRegistrar, linkToken contracts.LinkToken, numberOfUpkeeps int, linkFundsForEachUpkeep *big.Int, upkeepGasLimit uint32, isLogTrigger bool, isMercury bool, isBillingTokenNative bool, wethToken contracts.WETHToken) ([]contracts.KeeperConsumer, []*big.Int) { // Fund deployers with LINK, no need to do this for Native token if !isBillingTokenNative { err := DeployMultiCallAndFundDeploymentAddresses(chainClient, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep) @@ -54,6 +58,28 @@ func DeployConsumers(t *testing.T, chainClient *seth.Client, registry contracts. return upkeeps, upkeepIds } +// DeployConsumers deploys and registers keeper consumers. If ephemeral addresses are enabled, it will deploy and register the consumers from ephemeral addresses, but each upkeep will be registered with root key address as the admin. Which means +// that functions like setting upkeep configuration, pausing, unpausing, etc. will be done by the root key address. It sets up the multicall contract (reusing a pre-deployed one when the test config provides it) and sends link funds to each deployment address. +func DeployConsumers(t *testing.T, chainClient *seth.Client, registry contracts.KeeperRegistry, registrar contracts.KeeperRegistrar, linkToken contracts.LinkToken, numberOfUpkeeps int, linkFundsForEachUpkeep *big.Int, upkeepGasLimit uint32, isLogTrigger bool, isMercury bool, isBillingTokenNative bool, wethToken contracts.WETHToken, config tt.AutomationTestConfig) ([]contracts.KeeperConsumer, []*big.Int) { + // Fund deployers with LINK, no need to do this for Native token + if !isBillingTokenNative { + err := SetupMultiCallAndFundDeploymentAddresses(chainClient, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep, config) + require.NoError(t, err, "Sending link funds to deployment addresses shouldn't fail") + } + + upkeeps := DeployKeeperConsumers(t, chainClient, numberOfUpkeeps, isLogTrigger, isMercury) + require.Equal(t, numberOfUpkeeps, len(upkeeps), "Number of upkeeps should match") + var upkeepsAddresses []string + for _, upkeep := range upkeeps { + upkeepsAddresses = append(upkeepsAddresses, upkeep.Address()) + } + upkeepIds := RegisterUpkeepContracts( + t, chainClient, linkToken, linkFundsForEachUpkeep, upkeepGasLimit, registry, registrar, numberOfUpkeeps, upkeepsAddresses, isLogTrigger, isMercury, isBillingTokenNative, wethToken, + ) + require.Equal(t, numberOfUpkeeps, len(upkeepIds), "Number of upkeepIds should match") + return upkeeps, upkeepIds +} + // DeployPerformanceConsumers deploys and registers keeper performance consumers. If ephemeral addresses are enabled, it will deploy and register the consumers from ephemeral addresses, but each upkeep will be registered with root key address as the admin. // that functions like setting upkeep configuration, pausing, unpausing, etc. will be done by the root key address. It deploys multicall contract and sends link funds to each deployment address. 
func DeployPerformanceConsumers( @@ -69,12 +95,13 @@ func DeployPerformanceConsumers( blockInterval, // Interval of blocks that upkeeps are expected to be performed checkGasToBurn, // How much gas should be burned on checkUpkeep() calls performGasToBurn int64, // How much gas should be burned on performUpkeep() calls + config tt.AutomationTestConfig, ) ([]contracts.KeeperConsumerPerformance, []*big.Int) { upkeeps := DeployKeeperConsumersPerformance( t, chainClient, numberOfUpkeeps, blockRange, blockInterval, checkGasToBurn, performGasToBurn, ) - err := DeployMultiCallAndFundDeploymentAddresses(chainClient, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep) + err := SetupMultiCallAndFundDeploymentAddresses(chainClient, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep, config) require.NoError(t, err, "Sending link funds to deployment addresses shouldn't fail") var upkeepsAddresses []string @@ -97,10 +124,11 @@ func DeployPerformDataCheckerConsumers( linkFundsForEachUpkeep *big.Int, upkeepGasLimit uint32, expectedData []byte, + config tt.AutomationTestConfig, ) ([]contracts.KeeperPerformDataChecker, []*big.Int) { upkeeps := DeployPerformDataChecker(t, chainClient, numberOfUpkeeps, expectedData) - err := DeployMultiCallAndFundDeploymentAddresses(chainClient, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep) + err := SetupMultiCallAndFundDeploymentAddresses(chainClient, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep, config) require.NoError(t, err, "Sending link funds to deployment addresses shouldn't fail") var upkeepsAddresses []string @@ -111,6 +139,45 @@ func DeployPerformDataCheckerConsumers( return upkeeps, upkeepIds } +func SetupMultiCallAddress(chainClient *seth.Client, testConfig tt.AutomationTestConfig) (common.Address, error) { + if testConfig.GetAutomationConfig().UseExistingMultiCallContract() { + multiCallAddress, err := testConfig.GetAutomationConfig().MultiCallContractAddress() + if err != nil { + return common.Address{}, errors.Wrap(err, "Error getting existing multicall contract address") + } + return multiCallAddress, nil + } + + multicallAddress, err := contracts.DeployMultiCallContract(chainClient) + if err != nil { + return common.Address{}, errors.Wrap(err, "Error deploying multicall contract") + } + return multicallAddress, nil +} + +// SetupMultiCallAndFundDeploymentAddresses sets up the multicall contract and sends link funds to each deployment address +func SetupMultiCallAndFundDeploymentAddresses( + chainClient *seth.Client, + linkToken contracts.LinkToken, + numberOfUpkeeps int, + linkFundsForEachUpkeep *big.Int, + testConfig tt.AutomationTestConfig, +) error { + concurrency, err := GetAndAssertCorrectConcurrency(chainClient, 1) + if err != nil { + return err + } + + operationsPerAddress := numberOfUpkeeps / concurrency + + multicallAddress, err := SetupMultiCallAddress(chainClient, testConfig) + if err != nil { + return errors.Wrap(err, "Error setting up multicall contract") + } + + return SendLinkFundsToDeploymentAddresses(chainClient, concurrency, numberOfUpkeeps, operationsPerAddress, multicallAddress, linkFundsForEachUpkeep, linkToken) +} + // DeployMultiCallAndFundDeploymentAddresses deploys multicall contract and sends link funds to each deployment address func DeployMultiCallAndFundDeploymentAddresses( chainClient *seth.Client, diff --git a/integration-tests/actions/automationv2/actions.go b/integration-tests/actions/automationv2/actions.go index 6ce35873d88..9b3013f778e 100644 --- a/integration-tests/actions/automationv2/actions.go +++ 
b/integration-tests/actions/automationv2/actions.go @@ -12,6 +12,8 @@ import ( "testing" "time" + tt "github.com/smartcontractkit/chainlink/integration-tests/types" + "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/common" "github.com/lib/pq" @@ -45,7 +47,7 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/store/models" ctf_concurrency "github.com/smartcontractkit/chainlink-testing-framework/lib/concurrency" - ctfTestEnv "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" + ctftestenv "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" ) @@ -61,6 +63,8 @@ type NodeDetails struct { type AutomationTest struct { ChainClient *seth.Client + TestConfig tt.AutomationTestConfig + LinkToken contracts.LinkToken Transcoder contracts.UpkeepTranscoder LINKETHFeed contracts.MockLINKETHFeed @@ -110,9 +114,11 @@ func NewAutomationTestK8s( l zerolog.Logger, chainClient *seth.Client, chainlinkNodes []*client.ChainlinkK8sClient, + config tt.AutomationTestConfig, ) *AutomationTest { return &AutomationTest{ ChainClient: chainClient, + TestConfig: config, ChainlinkNodesk8s: chainlinkNodes, IsOnk8s: true, TransmitterKeyIndex: 0, @@ -126,9 +132,11 @@ func NewAutomationTestDocker( l zerolog.Logger, chainClient *seth.Client, chainlinkNodes []*client.ChainlinkClient, + config tt.AutomationTestConfig, ) *AutomationTest { return &AutomationTest{ ChainClient: chainClient, + TestConfig: config, ChainlinkNodes: chainlinkNodes, IsOnk8s: false, TransmitterKeyIndex: 0, @@ -173,6 +181,7 @@ func (a *AutomationTest) LoadLINK(address string) error { return err } a.LinkToken = linkToken + a.Logger.Info().Str("LINK Token Address", a.LinkToken.Address()).Msg("Successfully loaded LINK Token") return nil } @@ -191,6 +200,7 @@ func (a *AutomationTest) LoadTranscoder(address string) error { return err } a.Transcoder = transcoder + a.Logger.Info().Str("Transcoder Address", a.Transcoder.Address()).Msg("Successfully loaded Transcoder") return nil } @@ -209,6 +219,7 @@ func (a *AutomationTest) LoadLinkEthFeed(address string) error { return err } a.LINKETHFeed = ethLinkFeed + a.Logger.Info().Str("LINK/ETH Feed Address", a.LINKETHFeed.Address()).Msg("Successfully loaded LINK/ETH Feed") return nil } @@ -227,6 +238,7 @@ func (a *AutomationTest) LoadEthUSDFeed(address string) error { return err } a.ETHUSDFeed = ethUSDFeed + a.Logger.Info().Str("ETH/USD Feed Address", a.ETHUSDFeed.Address()).Msg("Successfully loaded ETH/USD Feed") return nil } @@ -245,6 +257,7 @@ func (a *AutomationTest) LoadLinkUSDFeed(address string) error { return err } a.LINKUSDFeed = linkUSDFeed + a.Logger.Info().Str("LINK/USD Feed Address", a.LINKUSDFeed.Address()).Msg("Successfully loaded LINK/USD Feed") return nil } @@ -263,6 +276,7 @@ func (a *AutomationTest) LoadWETH(address string) error { return err } a.WETHToken = wethToken + a.Logger.Info().Str("WETH Token Address", a.WETHToken.Address()).Msg("Successfully loaded WETH Token") return nil } @@ -281,6 +295,7 @@ func (a *AutomationTest) LoadEthGasFeed(address string) error { return err } a.GasFeed = gasFeed + a.Logger.Info().Str("Gas Feed Address", a.GasFeed.Address()).Msg("Successfully loaded Gas Feed") return nil } @@ -305,12 +320,13 @@ func (a *AutomationTest) DeployRegistry() error { return nil } -func (a *AutomationTest) LoadRegistry(address string) error { - registry, err := contracts.LoadKeeperRegistry(a.Logger, a.ChainClient, 
common.HexToAddress(address), a.RegistrySettings.RegistryVersion) +func (a *AutomationTest) LoadRegistry(registryAddress, chainModuleAddress string) error { + registry, err := contracts.LoadKeeperRegistry(a.Logger, a.ChainClient, common.HexToAddress(registryAddress), a.RegistrySettings.RegistryVersion, common.HexToAddress(chainModuleAddress)) if err != nil { return err } a.Registry = registry + a.Logger.Info().Str("ChainModule Address", chainModuleAddress).Str("Registry Address", a.Registry.Address()).Msg("Successfully loaded Registry") return nil } @@ -337,6 +353,7 @@ func (a *AutomationTest) LoadRegistrar(address string) error { if err != nil { return err } + a.Logger.Info().Str("Registrar Address", registrar.Address()).Msg("Successfully loaded Registrar") a.Registrar = registrar return nil } @@ -597,6 +614,7 @@ func (a *AutomationTest) SetConfigOnRegistry() error { }, } } + a.Logger.Debug().Interface("ocrConfig", ocrConfig).Msg("Setting OCR3 config") err = a.Registry.SetConfigTypeSafe(ocrConfig) if err != nil { return errors.Join(err, fmt.Errorf("failed to set config on registry")) @@ -846,7 +864,7 @@ func (a *AutomationTest) AddJobsAndSetConfig(t *testing.T) { l.Info().Str("Registry Address", a.Registry.Address()).Msg("Successfully setConfig on registry") } -func (a *AutomationTest) SetupMercuryMock(t *testing.T, imposters []ctfTestEnv.KillgraveImposter) { +func (a *AutomationTest) SetupMercuryMock(t *testing.T, imposters []ctftestenv.KillgraveImposter) { if a.IsOnk8s { t.Error("mercury mock is not supported on k8s") } @@ -874,61 +892,104 @@ func (a *AutomationTest) setupDeployment(t *testing.T, addJobs bool) { l.Info().Msg("Collected Node Details") l.Debug().Interface("Node Details", a.NodeDetails).Msg("Node Details") - err = a.DeployLINK() - require.NoError(t, err, "Error deploying link token contract") + if a.TestConfig.GetAutomationConfig().UseExistingLinkTokenContract() { + linkAddress, err := a.TestConfig.GetAutomationConfig().LinkTokenContractAddress() + require.NoError(t, err, "Error getting link token contract address") + err = a.LoadLINK(linkAddress.String()) + require.NoError(t, err, "Error loading link token contract") + } else { + err = a.DeployLINK() + require.NoError(t, err, "Error deploying link token contract") + } - err = a.DeployWETH() - require.NoError(t, err, "Error deploying weth token contract") + if a.TestConfig.GetAutomationConfig().UseExistingWethContract() { + wethAddress, err := a.TestConfig.GetAutomationConfig().WethContractAddress() + require.NoError(t, err, "Error getting weth token contract address") + err = a.LoadWETH(wethAddress.String()) + require.NoError(t, err, "Error loading weth token contract") + } else { + err = a.DeployWETH() + require.NoError(t, err, "Error deploying weth token contract") + } - err = a.DeployLinkEthFeed() - require.NoError(t, err, "Error deploying link eth feed contract") - err = a.DeployGasFeed() - require.NoError(t, err, "Error deploying gas feed contract") + if a.TestConfig.GetAutomationConfig().UseExistingLinkEthFeedContract() { + linkEthFeedAddress, err := a.TestConfig.GetAutomationConfig().LinkEthFeedContractAddress() + require.NoError(t, err, "Error getting link eth feed contract address") + err = a.LoadLinkEthFeed(linkEthFeedAddress.String()) + require.NoError(t, err, "Error loading link eth feed contract") + } else { + err = a.DeployLinkEthFeed() + require.NoError(t, err, "Error deploying link eth feed contract") + } - err = a.DeployEthUSDFeed() - require.NoError(t, err, "Error deploying eth usd feed contract") + if 
a.TestConfig.GetAutomationConfig().UseExistingEthGasFeedContract() { + gasFeedAddress, err := a.TestConfig.GetAutomationConfig().EthGasFeedContractAddress() + require.NoError(t, err, "Error getting gas feed contract address") + err = a.LoadEthGasFeed(gasFeedAddress.String()) + require.NoError(t, err, "Error loading gas feed contract") + } else { + err = a.DeployGasFeed() + require.NoError(t, err, "Error deploying gas feed contract") + } - err = a.DeployLinkUSDFeed() - require.NoError(t, err, "Error deploying link usd feed contract") + if a.TestConfig.GetAutomationConfig().UseExistingEthUSDFeedContract() { + ethUsdFeedAddress, err := a.TestConfig.GetAutomationConfig().EthUSDFeedContractAddress() + require.NoError(t, err, "Error getting eth usd feed contract address") + err = a.LoadEthUSDFeed(ethUsdFeedAddress.String()) + require.NoError(t, err, "Error loading eth usd feed contract") + } else { + err = a.DeployEthUSDFeed() + require.NoError(t, err, "Error deploying eth usd feed contract") + } - err = a.DeployTranscoder() - require.NoError(t, err, "Error deploying transcoder contract") + if a.TestConfig.GetAutomationConfig().UseExistingLinkUSDFeedContract() { + linkUsdFeedAddress, err := a.TestConfig.GetAutomationConfig().LinkUSDFeedContractAddress() + require.NoError(t, err, "Error getting link usd feed contract address") + err = a.LoadLinkUSDFeed(linkUsdFeedAddress.String()) + require.NoError(t, err, "Error loading link usd feed contract") + } else { + err = a.DeployLinkUSDFeed() + require.NoError(t, err, "Error deploying link usd feed contract") + } - err = a.DeployRegistry() - require.NoError(t, err, "Error deploying registry contract") - err = a.DeployRegistrar() - require.NoError(t, err, "Error deploying registrar contract") + if a.TestConfig.GetAutomationConfig().UseExistingTranscoderContract() { + transcoderAddress, err := a.TestConfig.GetAutomationConfig().TranscoderContractAddress() + require.NoError(t, err, "Error getting transcoder contract address") + err = a.LoadTranscoder(transcoderAddress.String()) + require.NoError(t, err, "Error loading transcoder contract") + } else { + err = a.DeployTranscoder() + require.NoError(t, err, "Error deploying transcoder contract") + } + + if a.TestConfig.GetAutomationConfig().UseExistingRegistryContract() { + chainModuleAddress, err := a.TestConfig.GetAutomationConfig().ChainModuleContractAddress() + require.NoError(t, err, "Error getting chain module contract address") + registryAddress, err := a.TestConfig.GetAutomationConfig().RegistryContractAddress() + require.NoError(t, err, "Error getting registry contract address") + err = a.LoadRegistry(registryAddress.String(), chainModuleAddress.String()) + require.NoError(t, err, "Error loading registry contract") + if a.Registry.RegistryOwnerAddress().String() != a.ChainClient.MustGetRootKeyAddress().String() { + l.Debug().Str("RootKeyAddress", a.ChainClient.MustGetRootKeyAddress().String()).Str("Registry Owner Address", a.Registry.RegistryOwnerAddress().String()).Msg("Registry owner address is not the root key address") + t.Error("Registry owner address is not the root key address") + t.FailNow() + } + } else { + err = a.DeployRegistry() + require.NoError(t, err, "Error deploying registry contract") + } + + if a.TestConfig.GetAutomationConfig().UseExistingRegistrarContract() { + registrarAddress, err := a.TestConfig.GetAutomationConfig().RegistrarContractAddress() + require.NoError(t, err, "Error getting registrar contract address") + err = a.LoadRegistrar(registrarAddress.String()) + 
require.NoError(t, err, "Error loading registrar contract") + } else { + err = a.DeployRegistrar() + require.NoError(t, err, "Error deploying registrar contract") + } if addJobs { a.AddJobsAndSetConfig(t) } } - -func (a *AutomationTest) LoadAutomationDeployment(t *testing.T, linkTokenAddress, - linkEthFeedAddress, linkUsdFeedAddress, EthUsdFeedAddress, gasFeedAddress, transcoderAddress, registryAddress, registrarAddress string) { - l := logging.GetTestLogger(t) - err := a.CollectNodeDetails() - require.NoError(t, err, "Error collecting node details") - l.Info().Msg("Collected Node Details") - l.Debug().Interface("Node Details", a.NodeDetails).Msg("Node Details") - - err = a.LoadLINK(linkTokenAddress) - require.NoError(t, err, "Error loading link token contract") - - err = a.LoadLinkEthFeed(linkEthFeedAddress) - require.NoError(t, err, "Error loading link eth feed contract") - err = a.LoadEthGasFeed(gasFeedAddress) - require.NoError(t, err, "Error loading gas feed contract") - err = a.LoadEthUSDFeed(EthUsdFeedAddress) - require.NoError(t, err, "Error loading eth usd feed contract") - err = a.LoadLinkUSDFeed(linkUsdFeedAddress) - require.NoError(t, err, "Error loading link usd feed contract") - err = a.LoadTranscoder(transcoderAddress) - require.NoError(t, err, "Error loading transcoder contract") - err = a.LoadRegistry(registryAddress) - require.NoError(t, err, "Error loading registry contract") - err = a.LoadRegistrar(registrarAddress) - require.NoError(t, err, "Error loading registrar contract") - - a.AddJobsAndSetConfig(t) -} diff --git a/integration-tests/actions/keeper_helpers.go b/integration-tests/actions/keeper_helpers.go index 80822a95af4..c7694946cf9 100644 --- a/integration-tests/actions/keeper_helpers.go +++ b/integration-tests/actions/keeper_helpers.go @@ -117,7 +117,7 @@ func DeployKeeperContracts( } registrar := DeployKeeperRegistrar(t, client, registryVersion, linkToken, registrarSettings, registry) - upkeeps, upkeepIds := DeployConsumers(t, client, registry, registrar, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep, upkeepGasLimit, false, false, false, nil) + upkeeps, upkeepIds := DeployLegacyConsumers(t, client, registry, registrar, linkToken, numberOfUpkeeps, linkFundsForEachUpkeep, upkeepGasLimit, false, false, false, nil) return registry, registrar, upkeeps, upkeepIds } diff --git a/integration-tests/benchmark/automation_test.go b/integration-tests/benchmark/automation_test.go index 534fca2f513..0a63ff2c27a 100644 --- a/integration-tests/benchmark/automation_test.go +++ b/integration-tests/benchmark/automation_test.go @@ -120,13 +120,15 @@ func TestAutomationBenchmark(t *testing.T) { if err = actions.TeardownRemoteSuite(keeperBenchmarkTest.TearDownVals(t)); err != nil { l.Error().Err(err).Msg("Error when tearing down remote suite") } else { - err := testEnvironment.Client.RemoveNamespace(testEnvironment.Cfg.Namespace) - if err != nil { - l.Error().Err(err).Msg("Error removing namespace") + if *config.GetAutomationConfig().Benchmark.DeleteJobsOnEnd { + err := testEnvironment.Client.RemoveNamespace(testEnvironment.Cfg.Namespace) + if err != nil { + l.Error().Err(err).Msg("Error removing namespace") + } } } }) - keeperBenchmarkTest.Setup(testEnvironment, &config) + keeperBenchmarkTest.Setup(testEnvironment, config) keeperBenchmarkTest.Run() } @@ -314,18 +316,19 @@ func SetupAutomationBenchmarkEnv(t *testing.T, keeperTestConfig types.Automation }, })) } - - // TODO we need to update the image in CTF, the old one is not available anymore - // deploy blockscout if running 
on simulated - // if testNetwork.Simulated { - // testEnvironment. - // AddChart(blockscout.New(&blockscout.Props{ - // Name: "geth-blockscout", - // WsURL: testNetwork.URLs[0], - // HttpURL: testNetwork.HTTPURLs[0]})) - // } - err := testEnvironment.Run() - require.NoError(t, err, "Error launching test environment") + var err error + if testNetwork.Simulated { + // TODO we need to update the image in CTF, the old one is not available anymore + // deploy blockscout if running on simulated + //testEnvironment. + // AddChart(blockscout.New(&blockscout.Props{ + // Name: "geth-blockscout", + // WsURL: testNetwork.URLs[0], + // HttpURL: testNetwork.HTTPURLs[0]})) + // Need to setup geth node before setting up chainlink nodes + err = testEnvironment.Run() + require.NoError(t, err, "Error launching test environment") + } if testEnvironment.WillUseRemoteRunner() { return testEnvironment, testNetwork diff --git a/integration-tests/chaos/automation_chaos_test.go b/integration-tests/chaos/automation_chaos_test.go index cde5962390a..c0823f482ad 100644 --- a/integration-tests/chaos/automation_chaos_test.go +++ b/integration-tests/chaos/automation_chaos_test.go @@ -240,7 +240,7 @@ func TestAutomationChaos(t *testing.T) { require.NoError(t, err, "Error tearing down environment") }) - a := automationv2.NewAutomationTestK8s(l, chainClient, chainlinkNodes) + a := automationv2.NewAutomationTestK8s(l, chainClient, chainlinkNodes, &config) a.SetMercuryCredentialName("cred1") a.RegistrySettings = actions.ReadRegistryConfig(config) a.RegistrySettings.RegistryVersion = rv @@ -259,11 +259,11 @@ func TestAutomationChaos(t *testing.T) { var consumersLogTrigger, consumersConditional []contracts.KeeperConsumer var upkeepidsConditional, upkeepidsLogTrigger []*big.Int - consumersConditional, upkeepidsConditional = actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, numberOfUpkeeps, big.NewInt(defaultLinkFunds), defaultUpkeepGasLimit, false, false, false, nil) + consumersConditional, upkeepidsConditional = actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, numberOfUpkeeps, big.NewInt(defaultLinkFunds), defaultUpkeepGasLimit, false, false, false, nil, &config) consumers := consumersConditional upkeepIDs := upkeepidsConditional if rv >= eth_contracts.RegistryVersion_2_1 { - consumersLogTrigger, upkeepidsLogTrigger = actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, numberOfUpkeeps, big.NewInt(defaultLinkFunds), defaultUpkeepGasLimit, true, false, false, nil) + consumersLogTrigger, upkeepidsLogTrigger = actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, numberOfUpkeeps, big.NewInt(defaultLinkFunds), defaultUpkeepGasLimit, true, false, false, nil, &config) consumers = append(consumersConditional, consumersLogTrigger...) upkeepIDs = append(upkeepidsConditional, upkeepidsLogTrigger...) 
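The call sites above all boil down to threading the same test config into the deployment helpers. Below is a minimal illustrative sketch of the resulting call path, assuming the integration-tests Go module's import paths and a funded Seth client; the upkeep count and LINK amount are placeholders, not values from this PR:

// fundUpkeepDeployers sketches the new flow: SetupMultiCallAndFundDeploymentAddresses
// loads the multicall contract from the test config when one is configured
// (UseExistingMultiCallContract) and deploys a fresh one otherwise, then fans
// LINK out to the ephemeral deployment addresses through it either way.
package example

import (
	"math/big"

	"github.com/smartcontractkit/chainlink-testing-framework/seth"

	"github.com/smartcontractkit/chainlink/integration-tests/actions"
	"github.com/smartcontractkit/chainlink/integration-tests/contracts"
	tt "github.com/smartcontractkit/chainlink/integration-tests/types"
)

func fundUpkeepDeployers(chainClient *seth.Client, linkToken contracts.LinkToken, cfg tt.AutomationTestConfig) error {
	// 10 upkeeps funded with 1 LINK (1e18 juels) each -- placeholder values.
	return actions.SetupMultiCallAndFundDeploymentAddresses(chainClient, linkToken, 10, big.NewInt(1e18), cfg)
}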
diff --git a/integration-tests/contracts/ethereum_contracts_automation.go b/integration-tests/contracts/ethereum_contracts_automation.go index d9d4a730c3c..5a3e405d92f 100644 --- a/integration-tests/contracts/ethereum_contracts_automation.go +++ b/integration-tests/contracts/ethereum_contracts_automation.go @@ -194,10 +194,11 @@ func (v *EthereumKeeperRegistry) RegistryOwnerAddress() common.Address { func (v *EthereumKeeperRegistry) SetConfigTypeSafe(ocrConfig OCRv2Config) error { txOpts := v.client.NewTXOpts() var err error + var decodedTx *seth.DecodedTransaction switch v.version { case ethereum.RegistryVersion_2_1: - _, err = v.client.Decode(v.registry2_1.SetConfigTypeSafe(txOpts, + decodedTx, err = v.client.Decode(v.registry2_1.SetConfigTypeSafe(txOpts, ocrConfig.Signers, ocrConfig.Transmitters, ocrConfig.F, @@ -206,7 +207,7 @@ func (v *EthereumKeeperRegistry) SetConfigTypeSafe(ocrConfig OCRv2Config) error ocrConfig.OffchainConfig, )) case ethereum.RegistryVersion_2_2: - _, err = v.client.Decode(v.registry2_2.SetConfigTypeSafe(txOpts, + decodedTx, err = v.client.Decode(v.registry2_2.SetConfigTypeSafe(txOpts, ocrConfig.Signers, ocrConfig.Transmitters, ocrConfig.F, @@ -215,7 +216,7 @@ func (v *EthereumKeeperRegistry) SetConfigTypeSafe(ocrConfig OCRv2Config) error ocrConfig.OffchainConfig, )) case ethereum.RegistryVersion_2_3: - _, err = v.client.Decode(v.registry2_3.SetConfigTypeSafe(txOpts, + decodedTx, err = v.client.Decode(v.registry2_3.SetConfigTypeSafe(txOpts, ocrConfig.Signers, ocrConfig.Transmitters, ocrConfig.F, @@ -228,7 +229,7 @@ func (v *EthereumKeeperRegistry) SetConfigTypeSafe(ocrConfig OCRv2Config) error default: return fmt.Errorf("SetConfigTypeSafe is not supported in keeper registry version %d", v.version) } - + v.l.Debug().Interface("decodedTx", decodedTx).Msg("SetConfigTypeSafe") return err } @@ -1528,7 +1529,7 @@ func deployRegistry23(client *seth.Client, opts *KeeperRegistryOpts) (KeeperRegi } // LoadKeeperRegistry returns deployed on given address EthereumKeeperRegistry -func LoadKeeperRegistry(l zerolog.Logger, client *seth.Client, address common.Address, registryVersion eth_contracts.KeeperRegistryVersion) (KeeperRegistry, error) { +func LoadKeeperRegistry(l zerolog.Logger, client *seth.Client, address common.Address, registryVersion eth_contracts.KeeperRegistryVersion, chainModuleAddress common.Address) (KeeperRegistry, error) { var keeper *EthereumKeeperRegistry var err error switch registryVersion { @@ -1545,7 +1546,7 @@ func LoadKeeperRegistry(l zerolog.Logger, client *seth.Client, address common.Ad case eth_contracts.RegistryVersion_2_2: // why the contract name is not the same as the actual contract name? 
keeper, err = loadRegistry2_2(client, address) case eth_contracts.RegistryVersion_2_3: - keeper, err = loadRegistry2_3(client, address) + keeper, err = loadRegistry2_3(client, address, chainModuleAddress) default: return nil, fmt.Errorf("keeper registry version %d is not supported", registryVersion) } @@ -1685,24 +1686,24 @@ func loadRegistry2_2(client *seth.Client, address common.Address) (*EthereumKeep }, nil } -func loadRegistry2_3(client *seth.Client, address common.Address) (*EthereumKeeperRegistry, error) { - abi, err := iregistry23.IAutomationRegistryMaster23MetaData.GetAbi() +func loadRegistry2_3(client *seth.Client, address, chainModuleAddress common.Address) (*EthereumKeeperRegistry, error) { + + loader := seth.NewContractLoader[iregistry23.IAutomationRegistryMaster23](client) + instance, err := loader.LoadContract("AutomationRegistry2_3", address, iregistry23.IAutomationRegistryMaster23MetaData.GetAbi, iregistry23.NewIAutomationRegistryMaster23) if err != nil { - return &EthereumKeeperRegistry{}, fmt.Errorf("failed to get AutomationRegistry2_3 ABI: %w", err) + return &EthereumKeeperRegistry{}, fmt.Errorf("failed to load AutomationRegistry2_3 instance: %w", err) } - client.ContractStore.AddABI("AutomationRegistry2_3", *abi) - client.ContractStore.AddBIN("AutomationRegistry2_3", common.FromHex(iregistry23.IAutomationRegistryMaster23MetaData.Bin)) - - instance, err := iregistry23.NewIAutomationRegistryMaster23(address, wrappers.MustNewWrappedContractBackend(nil, client)) + chainModule, err := loadChainModule(client, chainModuleAddress) if err != nil { - return &EthereumKeeperRegistry{}, fmt.Errorf("failed to instantiate AutomationRegistry2_3 instance: %w", err) + return &EthereumKeeperRegistry{}, fmt.Errorf("failed to load chain module: %w", err) } return &EthereumKeeperRegistry{ address: &address, client: client, registry2_3: instance, + chainModule: chainModule, }, nil } @@ -1758,6 +1759,26 @@ func deployOptimismModule(client *seth.Client) (common.Address, error) { return data.Address, nil } +func loadChainModule(client *seth.Client, address common.Address) (*i_chain_module.IChainModule, error) { + abi, err := i_chain_module.IChainModuleMetaData.GetAbi() + if err != nil { + return &i_chain_module.IChainModule{}, fmt.Errorf("failed to get IChainModule ABI: %w", err) + } + + client.ContractStore.AddABI("IChainModule", *abi) + client.ContractStore.AddBIN("IChainModule", common.FromHex(i_chain_module.IChainModuleMetaData.Bin)) + + chainModule, err := i_chain_module.NewIChainModule( + address, + wrappers.MustNewWrappedContractBackend(nil, client), + ) + if err != nil { + return &i_chain_module.IChainModule{}, fmt.Errorf("failed to instantiate IChainModule instance: %w", err) + } + + return chainModule, nil +} + func deployBaseModule(client *seth.Client) (common.Address, error) { abi, err := chain_module_base.ChainModuleBaseMetaData.GetAbi() if err != nil { @@ -2193,17 +2214,10 @@ func LoadKeeperRegistrar(client *seth.Client, address common.Address, registryVe if registryVersion == eth_contracts.RegistryVersion_1_1 || registryVersion == eth_contracts.RegistryVersion_1_2 || registryVersion == eth_contracts.RegistryVersion_1_3 { - abi, err := keeper_registrar_wrapper1_2.KeeperRegistrarMetaData.GetAbi() - if err != nil { - return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to get KeeperRegistrar1_2 ABI: %w", err) - } - - client.ContractStore.AddABI("KeeperRegistrar1_2", *abi) - client.ContractStore.AddBIN("KeeperRegistrar1_2", 
common.FromHex(keeper_registrar_wrapper1_2.KeeperRegistrarMetaData.Bin)) - - instance, err := keeper_registrar_wrapper1_2.NewKeeperRegistrar(address, wrappers.MustNewWrappedContractBackend(nil, client)) + loader := seth.NewContractLoader[keeper_registrar_wrapper1_2.KeeperRegistrar](client) + instance, err := loader.LoadContract("KeeperRegistrar1_2", address, keeper_registrar_wrapper1_2.KeeperRegistrarMetaData.GetAbi, keeper_registrar_wrapper1_2.NewKeeperRegistrar) if err != nil { - return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to instantiate KeeperRegistrar1_2 instance: %w", err) + return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to load KeeperRegistrar1_2 instance: %w", err) } return &EthereumKeeperRegistrar{ @@ -2212,44 +2226,43 @@ func LoadKeeperRegistrar(client *seth.Client, address common.Address, registryVe registrar: instance, }, err } else if registryVersion == eth_contracts.RegistryVersion_2_0 { - abi, err := keeper_registrar_wrapper2_0.KeeperRegistrarMetaData.GetAbi() + loader := seth.NewContractLoader[keeper_registrar_wrapper2_0.KeeperRegistrar](client) + instance, err := loader.LoadContract("KeeperRegistrar2_0", address, keeper_registrar_wrapper2_0.KeeperRegistrarMetaData.GetAbi, keeper_registrar_wrapper2_0.NewKeeperRegistrar) if err != nil { - return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to get KeeperRegistrar2_0 ABI: %w", err) + return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to load KeeperRegistrar2_0 instance: %w", err) } - client.ContractStore.AddABI("KeeperRegistrar2_0", *abi) - client.ContractStore.AddBIN("KeeperRegistrar2_0", common.FromHex(keeper_registrar_wrapper2_0.KeeperRegistrarMetaData.Bin)) - - instance, err := keeper_registrar_wrapper2_0.NewKeeperRegistrar(address, wrappers.MustNewWrappedContractBackend(nil, client)) + return &EthereumKeeperRegistrar{ + address: &address, + client: client, + registrar20: instance, + }, nil + } else if registryVersion == eth_contracts.RegistryVersion_2_1 || registryVersion == eth_contracts.RegistryVersion_2_2 { + loader := seth.NewContractLoader[registrar21.AutomationRegistrar](client) + instance, err := loader.LoadContract("KeeperRegistrar2_1", address, registrar21.AutomationRegistrarMetaData.GetAbi, registrar21.NewAutomationRegistrar) if err != nil { - return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to instantiate KeeperRegistrar2_0 instance: %w", err) + return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to load KeeperRegistrar2_1 instance: %w", err) } return &EthereumKeeperRegistrar{ address: &address, client: client, - registrar20: instance, + registrar21: instance, }, nil - } - - abi, err := registrar21.AutomationRegistrarMetaData.GetAbi() - if err != nil { - return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to get KeeperRegistrar2_1 ABI: %w", err) - } - - client.ContractStore.AddABI("KeeperRegistrar2_1", *abi) - client.ContractStore.AddBIN("KeeperRegistrar2_1", common.FromHex(registrar21.AutomationRegistrarMetaData.Bin)) + } else if registryVersion == eth_contracts.RegistryVersion_2_3 { + loader := seth.NewContractLoader[registrar23.AutomationRegistrar](client) + instance, err := loader.LoadContract("KeeperRegistrar2_3", address, registrar23.AutomationRegistrarMetaData.GetAbi, registrar23.NewAutomationRegistrar) + if err != nil { + return &EthereumKeeperRegistrar{}, fmt.Errorf("failed to load KeeperRegistrar2_3 instance: %w", err) + } - instance, err := registrar21.NewAutomationRegistrar(address, wrappers.MustNewWrappedContractBackend(nil, client)) - if err != nil { - return 
&EthereumKeeperRegistrar{}, fmt.Errorf("failed to instantiate KeeperRegistrar2_1 instance: %w", err) + return &EthereumKeeperRegistrar{ + address: &address, + client: client, + registrar23: instance, + }, nil } - - return &EthereumKeeperRegistrar{ - address: &address, - client: client, - registrar21: instance, - }, nil + return &EthereumKeeperRegistrar{}, fmt.Errorf("unsupported registry version: %v", registryVersion) } type EthereumAutomationLogTriggeredStreamsLookupUpkeepConsumer struct { @@ -2702,21 +2715,42 @@ func (v *EthereumAutomationConsumerBenchmark) GetUpkeepCount(ctx context.Context }, id) } -// DeployKeeperConsumerBenchmark deploys a keeper consumer benchmark contract with a standard contract backend -func DeployKeeperConsumerBenchmark(client *seth.Client) (AutomationConsumerBenchmark, error) { - return deployKeeperConsumerBenchmarkWithWrapperFn(client, func(client *seth.Client) *wrappers.WrappedContractBackend { +// DeployAutomationConsumerBenchmark deploys a keeper consumer benchmark contract with a standard contract backend +func DeployAutomationConsumerBenchmark(client *seth.Client) (AutomationConsumerBenchmark, error) { + return deployAutomationConsumerBenchmarkWithWrapperFn(client, func(client *seth.Client) *wrappers.WrappedContractBackend { return wrappers.MustNewWrappedContractBackend(nil, client) }) } -// DeployKeeperConsumerBenchmarkWithRetry deploys a keeper consumer benchmark contract with a read-only operations retrying contract backend -func DeployKeeperConsumerBenchmarkWithRetry(client *seth.Client, logger zerolog.Logger, maxAttempts uint, retryDelay time.Duration) (AutomationConsumerBenchmark, error) { - return deployKeeperConsumerBenchmarkWithWrapperFn(client, func(client *seth.Client) *wrappers.WrappedContractBackend { +func LoadAutomationConsumerBenchmark(client *seth.Client, address common.Address) (*EthereumAutomationConsumerBenchmark, error) { + abi, err := automation_consumer_benchmark.AutomationConsumerBenchmarkMetaData.GetAbi() + if err != nil { + return &EthereumAutomationConsumerBenchmark{}, fmt.Errorf("failed to get AutomationConsumerBenchmark ABI: %w", err) + } + + client.ContractStore.AddABI("AutomationConsumerBenchmark", *abi) + client.ContractStore.AddBIN("AutomationConsumerBenchmark", common.FromHex(automation_consumer_benchmark.AutomationConsumerBenchmarkMetaData.Bin)) + + consumer, err := automation_consumer_benchmark.NewAutomationConsumerBenchmark(address, wrappers.MustNewWrappedContractBackend(nil, client)) + if err != nil { + return &EthereumAutomationConsumerBenchmark{}, fmt.Errorf("failed to instantiate EthereumAutomationConsumerBenchmark instance: %w", err) + } + + return &EthereumAutomationConsumerBenchmark{ + client: client, + consumer: consumer, + address: &address, + }, nil +} + +// DeployAutomationConsumerBenchmarkWithRetry deploys a keeper consumer benchmark contract with a read-only operations retrying contract backend +func DeployAutomationConsumerBenchmarkWithRetry(client *seth.Client, logger zerolog.Logger, maxAttempts uint, retryDelay time.Duration) (AutomationConsumerBenchmark, error) { + return deployAutomationConsumerBenchmarkWithWrapperFn(client, func(client *seth.Client) *wrappers.WrappedContractBackend { return wrappers.MustNewRetryingWrappedContractBackend(client, logger, maxAttempts, retryDelay) }) } -func deployKeeperConsumerBenchmarkWithWrapperFn(client *seth.Client, wrapperConstrFn func(client *seth.Client) *wrappers.WrappedContractBackend) (AutomationConsumerBenchmark, error) { +func 
deployAutomationConsumerBenchmarkWithWrapperFn(client *seth.Client, wrapperConstrFn func(client *seth.Client) *wrappers.WrappedContractBackend) (AutomationConsumerBenchmark, error) { abi, err := automation_consumer_benchmark.AutomationConsumerBenchmarkMetaData.GetAbi() if err != nil { return &EthereumAutomationConsumerBenchmark{}, fmt.Errorf("failed to get AutomationConsumerBenchmark ABI: %w", err) } @@ -2738,8 +2772,8 @@ func deployKeeperConsumerBenchmarkWithWrapperCons }, nil } -// KeeperConsumerBenchmarkUpkeepObserver is a header subscription that awaits for a round of upkeeps -type KeeperConsumerBenchmarkUpkeepObserver struct { +// AutomationConsumerBenchmarkUpkeepObserver is a header subscription that waits for a round of upkeeps +type AutomationConsumerBenchmarkUpkeepObserver struct { instance AutomationConsumerBenchmark registry KeeperRegistry upkeepID *big.Int @@ -2763,9 +2797,9 @@ type KeeperConsumerBenchmarkUpkeepObserver struct { l zerolog.Logger } -// NewKeeperConsumerBenchmarkUpkeepObserver provides a new instance of a NewKeeperConsumerBenchmarkUpkeepObserver +// NewAutomationConsumerBenchmarkUpkeepObserver provides a new instance of an AutomationConsumerBenchmarkUpkeepObserver // Used to track and log benchmark test results for keepers -func NewKeeperConsumerBenchmarkUpkeepObserver( +func NewAutomationConsumerBenchmarkUpkeepObserver( contract AutomationConsumerBenchmark, registry KeeperRegistry, upkeepID *big.Int, @@ -2775,8 +2809,8 @@ func NewKeeperConsumerBenchmarkUpkeepObserver( upkeepIndex int64, firstEligibleBuffer int64, logger zerolog.Logger, -) *KeeperConsumerBenchmarkUpkeepObserver { - return &KeeperConsumerBenchmarkUpkeepObserver{ +) *AutomationConsumerBenchmarkUpkeepObserver { + return &AutomationConsumerBenchmarkUpkeepObserver{ instance: contract, registry: registry, upkeepID: upkeepID, @@ -2798,7 +2832,7 @@ func NewKeeperConsumerBenchmarkUpkeepObserver( // ReceiveHeader will query the latest Keeper round and check to see whether upkeep was performed, it returns // true when observation has finished. 
-func (o *KeeperConsumerBenchmarkUpkeepObserver) ReceiveHeader(receivedHeader *blockchain.SafeEVMHeader) (bool, error) { +func (o *AutomationConsumerBenchmarkUpkeepObserver) ReceiveHeader(receivedHeader *blockchain.SafeEVMHeader) (bool, error) { if receivedHeader.Number.Uint64() <= o.lastBlockNum { // Uncle / reorg we won't count return false, nil } @@ -2901,12 +2935,12 @@ func (o *KeeperConsumerBenchmarkUpkeepObserver) ReceiveHeader(receivedHeader *bl } // Complete returns whether watching for upkeeps has completed -func (o *KeeperConsumerBenchmarkUpkeepObserver) Complete() bool { +func (o *AutomationConsumerBenchmarkUpkeepObserver) Complete() bool { return o.complete } // LogDetails logs the results of the benchmark test to testreporter -func (o *KeeperConsumerBenchmarkUpkeepObserver) LogDetails() { +func (o *AutomationConsumerBenchmarkUpkeepObserver) LogDetails() { report := testreporters.KeeperBenchmarkTestReport{ ContractAddress: o.instance.Address(), TotalEligibleCount: o.countEligible, diff --git a/integration-tests/load/automationv2_1/automationv2_1_test.go b/integration-tests/load/automationv2_1/automationv2_1_test.go index 6c49d1be44b..3cd931ecef0 100644 --- a/integration-tests/load/automationv2_1/automationv2_1_test.go +++ b/integration-tests/load/automationv2_1/automationv2_1_test.go @@ -311,7 +311,7 @@ Load Config: multicallAddress, err := contracts.DeployMultiCallContract(chainClient) require.NoError(t, err, "Error deploying multicall contract") - a := automationv2.NewAutomationTestK8s(l, chainClient, chainlinkNodes) + a := automationv2.NewAutomationTestK8s(l, chainClient, chainlinkNodes, &loadedTestConfig) a.RegistrySettings = actions.ReadRegistryConfig(loadedTestConfig) a.RegistrySettings.RegistryVersion = registryVersion a.PluginConfig = actions.ReadPluginConfig(loadedTestConfig) diff --git a/integration-tests/reorg/automation_reorg_test.go b/integration-tests/reorg/automation_reorg_test.go index 85ca4b4264a..fde37d9f06f 100644 --- a/integration-tests/reorg/automation_reorg_test.go +++ b/integration-tests/reorg/automation_reorg_test.go @@ -132,7 +132,7 @@ func TestAutomationReorg(t *testing.T) { gethRPCClient := ctfClient.NewRPCClient(evmNetwork.HTTPURLs[0], nil) - a := automationv2.NewAutomationTestDocker(l, sethClient, nodeClients) + a := automationv2.NewAutomationTestDocker(l, sethClient, nodeClients, &config) a.SetMercuryCredentialName("cred1") a.RegistrySettings = actions.ReadRegistryConfig(config) a.RegistrySettings.RegistryVersion = registryVersion @@ -169,6 +169,7 @@ func TestAutomationReorg(t *testing.T) { false, false, a.WETHToken, + &config, ) if isLogTrigger { diff --git a/integration-tests/smoke/automation_test.go b/integration-tests/smoke/automation_test.go index f5a329a1a29..2e56237e9ca 100644 --- a/integration-tests/smoke/automation_test.go +++ b/integration-tests/smoke/automation_test.go @@ -20,7 +20,7 @@ import ( "github.com/onsi/gomega" "github.com/stretchr/testify/require" - ctfTestEnv "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" + ctftestenv "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" @@ -142,6 +142,7 @@ func SetupAutomationBasic(t *testing.T, nodeUpgrade bool) { isMercury, isBillingTokenNative, a.WETHToken, + &cfg, ) // Do it in two separate loops, so we don't end up setting up one upkeep, but starting the consumer for another one @@ 
-268,20 +269,7 @@ func TestSetUpkeepTriggerConfig(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(automationDefaultLinkFunds), - automationDefaultUpkeepGasLimit, - true, - false, - false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, true, false, false, nil, &config) // Start log trigger based upkeeps for all consumers for i := 0; i < len(consumers); i++ { @@ -451,20 +439,7 @@ func TestAutomationAddFunds(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(1), - automationDefaultUpkeepGasLimit, - false, - false, - false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(1), automationDefaultUpkeepGasLimit, false, false, false, nil, &config) t.Cleanup(func() { actions.GetStalenessReportCleanupFn(t, a.Logger, a.ChainClient, sb, a.Registry, registryVersion)() @@ -531,20 +506,7 @@ func TestAutomationPauseUnPause(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(automationDefaultLinkFunds), - automationDefaultUpkeepGasLimit, - false, - false, - false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, false, false, false, nil, &config) t.Cleanup(func() { actions.GetStalenessReportCleanupFn(t, a.Logger, a.ChainClient, sb, a.Registry, registryVersion)() @@ -632,20 +594,7 @@ func TestAutomationRegisterUpkeep(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(automationDefaultLinkFunds), - automationDefaultUpkeepGasLimit, - false, - false, - false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, false, false, false, nil, &config) t.Cleanup(func() { actions.GetStalenessReportCleanupFn(t, a.Logger, a.ChainClient, sb, a.Registry, registryVersion)() @@ -728,20 +677,7 @@ func TestAutomationPauseRegistry(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(automationDefaultLinkFunds), - automationDefaultUpkeepGasLimit, - false, - false, - 
false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, false, false, false, nil, &config) t.Cleanup(func() { actions.GetStalenessReportCleanupFn(t, a.Logger, a.ChainClient, sb, a.Registry, registryVersion)() @@ -808,20 +744,7 @@ func TestAutomationKeeperNodesDown(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(automationDefaultLinkFunds), - automationDefaultUpkeepGasLimit, - false, - false, - false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, false, false, false, nil, &config) t.Cleanup(func() { actions.GetStalenessReportCleanupFn(t, a.Logger, a.ChainClient, sb, a.Registry, registryVersion)() @@ -929,6 +852,7 @@ func TestAutomationPerformSimulation(t *testing.T) { 5, // Interval of blocks that upkeeps are expected to be performed 100000, // How much gas should be burned on checkUpkeep() calls 4000000, // How much gas should be burned on performUpkeep() calls. Initially set higher than defaultUpkeepGasLimit + &config, ) t.Cleanup(func() { @@ -1000,6 +924,7 @@ func TestAutomationCheckPerformGasLimit(t *testing.T) { 5, // Interval of blocks that upkeeps are expected to be performed 100000, // How much gas should be burned on checkUpkeep() calls 4000000, // How much gas should be burned on performUpkeep() calls. 
Initially set higher than defaultUpkeepGasLimit + &config, ) t.Cleanup(func() { @@ -1152,6 +1077,7 @@ func TestUpdateCheckData(t *testing.T) { big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, []byte(automationExpectedData), + &config, ) t.Cleanup(func() { @@ -1223,20 +1149,7 @@ func TestSetOffchainConfigWithMaxGasPrice(t *testing.T) { sb, err := a.ChainClient.Client.BlockNumber(context.Background()) require.NoError(t, err, "Failed to get start block") - consumers, upkeepIDs := actions.DeployConsumers( - t, - a.ChainClient, - a.Registry, - a.Registrar, - a.LinkToken, - defaultAmountOfUpkeeps, - big.NewInt(automationDefaultLinkFunds), - automationDefaultUpkeepGasLimit, - false, - false, - false, - nil, - ) + consumers, upkeepIDs := actions.DeployConsumers(t, a.ChainClient, a.Registry, a.Registrar, a.LinkToken, defaultAmountOfUpkeeps, big.NewInt(automationDefaultLinkFunds), automationDefaultUpkeepGasLimit, false, false, false, nil, &config) t.Cleanup(func() { actions.GetStalenessReportCleanupFn(t, a.Logger, a.ChainClient, sb, a.Registry, registryVersion)() @@ -1435,7 +1348,7 @@ func setupAutomationTestDocker( _ = actions.ReturnFundsFromNodes(l, sethClient, contracts.ChainlinkClientToChainlinkNodeWithKeysAndAddress(env.ClCluster.NodeAPIs())) }) - a := automationv2.NewAutomationTestDocker(l, sethClient, nodeClients) + a := automationv2.NewAutomationTestDocker(l, sethClient, nodeClients, automationTestConfig) a.SetMercuryCredentialName("cred1") a.RegistrySettings = registryConfig a.RegistrarSettings = contracts.KeeperRegistrarSettings{ @@ -1450,16 +1363,16 @@ func setupAutomationTestDocker( a.SetDockerEnv(env) if isMercuryV02 || isMercuryV03 { - var imposters []ctfTestEnv.KillgraveImposter - mercuryv03Mock200 := ctfTestEnv.KillgraveImposter{ - Request: ctfTestEnv.KillgraveRequest{ + var imposters []ctftestenv.KillgraveImposter + mercuryv03Mock200 := ctftestenv.KillgraveImposter{ + Request: ctftestenv.KillgraveRequest{ Method: http.MethodGet, Endpoint: "/api/v1/reports/bulk", SchemaFile: nil, Params: &map[string]string{"feedIDs": "0x00028c915d6af0fd66bba2d0fc9405226bca8d6806333121a7d9832103d1563c", "timestamp": "{[\\d+]}"}, Headers: nil, }, - Response: ctfTestEnv.KillgraveResponse{ + Response: ctftestenv.KillgraveResponse{ Status: 200, Body: 
`{"reports":[{"feedID":"0x00028c915d6af0fd66bba2d0fc9405226bca8d6806333121a7d9832103d1563c","validFromTimestamp":0,"observationsTimestamp":0,"fullReport":"0x00066dfcd1ed2d95b18c948dbc5bd64c687afe93e4ca7d663ddec14c20090ad80000000000000000000000000000000000000000000000000000000000081401000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000280000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001204554482d5553442d415242495452554d2d544553544e455400000000000000000000000000000000000000000000000000000000000000000000000064891c98000000000000000000000000000000000000000000000000000000289ad8d367000000000000000000000000000000000000000000000000000000289acf0b38000000000000000000000000000000000000000000000000000000289b3da40000000000000000000000000000000000000000000000000000000000018ae7ce74d9fa252a8983976eab600dc7590c778d04813430841bc6e765c34cd81a168d00000000000000000000000000000000000000000000000000000000018ae7cb0000000000000000000000000000000000000000000000000000000064891c98000000000000000000000000000000000000000000000000000000000000000260412b94e525ca6cedc9f544fd86f77606d52fe731a5d069dbe836a8bfc0fb8c911963b0ae7a14971f3b4621bffb802ef0605392b9a6c89c7fab1df8633a5ade00000000000000000000000000000000000000000000000000000000000000024500c2f521f83fba5efc2bf3effaaedde43d0a4adff785c1213b712a3aed0d8157642a84324db0cf9695ebd27708d4608eb0337e0dd87b0e43f0fa70c700d911"}]}`, BodyFile: nil, @@ -1468,15 +1381,15 @@ func setupAutomationTestDocker( }, } - mercuryv02Mock200 := ctfTestEnv.KillgraveImposter{ - Request: ctfTestEnv.KillgraveRequest{ + mercuryv02Mock200 := ctftestenv.KillgraveImposter{ + Request: ctftestenv.KillgraveRequest{ Method: http.MethodGet, Endpoint: "/client", SchemaFile: nil, Params: &map[string]string{"feedIdHex": "{0x00028c915d6af0fd66bba2d0fc9405226bca8d6806333121a7d9832103d1563c|0x4554482d5553442d415242495452554d2d544553544e45540000000000000000}", "blockNumber": "{[\\d+]}"}, Headers: nil, }, - Response: ctfTestEnv.KillgraveResponse{ + Response: ctftestenv.KillgraveResponse{ Status: 200, Body: 
`{"chainlinkBlob":"0x0001c38d71fed6c320b90e84b6f559459814d068e2a1700adc931ca9717d4fe70000000000000000000000000000000000000000000000000000000001a80b52b4bf1233f9cb71144a253a1791b202113c4ab4a92fa1b176d684b4959666ff8200000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001004254432d5553442d415242495452554d2d544553544e4554000000000000000000000000000000000000000000000000000000000000000000000000645570be000000000000000000000000000000000000000000000000000002af2b818dc5000000000000000000000000000000000000000000000000000002af2426faf3000000000000000000000000000000000000000000000000000002af32dc209700000000000000000000000000000000000000000000000000000000012130f8df0a9745bb6ad5e2df605e158ba8ad8a33ef8a0acf9851f0f01668a3a3f2b68600000000000000000000000000000000000000000000000000000000012130f60000000000000000000000000000000000000000000000000000000000000002c4a7958dce105089cf5edb68dad7dcfe8618d7784eb397f97d5a5fade78c11a58275aebda478968e545f7e3657aba9dcbe8d44605e4c6fde3e24edd5e22c94270000000000000000000000000000000000000000000000000000000000000002459c12d33986018a8959566d145225f0c4a4e61a9a3f50361ccff397899314f0018162cf10cd89897635a0bb62a822355bd199d09f4abe76e4d05261bb44733d"}`, BodyFile: nil, diff --git a/integration-tests/smoke/log_poller_test.go b/integration-tests/smoke/log_poller_test.go index c7f5483ac15..b5891e7a3e8 100644 --- a/integration-tests/smoke/log_poller_test.go +++ b/integration-tests/smoke/log_poller_test.go @@ -306,20 +306,7 @@ func prepareEnvironment(l zerolog.Logger, t *testing.T, testConfig *tc.TestConfi logScannerSettings, ) - _, upkeepIDs := actions.DeployConsumers( - t, - chainClient, - registry, - registrar, - linkToken, - upKeepsNeeded, - big.NewInt(int64(9e18)), - uint32(2500000), - true, - false, - false, - nil, - ) + _, upkeepIDs := actions.DeployLegacyConsumers(t, chainClient, registry, registrar, linkToken, upKeepsNeeded, big.NewInt(int64(9e18)), uint32(2500000), true, false, false, nil) err = logpoller.AssertUpkeepIdsUniqueness(upkeepIDs) require.NoError(t, err, "Error asserting upkeep ids uniqueness") diff --git a/integration-tests/testconfig/automation/config.go b/integration-tests/testconfig/automation/config.go index 2dc68ebf8f8..e462ff15c17 100644 --- a/integration-tests/testconfig/automation/config.go +++ b/integration-tests/testconfig/automation/config.go @@ -2,9 +2,12 @@ package automation import ( "errors" + "fmt" "math/big" "time" + "github.com/ethereum/go-ethereum/common" + "github.com/smartcontractkit/chainlink-testing-framework/lib/blockchain" ) @@ -15,6 +18,7 @@ type Config struct { AutomationConfig *AutomationConfig `toml:"AutomationConfig"` Resiliency *ResiliencyConfig `toml:"Resiliency"` Benchmark *Benchmark `toml:"Benchmark"` + Contracts *Contracts `toml:"Contracts"` } func (c *Config) Validate() error { @@ -417,3 +421,429 @@ func (c *ResiliencyConfig) Validate() error { return nil } + +type Contracts struct { + ShouldBeUsed *bool `toml:"use"` + LinkTokenAddress *string `toml:"link_token"` + WethAddress *string `toml:"weth"` + TranscoderAddress *string `toml:"transcoder"` + ChainModuleAddress *string `toml:"chain_module"` + RegistryAddress *string `toml:"registry"` + RegistrarAddress *string `toml:"registrar"` + LinkEthFeedAddress *string `toml:"link_eth_feed"` + EthGasFeedAddress *string 
`toml:"eth_gas_feed"`
+	EthUSDFeedAddress       *string                    `toml:"eth_usd_feed"`
+	LinkUSDFeedAddress      *string                    `toml:"link_usd_feed"`
+	UpkeepContractAddresses []string                   `toml:"upkeep_contracts"`
+	MultiCallAddress        *string                    `toml:"multicall"`
+	Settings                map[string]ContractSetting `toml:"Settings"`
+}
+
+func (o *Contracts) Validate() error {
+	if o.LinkTokenAddress != nil && !common.IsHexAddress(*o.LinkTokenAddress) {
+		return errors.New("link_token must be a valid ethereum address")
+	}
+	if o.WethAddress != nil && !common.IsHexAddress(*o.WethAddress) {
+		return errors.New("weth must be a valid ethereum address")
+	}
+	if o.TranscoderAddress != nil && !common.IsHexAddress(*o.TranscoderAddress) {
+		return errors.New("transcoder must be a valid ethereum address")
+	}
+	if o.ChainModuleAddress != nil && !common.IsHexAddress(*o.ChainModuleAddress) {
+		return errors.New("chain_module must be a valid ethereum address")
+	}
+	if o.RegistryAddress != nil && !common.IsHexAddress(*o.RegistryAddress) {
+		return errors.New("registry must be a valid ethereum address")
+	}
+	if o.RegistrarAddress != nil && !common.IsHexAddress(*o.RegistrarAddress) {
+		return errors.New("registrar must be a valid ethereum address")
+	}
+	if o.LinkEthFeedAddress != nil && !common.IsHexAddress(*o.LinkEthFeedAddress) {
+		return errors.New("link_eth_feed must be a valid ethereum address")
+	}
+	if o.EthGasFeedAddress != nil && !common.IsHexAddress(*o.EthGasFeedAddress) {
+		return errors.New("eth_gas_feed must be a valid ethereum address")
+	}
+	if o.EthUSDFeedAddress != nil && !common.IsHexAddress(*o.EthUSDFeedAddress) {
+		return errors.New("eth_usd_feed must be a valid ethereum address")
+	}
+	if o.LinkUSDFeedAddress != nil && !common.IsHexAddress(*o.LinkUSDFeedAddress) {
+		return errors.New("link_usd_feed must be a valid ethereum address")
+	}
+	if o.MultiCallAddress != nil && !common.IsHexAddress(*o.MultiCallAddress) {
+		return errors.New("multicall must be a valid ethereum address")
+	}
+	if o.UpkeepContractAddresses != nil {
+		allEnabled := make(map[bool]int)
+		allConfigure := make(map[bool]int)
+		for _, address := range o.UpkeepContractAddresses {
+			if !common.IsHexAddress(address) {
+				return fmt.Errorf("upkeep_contracts must be valid ethereum addresses, but %s is not", address)
+			}
+
+			if v, ok := o.Settings[address]; ok {
+				if v.ShouldBeUsed != nil {
+					allEnabled[*v.ShouldBeUsed]++
+				} else {
+					allEnabled[true]++
+				}
+				if v.Configure != nil {
+					allConfigure[*v.Configure]++
+				} else {
+					allConfigure[true]++
+				}
+			}
+		}
+
+		if allEnabled[true] > 0 && allEnabled[false] > 0 {
+			return errors.New("either all or none of the upkeep_contracts must be used")
+		}
+
+		if allConfigure[true] > 0 && allConfigure[false] > 0 {
+			return errors.New("either all or none of the upkeep_contracts must be configured")
+		}
+	}
+
+	return nil
+}
+
+func (c *Config) UseExistingContracts() bool {
+	if c.Contracts == nil {
+		return false
+	}
+
+	if c.Contracts.ShouldBeUsed != nil {
+		return *c.Contracts.ShouldBeUsed
+	}
+
+	return false
+}
+
+func (c *Config) LinkTokenContractAddress() (common.Address, error) {
+	if c.Contracts != nil && c.Contracts.LinkTokenAddress != nil {
+		return common.HexToAddress(*c.Contracts.LinkTokenAddress), nil
+	}
+
+	return common.Address{}, errors.New("link token address must be set")
+}
+
+func (c *Config) WethContractAddress() (common.Address, error) {
+	if c.Contracts != nil && c.Contracts.WethAddress != nil {
+		return common.HexToAddress(*c.Contracts.WethAddress), nil
+	}
+
+	return common.Address{}, errors.New("weth address
must be set") +} + +func (c *Config) TranscoderContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.TranscoderAddress != nil { + return common.HexToAddress(*c.Contracts.TranscoderAddress), nil + } + + return common.Address{}, errors.New("transcoder address must be set") +} + +func (c *Config) ChainModuleContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.ChainModuleAddress != nil { + return common.HexToAddress(*c.Contracts.ChainModuleAddress), nil + } + + return common.Address{}, errors.New("chain module address must be set") +} + +func (c *Config) RegistryContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.RegistryAddress != nil { + return common.HexToAddress(*c.Contracts.RegistryAddress), nil + } + + return common.Address{}, errors.New("registry address must be set") +} + +func (c *Config) RegistrarContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.RegistrarAddress != nil { + return common.HexToAddress(*c.Contracts.RegistrarAddress), nil + } + + return common.Address{}, errors.New("registrar address must be set") +} + +func (c *Config) LinkEthFeedContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.LinkEthFeedAddress != nil { + return common.HexToAddress(*c.Contracts.LinkEthFeedAddress), nil + } + + return common.Address{}, errors.New("link eth feed address must be set") +} + +func (c *Config) EthGasFeedContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.EthGasFeedAddress != nil { + return common.HexToAddress(*c.Contracts.EthGasFeedAddress), nil + } + + return common.Address{}, errors.New("eth gas feed address must be set") +} + +func (c *Config) EthUSDFeedContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.EthUSDFeedAddress != nil { + return common.HexToAddress(*c.Contracts.EthUSDFeedAddress), nil + } + + return common.Address{}, errors.New("eth usd feed address must be set") +} + +func (c *Config) LinkUSDFeedContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.LinkUSDFeedAddress != nil { + return common.HexToAddress(*c.Contracts.LinkUSDFeedAddress), nil + } + + return common.Address{}, errors.New("link usd feed address must be set") +} + +func (c *Config) UpkeepContractAddresses() ([]common.Address, error) { + if c.Contracts != nil && c.Contracts.UpkeepContractAddresses != nil { + addresses := make([]common.Address, len(c.Contracts.UpkeepContractAddresses)) + for i, address := range c.Contracts.UpkeepContractAddresses { + addresses[i] = common.HexToAddress(address) + } + return addresses, nil + } + + return nil, errors.New("upkeep contract addresses must be set") +} + +func (c *Config) MultiCallContractAddress() (common.Address, error) { + if c.Contracts != nil && c.Contracts.MultiCallAddress != nil { + return common.HexToAddress(*c.Contracts.MultiCallAddress), nil + } + + return common.Address{}, errors.New("multicall address must be set") +} + +func (c *Config) UseExistingLinkTokenContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.LinkTokenAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.LinkTokenAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingWethContract() bool { + if !c.UseExistingContracts() { + return false + } + + if 
c.Contracts.WethAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.WethAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingTranscoderContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.TranscoderAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.TranscoderAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingRegistryContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.RegistryAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.RegistryAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingRegistrarContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.RegistrarAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.RegistrarAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingLinkEthFeedContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.LinkEthFeedAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.LinkEthFeedAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingEthGasFeedContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.EthGasFeedAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.EthGasFeedAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingEthUSDFeedContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.EthUSDFeedAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.EthUSDFeedAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingLinkUSDFeedContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.LinkUSDFeedAddress == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.LinkUSDFeedAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +func (c *Config) UseExistingUpkeepContracts() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.UpkeepContractAddresses == nil { + return false + } + + if len(c.Contracts.Settings) == 0 { + return true + } + + for _, address := range c.Contracts.UpkeepContractAddresses { + if v, ok := c.Contracts.Settings[address]; ok { + if v.ShouldBeUsed != nil && *v.ShouldBeUsed { + return true + } + } + } + + return false +} + +func (c *Config) UseExistingMultiCallContract() bool { + if !c.UseExistingContracts() { + return false + } + + if c.Contracts.MultiCallAddress == nil { + return false + } + + if 
len(c.Contracts.Settings) == 0 { + return true + } + + if v, ok := c.Contracts.Settings[*c.Contracts.MultiCallAddress]; ok { + return v.ShouldBeUsed != nil && *v.ShouldBeUsed + } + + return true +} + +type ContractSetting struct { + ShouldBeUsed *bool `toml:"use"` + Configure *bool `toml:"configure"` +} diff --git a/integration-tests/testsetups/keeper_benchmark.go b/integration-tests/testsetups/automation_benchmark.go similarity index 94% rename from integration-tests/testsetups/keeper_benchmark.go rename to integration-tests/testsetups/automation_benchmark.go index 40a56ba73c8..ff8100ea437 100644 --- a/integration-tests/testsetups/keeper_benchmark.go +++ b/integration-tests/testsetups/automation_benchmark.go @@ -12,6 +12,8 @@ import ( "testing" "time" + "github.com/smartcontractkit/chainlink/integration-tests/testconfig" + "github.com/smartcontractkit/chainlink/integration-tests/actions/automationv2" geth "github.com/ethereum/go-ethereum" @@ -105,14 +107,14 @@ func NewKeeperBenchmarkTest(t *testing.T, inputs KeeperBenchmarkTestInputs) *Kee } // Setup prepares contracts for the test -func (k *KeeperBenchmarkTest) Setup(env *environment.Environment, config tt.AutomationBenchmarkTestConfig) { +func (k *KeeperBenchmarkTest) Setup(env *environment.Environment, config testconfig.TestConfig) { startTime := time.Now() k.TestReporter.Summary.StartTime = startTime.UnixMilli() k.ensureInputValues() k.env = env k.namespace = k.env.Cfg.Namespace inputs := k.Inputs - k.testConfig = config + k.testConfig = &config k.automationTests = make([]automationv2.AutomationTest, len(inputs.RegistryVersions)) k.keeperRegistries = make([]contracts.KeeperRegistry, len(inputs.RegistryVersions)) @@ -146,7 +148,7 @@ func (k *KeeperBenchmarkTest) Setup(env *environment.Environment, config tt.Auto for index := range inputs.RegistryVersions { k.log.Info().Int("Index", index).Msg("Starting Test Setup") - a := automationv2.NewAutomationTestK8s(k.log, k.chainClient, k.chainlinkNodes) + a := automationv2.NewAutomationTestK8s(k.log, k.chainClient, k.chainlinkNodes, &config) a.RegistrySettings = *k.Inputs.KeeperRegistrySettings a.RegistrySettings.RegistryVersion = inputs.RegistryVersions[index] a.RegistrarSettings = contracts.KeeperRegistrarSettings{ @@ -159,7 +161,7 @@ func (k *KeeperBenchmarkTest) Setup(env *environment.Environment, config tt.Auto a.SetupAutomationDeploymentWithoutJobs(k.t) err = a.SetConfigOnRegistry() require.NoError(k.t, err, "Setting initial config on registry shouldn't fail") - k.DeployBenchmarkKeeperContracts(index, a) + k.SetupBenchmarkKeeperContracts(index, a) } var keysToFund = inputs.RegistryVersions @@ -178,7 +180,7 @@ func (k *KeeperBenchmarkTest) Setup(env *environment.Environment, config tt.Auto } k.log.Info().Str("Setup Time", time.Since(startTime).String()).Msg("Finished Keeper Benchmark Test Setup") - err = k.SendSlackNotification(nil, config) + err = k.SendSlackNotification(nil, &config) if err != nil { k.log.Warn().Msg("Sending test start slack notification failed") } @@ -301,7 +303,7 @@ func (k *KeeperBenchmarkTest) Run() { startedObservations.Add(1) k.log.Info().Int("Channel index", chIndex).Str("UpkeepID", upkeepIDCopy.String()).Msg("Starting upkeep observation") - confirmer := contracts.NewKeeperConsumerBenchmarkUpkeepObserver( + confirmer := contracts.NewAutomationConsumerBenchmarkUpkeepObserver( k.keeperConsumerContracts[registryIndex], k.keeperRegistries[registryIndex], upkeepIDCopy, @@ -650,8 +652,8 @@ func (k *KeeperBenchmarkTest) SendSlackNotification(slackClient *slack.Client, c 
return err
 }
 
-// DeployBenchmarkKeeperContracts deploys a set amount of keeper Benchmark contracts registered to a single registry
-func (k *KeeperBenchmarkTest) DeployBenchmarkKeeperContracts(index int, a *automationv2.AutomationTest) {
+// SetupBenchmarkKeeperContracts deploys, or loads when pre-deployed contracts are configured, a set of keeper benchmark contracts registered to a single registry
+func (k *KeeperBenchmarkTest) SetupBenchmarkKeeperContracts(index int, a *automationv2.AutomationTest) {
 	registryVersion := k.Inputs.RegistryVersions[index]
 	k.Inputs.KeeperRegistrySettings.RegistryVersion = registryVersion
 	upkeep := k.Inputs.Upkeeps
@@ -659,7 +661,15 @@ func (k *KeeperBenchmarkTest) DeployBenchmarkKeeperContracts(index int, a *autom
 		err error
 	)
 
-	consumer := k.DeployKeeperConsumersBenchmark()
+	var consumer contracts.AutomationConsumerBenchmark
+	if a.TestConfig.GetAutomationConfig().UseExistingUpkeepContracts() {
+		benchmarkAddresses, err := a.TestConfig.GetAutomationConfig().UpkeepContractAddresses()
+		require.NoError(k.t, err, "Getting upkeep contract addresses shouldn't fail")
+		consumer, err = contracts.LoadAutomationConsumerBenchmark(k.chainClient, benchmarkAddresses[0])
+		require.NoError(k.t, err, "Loading AutomationConsumerBenchmark shouldn't fail")
+	} else {
+		consumer = k.DeployKeeperConsumersBenchmark()
+	}
 
 	var upkeepAddresses []string
 
@@ -710,7 +720,7 @@
 	linkFunds = big.NewInt(0).Add(linkFunds, minLinkBalance)
 
 	k.linkToken = a.LinkToken
-	err = actions.DeployMultiCallAndFundDeploymentAddresses(k.chainClient, k.linkToken, upkeep.NumberOfUpkeeps, linkFunds)
+	err = actions.SetupMultiCallAndFundDeploymentAddresses(k.chainClient, k.linkToken, upkeep.NumberOfUpkeeps, linkFunds, a.TestConfig)
 	require.NoError(k.t, err, "Sending link funds to deployment addresses shouldn't fail")
 
 	upkeepIds := actions.RegisterUpkeepContractsWithCheckData(k.t, k.chainClient, k.linkToken, linkFunds, uint32(upkeep.UpkeepGasLimit), a.Registry, a.Registrar, upkeep.NumberOfUpkeeps, upkeepAddresses, checkData, false, false, false, nil)
@@ -729,17 +739,17 @@ func (k *KeeperBenchmarkTest) DeployKeeperConsumersBenchmark() contracts.Automat
 	if *k.testConfig.GetAutomationConfig().Resiliency.ContractCallLimit != 0 && k.testConfig.GetAutomationConfig().Resiliency.ContractCallInterval.Duration != 0 {
 		maxRetryAttempts := *k.testConfig.GetAutomationConfig().Resiliency.ContractCallLimit
 		callRetryDelay := k.testConfig.GetAutomationConfig().Resiliency.ContractCallInterval.Duration
-		keeperConsumerInstance, err = contracts.DeployKeeperConsumerBenchmarkWithRetry(k.chainClient, k.log, maxRetryAttempts, callRetryDelay)
+		keeperConsumerInstance, err = contracts.DeployAutomationConsumerBenchmarkWithRetry(k.chainClient, k.log, maxRetryAttempts, callRetryDelay)
 		if err != nil {
 			k.log.Error().Err(err).Msg("Deploying AutomationConsumerBenchmark instance shouldn't fail")
-			keeperConsumerInstance, err = contracts.DeployKeeperConsumerBenchmarkWithRetry(k.chainClient, k.log, maxRetryAttempts, callRetryDelay)
+			keeperConsumerInstance, err = contracts.DeployAutomationConsumerBenchmarkWithRetry(k.chainClient, k.log, maxRetryAttempts, callRetryDelay)
 			require.NoError(k.t, err, "Error deploying AutomationConsumerBenchmark")
 		}
 	} else {
-		keeperConsumerInstance, err = contracts.DeployKeeperConsumerBenchmark(k.chainClient)
+		keeperConsumerInstance, err = contracts.DeployAutomationConsumerBenchmark(k.chainClient)
 		if err != nil {
 			k.log.Error().Err(err).Msg("Deploying AutomationConsumerBenchmark instance %d shouldn't fail")
-			keeperConsumerInstance, err = contracts.DeployKeeperConsumerBenchmark(k.chainClient)
+			keeperConsumerInstance, err = contracts.DeployAutomationConsumerBenchmark(k.chainClient)
 			require.NoError(k.t, err, "Error deploying AutomationConsumerBenchmark")
 		}
 	}
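For reference, the pre-deployed contracts option added in this patch is driven entirely by the test TOML. Below is a minimal sketch of such a configuration; it assumes the automation section is rooted at [Automation] as in the other automation test configs, the key names are taken from the struct tags in testconfig/automation/config.go, and every address is a placeholder:

    [Automation.Contracts]
    use = true
    link_token = "0x0000000000000000000000000000000000000001"
    registry = "0x0000000000000000000000000000000000000002"
    registrar = "0x0000000000000000000000000000000000000003"
    multicall = "0x0000000000000000000000000000000000000004"
    upkeep_contracts = ["0x0000000000000000000000000000000000000005"]

    # Optional per-address overrides. Contracts.Validate() requires the listed
    # upkeep contracts to be either all used or all skipped, and likewise all
    # configured or none.
    [Automation.Contracts.Settings."0x0000000000000000000000000000000000000005"]
    use = true
    configure = true

The top-level use flag gates everything; per-address Settings entries can then disable individual contracts, and when Settings is empty every configured address is treated as enabled, matching the UseExisting*Contract helpers above.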
From 77612582b7b975178eda0c70300af14685a6658e Mon Sep 17 00:00:00 2001
From: Sergey Kudasov
Date: Mon, 23 Sep 2024 10:59:21 +0200
Subject: [PATCH 02/14] add CTF deps to dependabot (#14517)

* add CTF deps to dependabot

* add ctf label

* fix registry definition

* remove registry

* fix changeset

* remove changeset

---
 .github/dependabot.yml | 40 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index cea4f07b90d..d7af2d7f750 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -28,3 +28,43 @@ updates:
     schedule:
       interval: monthly
     open-pull-requests-limit: 0
+  - package-ecosystem: gomod
+    directory: "/lib"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    labels:
+      - "dependencies"
+      - "ctf"
+  - package-ecosystem: gomod
+    directory: "/wasp"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    labels:
+      - "dependencies"
+      - "ctf"
+  - package-ecosystem: gomod
+    directory: "/seth"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    labels:
+      - "dependencies"
+      - "ctf"
+  - package-ecosystem: gomod
+    directory: "/havoc"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    labels:
+      - "dependencies"
+      - "ctf"
+  - package-ecosystem: gomod
+    directory: "/k8s-test-runner"
+    schedule:
+      interval: "daily"
+    open-pull-requests-limit: 10
+    labels:
+      - "dependencies"
+      - "ctf"

From 359feffcd5622658ebc6805b992d75a3fb82fdc2 Mon Sep 17 00:00:00 2001
From: Lukasz <120112546+lukaszcl@users.noreply.github.com>
Date: Mon, 23 Sep 2024 11:02:44 +0200
Subject: [PATCH 03/14] TT-1550 Migrate remaining E2E workflows to the reusable workflow (#14403)

* Update some test workflows to run with test config path
* Fix summary
* Fix
* Update on-demand-ocr-soak-test.yml
* Run vrfplus tests on ARBITRUM_SEPOLIA on existing env
* Add config for staging release testing on arbitrum sepolia
* Show test_secrets_override_key in summary
* Generate hash of integration-tests/ for test runner image if tag not provided
* test updating config but not rebuilding test image
* Fix test_runner_hash
* Commit all configs for staging
* test config change
* Update release configs
* Do not rebuild test image when md or .github files were changed
* Add wemix testnet config for ocrv2 soak test
* Use chainlink version from config
* Update reusable workflow inputs
* Add soak/ocr_test.go:^TestOCRv2Soak$WemixTestnet to CI tests
* Fix
* Fix test image tag
* Remove image from default.toml
* Fix chainlink_version
* Fix test image hash value
* Fix summary
* Update workflow
* Update config overrides: read from default, then env vars, then from BASE64_CONFIG_OVERRIDE if it exists. BASE64_CONFIG_OVERRIDE should be able to override everything that was already set in the test config
* Build chainlink image sha if required
* Fix
* Add new workflow
* Show test config path in reusable workflow
* Update list of tests
* Update run-selected-tests workflow
* Fix
* Rename steps
* fix workflow
* Add TestVRFv2Plus_LiveTestnets to CI tests definition
* Fix
* Run vrfv2plus tests on simulated network by default
* Fix
* fix
* fix
* Update test definition
* Add TestVRFv2Plus_Release_Sepolia
* Add workflow to run vrf e2e release tests
* test new workflow with slack notification
* fix
* fix
* fix
* fix color
* fix
* revert if
* revert test definition changes
* Add test_secrets_override_key
* fix for slack notification after test
* TT-1550: reorganizing VRF V2 Plus configs
* Use test_config_override_path instead of base64 in other VRFv2Plus workflows
* remove file
* Refactor on-demand-vrfv2plus-performance-test.yml
* Add slack notification for on-demand-vrfv2plus-smoke-tests.yml
* Fix
* fix 2
* remove debug info
* update color of slack notification
* Migrate on-demand-vrfv2-eth2-clients-test
* Migrate on-demand-ocr-soak-test.yml (WIP)
* Rename step
* update test names
* fix
* fix name
* fix
* Migrate automation-nightly-tests.yml
* Build remote runner image asap
* Fix
* add ocr2 celo_alfajores and base_sepolia test configs
* Fix
* Fix
* Migrate automation-benchmark-tests.yml
* fix
* Migrate automation-load-tests.yml
* Migrate on-demand-vrfv2-eth2-clients-test.yml
* Migrate on-demand-vrfv2-performance-test.yml
* Remove unused actions
* Always show console logs for ocr tests
* Allow custom docker test artifacts
* Migrate ccip-load-tests.yml
* revert these commits
* fix ARTIFACT_PATHS
* fix
* Allow to upload custom test artifacts
* Fix
* Fix
* Clean up toml examples
* Remove unused live-testnet-tests.yml workflow. Confirmed with Adam
* Remove old action
* Fix
* Fix
* update test duration to 24h
* Fix citool
* debug artifacts on failure
* debug
* Fix
* Use ubuntu-latest
* Update workflow slack notifications
* Fix test id when running custom tests
* Fix
* Fix
* Add OCR test configs
* remove unused workflow
* Add slack notifications for vrfv2 workflows
* Fix slack notifications for automation workflows
* Fix SLACK_USER input in on-demand-ocr-soak-test.yml
* Do not use github.sha as default chainlink version
* Remove chainlink_version input from automation workflows. Use config instead
* update base sepolia test config duration to 1h
* Fix
* Do not upload cpu and memory profile by default
* Do not set E2E_TEST_PYROSCOPE_ENABLED. Get it from toml config instead
* reverting base sepolia duration to 24h
* Fix TEST_UPLOAD_CPU_PROFILE and TEST_UPLOAD_MEM_PROFILE
* debug TEST_UPLOAD_CPU_PROFILE
* Read config from env var first then override with TOML
* remove debug info
* Include tag and sha in workflow slack notification
* Fix notification. Do not use link for tag because it may not exist. Use short sha.
* Revert "Read config from env var first then override with TOML"

This reverts commit 2b2fb180dbf936bf8549fb7d40e2084b83e5820b.

* Fix and bump ctf
* go mod tidy
* bump ctf
* add automation test configs (#14455)

* add automation test configs
* update grafana config
* update testid in workflow
* update testid in e2e-tests
* restructure configs
* update config
* add testType input in benchmark workflow
* add testType input in e2e reusable workflow
* Revert "add testType input in e2e reusable workflow"

This reverts commit 937607cccfbf52f579a6b2ae09ff8fae9f51cac9.
* update benchmark workflow
* update test log level for load and benchmark

---------

Co-authored-by: lukaszcl <120112546+lukaszcl@users.noreply.github.com>

* Increase test_log_upload_retention_days from 3 to 5
* Fix
* Debug CCIP Load Test in E2E Workflow Conversion (#14479)

Fixes CCIP load tests

* Run selected e2e tests in merge queue
* Run selected e2e tests in merge queue part 2
* Run selected e2e tests in merge queue part 3
* Use ctf-build-test-image@0.1.0
* Fix suites for k8s tests. Use test_suite input and matrix.tests.test_env_vars.TEST_SUITE to set test suites for tests in e2e-tests.yml
* Update GHA
* Rename step
* Update run-tests GHA
* update test configs to use ocr common

---------

Co-authored-by: Ilja Pavlovs
Co-authored-by: joaoluisam
Co-authored-by: davidcauchi
Co-authored-by: Anirudh Warrier <12178754+anirudhwarrier@users.noreply.github.com>
Co-authored-by: Adam Hamrick
---
 .../actions/build-chainlink-image/action.yml | 2 +-
 .github/actions/build-test-image/action.yml | 150 ---
 .../action.yml | 151 ---
 .../action.yml | 144 ---
 .../action.yml | 125 --
 .../setup-merge-base64-config/action.yml | 70 --
 .../setup-parse-base64-config/action.yml | 44 -
 .github/e2e-tests.yml | 192 ++-
 .../workflows/automation-benchmark-tests.yml | 138 +-
 .github/workflows/automation-load-tests.yml | 150 +--
 .../workflows/automation-nightly-tests.yml | 317 +----
 .../workflows/automation-ondemand-tests.yml | 3 +
 .github/workflows/ccip-chaos-tests.yml | 3 +
 .github/workflows/ccip-load-tests.yml | 315 +----
 .github/workflows/integration-chaos-tests.yml | 3 +
 .../workflows/integration-tests-publish.yml | 2 +-
 .github/workflows/integration-tests.yml | 135 +-
 .github/workflows/live-testnet-tests.yml | 1119 -----------------
 .github/workflows/on-demand-ocr-soak-test.yml | 150 +--
 .../on-demand-vrfv2-performance-test.yml | 3 +
 .../workflows/on-demand-vrfv2-smoke-tests.yml | 3 +
 .../on-demand-vrfv2plus-performance-test.yml | 3 +
 .../on-demand-vrfv2plus-smoke-tests.yml | 3 +
 .../run-e2e-tests-reusable-workflow.yml | 143 +--
 .github/workflows/run-nightly-e2e-tests.yml | 3 +
 .github/workflows/run-selected-e2e-tests.yml | 4 +-
 integration-tests/scripts/buildTests | 1 +
 .../testconfig/automation/automation.toml | 13 +
 .../testconfig/automation/example.toml | 30 -
 .../benchmark/1000Upkeeps-1h-2_1.toml | 15 +
 .../benchmark/1000Upkeeps-1h-2_3.toml | 15 +
 .../overrides/load/500Upkeeps-1x-1h.toml | 47 +
 .../overrides/load/50Upkeeps-1x-12h.toml | 43 +
 .../overrides/load/50Upkeeps-1x-1h.toml | 43 +
 .../overrides/soak/50Upkeeps-8h-2_1.toml | 15 +
 .../overrides/soak/50Upkeeps-8h-2_3.toml | 15 +
 integration-tests/testconfig/default.toml | 123 +-
 .../testconfig/forwarder_ocr/example.toml | 10 -
 .../testconfig/forwarder_ocr2/example.toml | 9 -
 .../testconfig/functions/example.toml | 29 -
 .../testconfig/keeper/example.toml | 29 -
 .../testconfig/keeper/keeper.toml | 2 +-
 .../testconfig/log_poller/example.toml | 29 -
 .../testconfig/node/example.toml | 30 -
 integration-tests/testconfig/ocr/example.toml | 10 -
 .../ocr/overrides/arbitrum_mainnet.toml | 18 +
 .../ocr/overrides/arbitrum_sepolia.toml | 15 +
 .../ocr/overrides/base_mainnet.toml | 18 +
 .../ocr/overrides/base_sepolia.toml | 15 +
 .../ocr/overrides/celo_alfajores.toml | 15 +
 .../ocr/overrides/ethereum_sepolia.toml | 15 +
 .../ocr/overrides/linea_sepolia.toml | 15 +
 .../ocr/overrides/optimism_mainnet.toml | 18 +
 .../ocr/overrides/optimism_sepolia.toml | 15 +
 .../ocr/overrides/scroll_sepolia.toml | 15 +
 .../ocr/overrides/wemix_mainnet.toml | 18 +
 .../testconfig/ocr2/example.toml
| 10 - .../ocr2/overrides/base_sepolia.toml | 18 + .../ocr2/overrides/ethereum_sepolia.toml | 15 + .../ocr2/overrides/polygon_amoy.toml | 15 + .../ocr2/overrides/polygon_mainnet.toml | 18 + .../ocr2/overrides/wemix_testnet.toml | 15 + .../ocr2/overrides/xlayer_sepolia.toml | 15 + .../testconfig/vrfv2/example.toml | 29 - .../testconfig/vrfv2plus/example.toml | 29 - 65 files changed, 1056 insertions(+), 3168 deletions(-) delete mode 100644 .github/actions/build-test-image/action.yml delete mode 100644 .github/actions/setup-create-base64-config-ccip/action.yml delete mode 100644 .github/actions/setup-create-base64-config-live-testnets/action.yml delete mode 100644 .github/actions/setup-create-base64-upgrade-config/action.yml delete mode 100644 .github/actions/setup-merge-base64-config/action.yml delete mode 100644 .github/actions/setup-parse-base64-config/action.yml delete mode 100644 .github/workflows/live-testnet-tests.yml create mode 100644 integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_1.toml create mode 100644 integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_3.toml create mode 100644 integration-tests/testconfig/automation/overrides/load/500Upkeeps-1x-1h.toml create mode 100644 integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-12h.toml create mode 100644 integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-1h.toml create mode 100644 integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_1.toml create mode 100644 integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_3.toml create mode 100644 integration-tests/testconfig/ocr/overrides/arbitrum_mainnet.toml create mode 100644 integration-tests/testconfig/ocr/overrides/arbitrum_sepolia.toml create mode 100644 integration-tests/testconfig/ocr/overrides/base_mainnet.toml create mode 100644 integration-tests/testconfig/ocr/overrides/base_sepolia.toml create mode 100644 integration-tests/testconfig/ocr/overrides/celo_alfajores.toml create mode 100644 integration-tests/testconfig/ocr/overrides/ethereum_sepolia.toml create mode 100644 integration-tests/testconfig/ocr/overrides/linea_sepolia.toml create mode 100644 integration-tests/testconfig/ocr/overrides/optimism_mainnet.toml create mode 100644 integration-tests/testconfig/ocr/overrides/optimism_sepolia.toml create mode 100644 integration-tests/testconfig/ocr/overrides/scroll_sepolia.toml create mode 100644 integration-tests/testconfig/ocr/overrides/wemix_mainnet.toml create mode 100644 integration-tests/testconfig/ocr2/overrides/base_sepolia.toml create mode 100644 integration-tests/testconfig/ocr2/overrides/ethereum_sepolia.toml create mode 100644 integration-tests/testconfig/ocr2/overrides/polygon_amoy.toml create mode 100644 integration-tests/testconfig/ocr2/overrides/polygon_mainnet.toml create mode 100644 integration-tests/testconfig/ocr2/overrides/wemix_testnet.toml create mode 100644 integration-tests/testconfig/ocr2/overrides/xlayer_sepolia.toml diff --git a/.github/actions/build-chainlink-image/action.yml b/.github/actions/build-chainlink-image/action.yml index 4934e579aec..0e457560bf0 100644 --- a/.github/actions/build-chainlink-image/action.yml +++ b/.github/actions/build-chainlink-image/action.yml @@ -37,7 +37,7 @@ runs: AWS_ROLE_TO_ASSUME: ${{ inputs.AWS_ROLE_TO_ASSUME }} - name: Build Image if: steps.check-image.outputs.exists != 'true' - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 + uses: smartcontractkit/.github/actions/ctf-build-image@1a26fe378d7ebdc34ab1fe31ec4a6d1c376199f8 # ctf-build-image@0.0.0 with: cl_repo: smartcontractkit/chainlink cl_ref: ${{ inputs.git_commit_sha }} diff --git a/.github/actions/build-test-image/action.yml b/.github/actions/build-test-image/action.yml deleted file mode 100644 index bc81226b329..00000000000 --- a/.github/actions/build-test-image/action.yml +++ /dev/null @@ -1,150 +0,0 @@ -name: Build Test Image -description: A composite action that allows building and publishing the test remote runner image - -inputs: - repository: - description: The docker repository for the image - default: chainlink-tests - required: false - tag: - description: The tag to use by default and to use for checking image existance. If not provided, the hash of the integration-tests/ directory will be used - required: false - other_tags: - description: Other tags to push if needed - required: false - suites: - description: The test suites to build into the image - default: chaos migration reorg smoke soak benchmark load - required: false - QA_AWS_ROLE_TO_ASSUME: - description: The AWS role to assume as the CD user, if any. Used in configuring the docker/login-action - required: true - QA_AWS_REGION: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: true - QA_AWS_ACCOUNT_NUMBER: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: true - -outputs: - test_image: - description: The full name of the test image that was built - value: ${{ steps.image_outputs.outputs.test_image }} - test_image_tag: - description: The tag of the test image that was built - value: ${{ steps.image_outputs.outputs.test_image_tag }} - test_image_repository: - description: The repository of the test image that was built - value: ${{ steps.image_outputs.outputs.test_image_repo }} - -runs: - using: composite - steps: - - # Base Test Image Logic - - name: Get CTF Version - id: version - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/mod-version@fc3e0df622521019f50d772726d6bf8dc919dd38 # v2.3.19 - with: - go-project-path: ./integration-tests - module-name: github.com/smartcontractkit/chainlink-testing-framework/lib - enforce-semantic-tag: false - - name: Get CTF sha - if: steps.version.outputs.is_semantic == 'false' - id: short_sha - env: - VERSION: ${{ steps.version.outputs.version }} - shell: bash - run: | - short_sha="${VERSION##*-}" - echo "short sha is: ${short_sha}" - echo "short_sha=${short_sha}" >> "$GITHUB_OUTPUT" - - name: Checkout chainlink-testing-framework - if: steps.version.outputs.is_semantic == 'false' - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink-testing-framework - ref: main - fetch-depth: 0 - path: ctf - - name: Get long sha - if: steps.version.outputs.is_semantic == 'false' - id: long_sha - env: - SHORT_SHA: ${{ steps.short_sha.outputs.short_sha }} - shell: bash - run: | - cd ctf - long_sha=$(git rev-parse ${SHORT_SHA}) - echo "sha is: ${long_sha}" - echo "long_sha=${long_sha}" >> "$GITHUB_OUTPUT" - - name: Check if test base image exists - if: steps.version.outputs.is_semantic == 'false' - id: 
check-base-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: test-base-image - tag: ${{ steps.long_sha.outputs.long_sha }} - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Base Image - if: steps.version.outputs.is_semantic == 'false' && steps.check-base-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/docker/build-push@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - env: - BASE_IMAGE_NAME: ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image:${{ steps.long_sha.outputs.long_sha }} - with: - tags: ${{ env.BASE_IMAGE_NAME }} - file: ctf/lib/k8s/Dockerfile.base - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - # End Base Image Logic - - # Test Runner Logic - - name: Get hash of integration-tests/ for test runner image - id: test_runner_hash - if: ${{ inputs.tag == '' }} - # Do not include testconfig/ in the hash to avoid rebuilding the image when only testconfig/ changes - shell: sh - run: | - HASH_VALUE=$(find integration-tests -type f ! -path 'integration-tests/testconfig/overrides/*.toml' ! -path 'integration-tests/testconfig/overrides/*/*.toml' ! -path 'integration-tests/testconfig/*/overrides/*.toml' ! -path 'integration-tests/testconfig/*/overrides/*/*.toml' ! -path 'integration-tests/ccip-tests/testconfig/*/overrides/*.toml' ! -path 'integration-tests/ccip-tests/testconfig/*/overrides/*/*.toml' ! -path '.github/*/*' ! -path '*/*.md' ! -path '*/*.MD' ! -path 'integration-tests/*/__debug_bin*' ! -path '*/*.MD' ! -path 'integration-tests/*/tmp-manifest*.yaml' ! -path '*/*.MD' ! -path 'integration-tests/*/*.log' ! -path 'integration-tests/*/*_dump.sql' ! 
-path 'integration-tests/*/.test_summary/*' -exec sha256sum {} + | sort -k 2 | sha256sum | awk '{print $1}') - echo "Computed hash: $HASH_VALUE" - echo "hash_value=$HASH_VALUE" >> $GITHUB_OUTPUT - - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: ${{ inputs.repository }} - tag: ${{ inputs.tag || steps.test_runner_hash.outputs.hash_value }} - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Build and Publish Test Runner - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/docker/build-push@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - tags: | - ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/${{ inputs.repository }}:${{ inputs.tag || steps.test_runner_hash.outputs.hash_value }} - ${{ inputs.other_tags }} - file: ./integration-tests/test.Dockerfile - build-args: | - BASE_IMAGE=${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image - IMAGE_VERSION=${{ steps.long_sha.outputs.long_sha || steps.version.outputs.version }} - SUITES="${{ inputs.suites }}" - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Print Image Built - shell: sh - env: - INPUTS_REPOSITORY: ${{ inputs.repository }} - INPUTS_TAG: ${{ inputs.tag || steps.test_runner_hash.outputs.hash_value }} - run: | - echo "### ${INPUTS_REPOSITORY} image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${INPUTS_TAG}\`" >>$GITHUB_STEP_SUMMARY - - name: Set outputs - id: image_outputs - shell: sh - run: | - echo "test_image_repo=${{ inputs.repository }}" >> $GITHUB_OUTPUT - echo "test_image_tag=${{ inputs.tag || steps.test_runner_hash.outputs.hash_value }}" >> $GITHUB_OUTPUT - echo "test_image=${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/${{ inputs.repository }}:${{ inputs.tag || steps.test_runner_hash.outputs.hash_value }}" >> $GITHUB_OUTPUT - # End Test Runner Logic diff --git a/.github/actions/setup-create-base64-config-ccip/action.yml b/.github/actions/setup-create-base64-config-ccip/action.yml deleted file mode 100644 index cb20c886e38..00000000000 --- a/.github/actions/setup-create-base64-config-ccip/action.yml +++ /dev/null @@ -1,151 +0,0 @@ -name: Create Base64 Config for CCIP Tests -description: A composite action that creates a base64-encoded config to be used by ccip integration tests - -inputs: - runId: - description: The run id - existingNamespace: - description: If test needs to run against already deployed namespace - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - selectedNetworks: - description: The networks to run tests against - chainlinkVersion: - description: The git commit sha to use for the image tag - upgradeVersion: - description: The git commit sha to use for the image tag - logstreamLogTargets: - description: Where to send logs (e.g. file, loki) - customEvmNodes: - description: Custom EVM nodes to use in key=value format, where key is chain id and value is docker image to use. 
If they are provided the number of networksSelected must be equal to the number of customEvmNodes - evmNodeLogLevel: - description: Log level for the custom EVM nodes - default: "info" -outputs: - base64_config: - description: The base64-encoded config - value: ${{ steps.base64_config_override.outputs.base64_config }} - -runs: - using: composite - steps: - - name: Prepare Base64 TOML override - shell: bash - id: base64_config_override - env: - RUN_ID: ${{ inputs.runId }} - SELECTED_NETWORKS: ${{ inputs.selectedNetworks }} - EXISTING_NAMESPACE: ${{ inputs.existingNamespace }} - TEST_LOG_COLLECT: ${{ inputs.testLogCollect }} - CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }} - UPGRADE_VERSION: ${{ inputs.upgradeVersion }} - LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }} - CUSTOM_EVM_NODES: ${{ inputs.customEvmNodes }} - EVM_NODE_LOG_LEVEL: ${{ inputs.evmNodeLogLevel }} - run: | - function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS") - log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS") - - if [ -n "$TEST_LOG_COLLECT" ]; then - test_log_collect=true - else - test_log_collect=false - fi - - # make sure the number of networks and nodes match - IFS=',' read -r -a networks_array <<< "$SELECTED_NETWORKS" - IFS=',' read -r -a nodes_array <<< "$CUSTOM_EVM_NODES" - - networks_count=${#networks_array[@]} - nodes_count=${#nodes_array[@]} - - # Initialize or clear CONFIG_TOML environment variable - custom_nodes_toml="" - - # Check if the number of CUSTOM_EVM_NODES is zero - if [ $nodes_count -eq 0 ]; then - echo "The number of CUSTOM_EVM_NODES is zero, won't output any custom private Ethereum network configurations." - else - if [ $networks_count -ne $nodes_count ]; then - echo "The number of elements in SELECTED_NETWORKS (${networks_count}) and CUSTOM_EVM_NODES does not match (${nodes_count})." 
- exit 1 - else - for i in "${!networks_array[@]}"; do - IFS='=' read -r chain_id docker_image <<< "${nodes_array[i]}" - custom_nodes_toml+=" - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}] - ethereum_version=\"\" - execution_layer=\"\" - - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}.EthereumChainConfig] - seconds_per_slot=3 - slots_per_epoch=2 - genesis_delay=15 - validator_count=4 - chain_id=${chain_id} - addresses_to_fund=[\"0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266\", \"0x70997970C51812dc3A010C7d01b50e0d17dc79C8\"] - node_log_level=\"${EVM_NODES_LOG_LEVEL}\" - - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}.EthereumChainConfig.HardForkEpochs] - Deneb=500 - - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}.CustomDockerImages] - execution_layer=\"${docker_image}\" - " - done - fi - fi - - cat << EOF > config.toml - [CCIP] - [CCIP.Env] - EnvToConnect="$EXISTING_NAMESPACE" - [CCIP.Env.Network] - selected_networks = $selected_networks - [CCIP.Env.NewCLCluster] - [CCIP.Env.NewCLCluster.Common] - [CCIP.Env.NewCLCluster.Common.ChainlinkImage] - version="$CHAINLINK_VERSION" - - $custom_nodes_toml - - [CCIP.Env.Logging] - test_log_collect=$test_log_collect - run_id="$RUN_ID" - - [CCIP.Env.Logging.LogStream] - log_targets=$log_targets - - [CCIP.Groups.load] - TestRunName = '$EXISTING_NAMESPACE' - - [CCIP.Groups.smoke] - TestRunName = '$EXISTING_NAMESPACE' - - EOF - - # Check if UPGRADE_VERSION is not empty and append to config.toml - if [ -n "$UPGRADE_VERSION" ]; then - cat << EOF >> config.toml - [CCIP.Env.NewCLCluster.Common.ChainlinkUpgradeImage] - version="$UPGRADE_VERSION" - EOF - fi - - BASE64_CONFIG=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG - echo "base64_config=$BASE64_CONFIG" >> $GITHUB_OUTPUT \ No newline at end of file diff --git a/.github/actions/setup-create-base64-config-live-testnets/action.yml b/.github/actions/setup-create-base64-config-live-testnets/action.yml deleted file mode 100644 index 64fc134b46e..00000000000 --- a/.github/actions/setup-create-base64-config-live-testnets/action.yml +++ /dev/null @@ -1,144 +0,0 @@ -name: Create Base64 Config -description: A composite action that creates a base64-encoded config to be used by integration tests - -inputs: - runId: - description: The run id - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - chainlinkImage: - description: The chainlink image to use - default: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: - description: The git commit sha to use for the image tag - chainlinkPostgresVersion: - description: The postgres version to use with the chainlink node - default: "15.6" - pyroscopeServer: - description: URL of Pyroscope server - pyroscopeEnvironment: - description: Name of Pyroscope environment - pyroscopeKey: - description: Pyroscope server key - lokiEndpoint: - description: Loki push endpoint - lokiTenantId: - description: Loki tenant id - lokiBasicAuth: - description: Loki basic auth - logstreamLogTargets: - description: Where to send logs (e.g. 
file, loki) - grafanaUrl: - description: Grafana URL - grafanaDashboardUrl: - description: Grafana dashboard URL - grafanaBearerToken: - description: Grafana bearer token - network: - description: Network to run tests on - httpEndpoints: - description: HTTP endpoints to use for network - wsEndpoints: - description: WS endpoints to use for network - fundingKeys: - description: Funding keys to use for network - -runs: - using: composite - steps: - - name: Prepare Base64 TOML override - shell: bash - id: base64-config-override - env: - RUN_ID: ${{ inputs.runId }} - PYROSCOPE_SERVER: ${{ inputs.pyroscopeServer }} - PYROSCOPE_ENVIRONMENT: ${{ inputs.pyroscopeEnvironment }} - PYROSCOPE_KEY: ${{ inputs.pyroscopeKey }} - CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }} - CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }} - CHAINLINK_POSTGRES_VERSION: ${{ inputs.chainlinkPostgresVersion }} - LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }} - LOKI_TENANT_ID: ${{ inputs.lokiTenantId }} - LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }} - LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }} - GRAFANA_URL: ${{ inputs.grafanaUrl }} - GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }} - GRAFANA_BEARER_TOKEN: ${{ inputs.grafanaBearerToken }} - NETWORK: ${{ inputs.network }} - HTTP_ENDPOINTS: ${{ inputs.httpEndpoints }} - WS_ENDPOINTS: ${{ inputs.wsEndpoints }} - FUNDING_KEYS: ${{ inputs.fundingKeys }} - run: | - convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - if [ -n "$PYROSCOPE_SERVER" ]; then - pyroscope_enabled=true - else - pyroscope_enabled=false - fi - - grafana_bearer_token="" - if [ -n "$GRAFANA_BEARER_TOKEN" ]; then - grafana_bearer_token="bearer_token_secret=\"$GRAFANA_BEARER_TOKEN\"" - fi - - cat << EOF > config.toml - [Common] - chainlink_node_funding=0.5 - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="$CHAINLINK_VERSION" - postgres_version="$CHAINLINK_POSTGRES_VERSION" - - [Pyroscope] - enabled=$pyroscope_enabled - server_url="$PYROSCOPE_SERVER" - environment="$PYROSCOPE_ENVIRONMENT" - key_secret="$PYROSCOPE_KEY" - - [Logging] - run_id="$RUN_ID" - - [Logging.LogStream] - log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS") - - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_URL" - basic_auth_secret="$LOKI_BASIC_AUTH" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="$GRAFANA_DASHBOARD_URL" - $grafana_bearer_token - - [Network] - selected_networks=["$NETWORK"] - - [Network.RpcHttpUrls] - "$NETWORK" = $(convert_to_toml_array "$HTTP_ENDPOINTS") - - [Network.RpcWsUrls] - "$NETWORK" = $(convert_to_toml_array "$WS_ENDPOINTS") - - [Network.WalletKeys] - "$NETWORK" = $(convert_to_toml_array "$FUNDING_KEYS") - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - touch .root_dir diff --git a/.github/actions/setup-create-base64-upgrade-config/action.yml b/.github/actions/setup-create-base64-upgrade-config/action.yml deleted file mode 100644 index c2d0bc19f35..00000000000 --- a/.github/actions/setup-create-base64-upgrade-config/action.yml +++ /dev/null @@ -1,125 +0,0 @@ -name: Create Base64 Upgrade Config -description: A composite action that creates a base64-encoded config to be used by Chainlink version upgrade tests 
- -inputs: - selectedNetworks: - description: The networks to run tests against - chainlinkImage: - description: The chainlink image to upgrade from - default: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: - description: The git commit sha to use for the image tag - chainlinkPostgresVersion: - description: The postgres version to use with the chainlink node - default: "15.6" - upgradeImage: - description: The chainlink image to upgrade to - default: "public.ecr.aws/chainlink/chainlink" - upgradeVersion: - description: The git commit sha to use for the image tag - runId: - description: The run id - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - lokiEndpoint: - description: Loki push endpoint - lokiTenantId: - description: Loki tenant id - lokiBasicAuth: - description: Loki basic auth - logstreamLogTargets: - description: Where to send logs (e.g. file, loki) - grafanaUrl: - description: Grafana URL - grafanaDashboardUrl: - description: Grafana dashboard URL - grafanaBearerToken: - description: Grafana bearer token - -runs: - using: composite - steps: - - name: Prepare Base64 TOML override - shell: bash - id: base64-config-override - env: - SELECTED_NETWORKS: ${{ inputs.selectedNetworks }} - CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }} - CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }} - CHAINLINK_POSTGRES_VERSION: ${{ inputs.chainlinkPostgresVersion }} - UPGRADE_IMAGE: ${{ inputs.upgradeImage }} - UPGRADE_VERSION: ${{ inputs.upgradeVersion }} - RUN_ID: ${{ inputs.runId }} - TEST_LOG_COLLECT: ${{ inputs.testLogCollect }} - LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }} - LOKI_TENANT_ID: ${{ inputs.lokiTenantId }} - LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }} - LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }} - GRAFANA_URL: ${{ inputs.grafanaUrl }} - GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }} - GRAFANA_BEARER_TOKEN: ${{ inputs.grafanaBearerToken }} - run: | - function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS") - - if [ -n "$TEST_LOG_COLLECT" ]; then - test_log_collect=true - else - test_log_collect=false - fi - - log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS") - - grafana_bearer_token="" - if [ -n "$GRAFANA_BEARER_TOKEN" ]; then - grafana_bearer_token="bearer_token_secret=\"$GRAFANA_BEARER_TOKEN\"" - fi - - cat << EOF > config.toml - [Network] - selected_networks=$selected_networks - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="$CHAINLINK_VERSION" - postgres_version="$CHAINLINK_POSTGRES_VERSION" - - [ChainlinkUpgradeImage] - image="$UPGRADE_IMAGE" - version="$UPGRADE_VERSION" - postgres_version="$CHAINLINK_POSTGRES_VERSION" - - [Logging] - test_log_collect=$test_log_collect - run_id="$RUN_ID" - - [Logging.LogStream] - log_targets=$log_targets - - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_ENDPOINT" - basic_auth_secret="$LOKI_BASIC_AUTH" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="$GRAFANA_DASHBOARD_URL" - $grafana_bearer_token - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV diff --git 
a/.github/actions/setup-merge-base64-config/action.yml b/.github/actions/setup-merge-base64-config/action.yml deleted file mode 100644 index 79dc8758315..00000000000 --- a/.github/actions/setup-merge-base64-config/action.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Merge Base64 Config -description: A composite action that merges user-provided Base64-encoded config with repository's secrets - -inputs: - base64Config: - description: Base64-encoded config to decode - -runs: - using: composite - steps: - - name: Install dasel - shell: bash - run: | - if ! which dasel > /dev/null; then - curl -L -o dasel "https://github.com/TomWright/dasel/releases/download/v2.6.0/dasel_linux_amd64" && chmod +x dasel && sudo mv dasel /usr/local/bin/ - else - echo "Dasel is already installed." - fi - - name: Add masks and export base64 config - shell: bash - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d) - CHAINLINK_IMAGE=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.image' 2>/dev/null || echo ''; }) - echo ::add-mask::$CHAINLINK_IMAGE - CHAINLINK_VERSION=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.version' 2>/dev/null || echo ''; }) - NETWORKS=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*selected_networks[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - - if [ -n "$CHAINLINK_IMAGE" ]; then - echo "CHAINLINK_IMAGE=$CHAINLINK_IMAGE" >> $GITHUB_ENV - else - echo "No Chainlink Image found in base64-ed config" - fi - if [ -n "$CHAINLINK_VERSION" ]; then - echo "CHAINLINK_VERSION=$CHAINLINK_VERSION" >> $GITHUB_ENV - else - echo "No Chainlink Version found in base64-ed config" - fi - if [ -n "$NETWORKS" ]; then - echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV - fi - - grafana_bearer_token="" - if [ -n "$GRAFANA_BEARER_TOKEN" ]; then - grafana_bearer_token="bearer_token_secret=\"$GRAFANA_BEARER_TOKEN\"" - fi - - # use Loki config from GH secrets and merge it with base64 input - cat << EOF > config.toml - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_URL" - basic_auth_secret="$LOKI_BASIC_AUTH" - # legacy, you only need this to access the cloud version - # bearer_token_secret="bearer_token" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="$GRAFANA_DASHBOARD_URL" - $grafana_bearer_token - EOF - - echo "$decoded_toml" >> final_config.toml - cat config.toml >> final_config.toml - BASE64_CONFIG_OVERRIDE=$(cat final_config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV \ No newline at end of file diff --git a/.github/actions/setup-parse-base64-config/action.yml b/.github/actions/setup-parse-base64-config/action.yml deleted file mode 100644 index 72e8982e6d0..00000000000 --- a/.github/actions/setup-parse-base64-config/action.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Parse Base64 Config -description: A composite action that extracts the chainlink image, version and network from a base64-encoded config - -inputs: - base64Config: - description: Base64-encoded config to decode - -runs: - using: composite - steps: - - name: Install dasel - shell: bash - run: | - if ! 
which dasel > /dev/null; then - curl -L -o dasel "https://github.com/TomWright/dasel/releases/download/v2.6.0/dasel_linux_amd64" && chmod +x dasel && sudo mv dasel /usr/local/bin/ - else - echo "Dasel is already installed." - fi - - name: Add masks and export base64 config - shell: bash - run: | - decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d) - CHAINLINK_IMAGE=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.image' 2>/dev/null || echo ''; }) - echo ::add-mask::$CHAINLINK_IMAGE - CHAINLINK_VERSION=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.version' 2>/dev/null || echo ''; }) - NETWORKS=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*selected_networks[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - ETH2_EL_CLIENT=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*execution_layer[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - - if [ -n "$CHAINLINK_IMAGE" ]; then - echo "CHAINLINK_IMAGE=$CHAINLINK_IMAGE" >> $GITHUB_ENV - else - echo "No Chainlink Image found in base64-ed config" - fi - if [ -n "$CHAINLINK_VERSION" ]; then - echo "CHAINLINK_VERSION=$CHAINLINK_VERSION" >> $GITHUB_ENV - else - echo "No Chainlink Version found in base64-ed config. Exiting" - fi - if [ -n "$NETWORKS" ]; then - echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV - fi - if [ -n "$ETH2_EL_CLIENT" ]; then - echo "ETH2_EL_CLIENT=$ETH2_EL_CLIENT" >> $GITHUB_ENV - fi \ No newline at end of file diff --git a/.github/e2e-tests.yml b/.github/e2e-tests.yml index 0ee7a50d0c5..d5c257de8fd 100644 --- a/.github/e2e-tests.yml +++ b/.github/e2e-tests.yml @@ -16,27 +16,27 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/ocr_test.go -timeout 30m -count=1 -test.parallel=2 -json pyroscope_env: ci-smoke-ocr-evm-simulated - # Example of a configuration for running a single soak test in Kubernetes Remote Runner - - id: soak/ocr_test.go:^TestOCRv1Soak$ + - id: soak/ocr_test.go:TestOCRv1Soak path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv1Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv1Soak$ -test.parallel=1 -timeout 900h -count=1 -json + test_cmd_opts: 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRv2Soak$ + - id: soak/ocr_test.go:TestOCRv2Soak path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv2Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv2Soak$ -test.parallel=1 -timeout 900h -count=1 -json + test_cmd_opts: 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false test_secrets_required: true test_env_vars: TEST_SUITE: soak @@ -45,88 +45,80 @@ runner-test-matrix: path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv2Soak$ 
-test.parallel=1 -timeout 30m -count=1 -json + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv2Soak$ -test.parallel=1 -timeout 900h -count=1 -json test_config_override_path: integration-tests/testconfig/ocr2/overrides/wemix_testnet.toml test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestForwarderOCRv1Soak$ + - id: soak/ocr_test.go:TestForwarderOCRv1Soak path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestForwarderOCRv1Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestForwarderOCRv1Soak$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestForwarderOCRv2Soak$ + - id: soak/ocr_test.go:TestForwarderOCRv2Soak path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestForwarderOCRv2Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestForwarderOCRv2Soak$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled$ + - id: soak/ocr_test.go:TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled$ + - id: soak/ocr_test.go:TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRSoak_GasSpike$ + - id: soak/ocr_test.go:TestOCRSoak_GasSpike path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GasSpike$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GasSpike$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRSoak_ChangeBlockGasLimit$ +
- id: soak/ocr_test.go:TestOCRSoak_ChangeBlockGasLimit path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_ChangeBlockGasLimit$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_ChangeBlockGasLimit$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRSoak_RPCDownForAllCLNodes$ + - id: soak/ocr_test.go:TestOCRSoak_RPCDownForAllCLNodes path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForAllCLNodes$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForAllCLNodes$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak - - id: soak/ocr_test.go:^TestOCRSoak_RPCDownForHalfCLNodes$ + - id: soak/ocr_test.go:TestOCRSoak_RPCDownForHalfCLNodes path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForHalfCLNodes$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true + test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForHalfCLNodes$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: TEST_SUITE: soak @@ -137,6 +129,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/forwarder_ocr_test.go -timeout 30m -count=1 -test.parallel=2 -json pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated @@ -147,6 +140,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/forwarders_ocr2_test.go -timeout 30m -count=1 -test.parallel=2 -json pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated @@ -157,6 +151,7 @@ runner-test-matrix: runs_on: ubuntu22.04-16cores-64GB workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/ocr2_test.go -timeout 30m -count=1 -test.parallel=6 -json pyroscope_env: ci-smoke-ocr2-evm-simulated @@ -169,6 +164,7 @@ runner-test-matrix: runs_on: ubuntu22.04-16cores-64GB workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/ocr2_test.go -timeout 30m -count=1 -test.parallel=6 -json pyroscope_env: ci-smoke-ocr2-plugins-evm-simulated @@ -197,6 +193,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_0|TestAutomationBasic/registry_2_1_conditional|TestAutomationBasic/registry_2_1_logtrigger$" -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -207,6 +204,7 @@ runner-test-matrix: runs_on: ubuntu-latest 
workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_1_with_mercury_v02|TestAutomationBasic/registry_2_1_with_mercury_v03|TestAutomationBasic/registry_2_1_with_logtrigger_and_mercury_v02$" -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -217,6 +215,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_2_conditional|TestAutomationBasic/registry_2_2_logtrigger|TestAutomationBasic/registry_2_2_with_mercury_v02$" -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -227,6 +226,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_2_with_mercury_v03|TestAutomationBasic/registry_2_2_with_logtrigger_and_mercury_v02$" -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -237,6 +237,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_3_conditional_native|TestAutomationBasic/registry_2_3_conditional_link$" -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -247,6 +248,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_3_logtrigger_native|TestAutomationBasic/registry_2_3_logtrigger_link$" -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -257,6 +259,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_3_with_mercury_v03_link|TestAutomationBasic/registry_2_3_with_logtrigger_and_mercury_v02_link$" -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -267,6 +270,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestSetUpkeepTriggerConfig$ -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -277,6 +281,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationAddFunds$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -287,6 +292,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPauseUnPause$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -297,6 +303,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core 
Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationRegisterUpkeep$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -307,6 +314,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPauseRegistry$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -317,6 +325,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationKeeperNodesDown$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -327,6 +336,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPerformSimulation$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -337,6 +347,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationCheckPerformGasLimit$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -347,6 +358,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestUpdateCheckData$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -357,6 +369,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestSetOffchainConfigWithMaxGasPrice$ -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-automation-evm-simulated @@ -366,7 +379,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperBasicSmoke$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -376,7 +389,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperBlockCountPerTurn$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -386,7 +399,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperSimulation$ -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -396,7 +409,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperCheckPerformGasLimit$ -test.parallel=2 -timeout 30m -count=1 -json 
pyroscope_env: ci-smoke-keeper-evm-simulated @@ -406,7 +419,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperRegisterUpkeep$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -416,7 +429,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperAddFunds$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -426,7 +439,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperRemove$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -436,7 +449,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperPauseRegistry$ -test.parallel=2 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -446,7 +459,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperMigrateRegistry$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -456,7 +469,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperNodeDown$ -test.parallel=3 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -466,7 +479,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperPauseUnPauseUpkeep$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -476,7 +489,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperUpdateCheckData$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -486,7 +499,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu-latest workflows: - - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperJobReplacement$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-keeper-evm-simulated @@ -497,12 +510,10 @@ runner-test-matrix: test_env_type: k8s-remote-runner test_cmd: cd integration-tests/load/automationv2_1 && go test -test.run TestLogTrigger -test.parallel=1 -timeout 60m -count=1 -json remote_runner_memory: 4Gi - test_config_override_required: true test_secrets_required: true test_env_vars: + TEST_LOG_LEVEL: info TEST_SUITE: automationv2_1 - workflows: - - Automation Load Test pyroscope_env: 
automation-load-test - id: smoke/automation_upgrade_test.go:^TestAutomationNodeUpgrade/registry_2_0 @@ -510,13 +521,13 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu22.04-8cores-32GB workflows: - - Run Automation Product Nightly E2E Tests + - Automation Nightly Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_0 -test.parallel=1 -timeout 60m -count=1 -json test_env_vars: E2E_TEST_CHAINLINK_IMAGE: public.ecr.aws/chainlink/chainlink E2E_TEST_CHAINLINK_VERSION: latest E2E_TEST_CHAINLINK_UPGRADE_IMAGE: '{{ env.QA_CHAINLINK_IMAGE }}' - E2E_TEST_CHAINLINK_UPGRADE_VERSION: develop + E2E_TEST_CHAINLINK_UPGRADE_VERSION: '{{ env.DEFAULT_CHAINLINK_UPGRADE_VERSION }}' pyroscope_env: ci-smoke-automation-upgrade-tests - id: smoke/automation_upgrade_test.go:^TestAutomationNodeUpgrade/registry_2_1 @@ -524,13 +535,13 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu22.04-8cores-32GB workflows: - - Run Automation Product Nightly E2E Tests + - Automation Nightly Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_1 -test.parallel=5 -timeout 60m -count=1 -json test_env_vars: E2E_TEST_CHAINLINK_IMAGE: public.ecr.aws/chainlink/chainlink E2E_TEST_CHAINLINK_VERSION: latest E2E_TEST_CHAINLINK_UPGRADE_IMAGE: '{{ env.QA_CHAINLINK_IMAGE }}' - E2E_TEST_CHAINLINK_UPGRADE_VERSION: develop + E2E_TEST_CHAINLINK_UPGRADE_VERSION: '{{ env.DEFAULT_CHAINLINK_UPGRADE_VERSION }}' pyroscope_env: ci-smoke-automation-upgrade-tests - id: smoke/automation_upgrade_test.go:^TestAutomationNodeUpgrade/registry_2_2 @@ -538,13 +549,13 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu22.04-8cores-32GB workflows: - - Run Automation Product Nightly E2E Tests + - Automation Nightly Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_2 -test.parallel=5 -timeout 60m -count=1 -json test_env_vars: E2E_TEST_CHAINLINK_IMAGE: public.ecr.aws/chainlink/chainlink E2E_TEST_CHAINLINK_VERSION: latest E2E_TEST_CHAINLINK_UPGRADE_IMAGE: '{{ env.QA_CHAINLINK_IMAGE }}' - E2E_TEST_CHAINLINK_UPGRADE_VERSION: develop + E2E_TEST_CHAINLINK_UPGRADE_VERSION: '{{ env.DEFAULT_CHAINLINK_UPGRADE_VERSION }}' pyroscope_env: ci-smoke-automation-upgrade-tests - id: reorg/automation_reorg_test.go^TestAutomationReorg/registry_2_0 @@ -603,7 +614,7 @@ runner-test-matrix: test_env_vars: TEST_SUITE: chaos - - id: benchmark/automation_test.go:^TestAutomationBenchmark$ + - id: benchmark/automation_test.go:TestAutomationBenchmark path: integration-tests/benchmark/automation_test.go test_env_type: k8s-remote-runner remote_runner_memory: 4Gi @@ -613,9 +624,24 @@ runner-test-matrix: test_cmd: cd integration-tests/benchmark && go test -v -test.run ^TestAutomationBenchmark$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-benchmark-automation-nightly test_env_vars: + TEST_LOG_LEVEL: info TEST_SUITE: benchmark TEST_TYPE: benchmark + - id: soak/automation_test.go:TestAutomationBenchmark + path: integration-tests/benchmark/automation_test.go + test_env_type: k8s-remote-runner + remote_runner_memory: 4Gi + runs_on: ubuntu-latest + # workflows: + # - Nightly E2E Tests + test_cmd: cd integration-tests/benchmark && go test -v -test.run ^TestAutomationBenchmark$ -test.parallel=1 -timeout 30m -count=1 -json + pyroscope_env: ci-benchmark-automation-nightly + test_env_vars: + TEST_LOG_LEVEL: info + TEST_SUITE: benchmark + TEST_TYPE: soak + # END: Automation tests # START: VRF tests @@ -625,7 +651,6 @@ 
runner-test-matrix: runs_on: ubuntu22.04-8cores-32GB test_env_type: docker test_cmd: cd integration-tests/smoke && go test -v -test.run TestVRFv2Basic -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true test_secrets_required: true workflows: - On Demand VRFV2 Smoke Test (Ethereum clients) @@ -684,6 +709,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/vrf_test.go -timeout 30m -count=1 -test.parallel=2 -json pyroscope_env: ci-smoke-vrf-evm-simulated @@ -694,6 +720,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/vrfv2_test.go -timeout 30m -count=1 -test.parallel=6 -json pyroscope_env: ci-smoke-vrf2-evm-simulated @@ -704,6 +731,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/vrfv2plus_test.go -timeout 30m -count=1 -test.parallel=9 -json pyroscope_env: ci-smoke-vrf2plus-evm-simulated @@ -736,6 +764,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerFewFiltersFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -746,6 +775,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerFewFiltersFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -756,6 +786,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -766,6 +797,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -776,6 +808,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosPostgresFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -786,6 +819,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosPostgresFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -796,6 +830,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerReplayFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: 
ci-smoke-log_poller-evm-simulated @@ -806,6 +841,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerReplayFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json pyroscope_env: ci-smoke-log_poller-evm-simulated @@ -820,6 +856,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/runlog_test.go -timeout 30m -test.parallel=2 -count=1 -json pyroscope_env: ci-smoke-runlog-evm-simulated @@ -830,6 +867,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/cron_test.go -timeout 30m -count=1 -json pyroscope_env: ci-smoke-cron-evm-simulated @@ -840,6 +878,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/flux_test.go -timeout 30m -count=1 -json pyroscope_env: ci-smoke-flux-evm-simulated @@ -850,6 +889,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/reorg_above_finality_test.go -timeout 30m -count=1 -json pyroscope_env: ci-smoke-reorg-above-finality-evm-simulated @@ -860,6 +900,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/migration && go test upgrade_version_test.go -timeout 30m -count=1 -test.parallel=2 -json test_env_vars: @@ -874,6 +915,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E Core Tests + - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/job_distributor_test.go -timeout 30m -count=1 -json pyroscope_env: ci-smoke-jd-evm-simulated @@ -888,6 +930,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -899,6 +942,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -911,6 +955,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -923,6 +968,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -948,6 +994,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd 
integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPTokenPoolRateLimits$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -959,6 +1006,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPMulticall$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -970,6 +1018,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPManuallyExecuteAfterExecutionFailingDueToInsufficientGas$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -981,6 +1030,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPOnRampLimits$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -1012,6 +1062,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPReorgBelowFinality$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -1024,6 +1075,7 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPReorgAboveFinalityAtDestination$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: @@ -1036,12 +1088,40 @@ runner-test-matrix: runs_on: ubuntu-latest workflows: - PR E2E CCIP Tests + - Merge Queue E2E CCIP Tests - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPReorgAboveFinalityAtSource$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml + - id: integration-tests/ccip-tests/load/ccip_test.go:TestLoadCCIPStableRPS + path: integration-tests/ccip-tests/load/ccip_test.go + test_env_type: k8s-remote-runner + runs_on: ubuntu-latest + test_cmd: cd integration-tests/ccip-tests/load && DETACH_RUNNER=false go test -test.run ^TestLoadCCIPStableRPS$ -timeout 70m -count=1 -test.parallel=1 -json + test_env_vars: + E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" + workflows: + - E2E CCIP Load Tests + test_artifacts_on_failure: + - ./integration-tests/load/logs/payload_ccip.json + + # Enable when CCIP-2277 is resolved + # + # - id: integration-tests/ccip-tests/load/ccip_test.go:TestLoadCCIPStableRPSAfterARMCurseAndUncurse + # path: integration-tests/ccip-tests/load/ccip_test.go + # test_env_type: k8s-remote-runner + # runs_on: ubuntu-latest + # test_cmd: cd integration-tests/ccip-tests/load && DETACH_RUNNER=false go test -test.run ^TestLoadCCIPStableRPSAfterARMCurseAndUncurse$ -timeout 70m -count=1 -test.parallel=1 -json + # test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/load-with-arm-curse-uncurse.toml + # test_env_vars: + # E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" + # workflows: + # - E2E CCIP Load Tests + #
test_artifacts_on_failure: + # - ./integration-tests/load/logs/payload_ccip.json + - id: ccip-tests/chaos/ccip_test.go path: integration-tests/ccip-tests/chaos/ccip_test.go test_env_type: k8s-remote-runner diff --git a/.github/workflows/automation-benchmark-tests.yml b/.github/workflows/automation-benchmark-tests.yml index 85172bc98af..d1af80fb91e 100644 --- a/.github/workflows/automation-benchmark-tests.yml +++ b/.github/workflows/automation-benchmark-tests.yml @@ -2,108 +2,52 @@ name: Automation Benchmark Test on: workflow_dispatch: inputs: - testType: - description: Type of test to run (benchmark, soak) - required: true - default: benchmark - type: string - base64Config: - description: base64-ed config - required: true - type: string + test_config_override_path: + description: Path to a test config file used to override the default test config + required: false + type: string + test_secrets_override_key: + description: Key to run tests with custom test secrets + required: false + type: string slackMemberID: description: Notifies test results (Not your @) required: true default: U02Q14G80TY type: string - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string + testType: + description: Type of test to run (benchmark, soak) + required: true + default: benchmark + type: string jobs: - automation_benchmark: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - name: Automation Benchmark Test - runs-on: ubuntu22.04-16cores-64GB - env: - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} + run-e2e-tests-workflow: + name: Run E2E Tests + uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml + with: + test_ids: '${{ inputs.testType }}/automation_test.go:TestAutomationBenchmark' + test_config_override_path: ${{ inputs.test_config_override_path }} + SLACK_USER: ${{ inputs.slackMemberID }} SLACK_CHANNEL: C03KJ5S7KEK - CHAINLINK_ENV_USER: ${{ github.actor }} - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Test Image - id: build-test-image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: benchmark chaos reorg load - - name: Run Tests - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - DETACH_RUNNER: true - TEST_SUITE: benchmark - TEST_ARGS: -test.timeout 720h - ENV_JOB_IMAGE: ${{ steps.build-test-image.outputs.test_image }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - TEST_TYPE: ${{ github.event.inputs.testType }} - TEST_TEST_TYPE: ${{ github.event.inputs.testType }} - RR_MEM: 4Gi - TEST_LOG_LEVEL: info - with: - test_command_to_run: cd integration-tests && go test -timeout 30m -v -run ^TestAutomationBenchmark$ ./benchmark -count=1 - test_download_vendor_packages_command: make gomod - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - test_config_override_base64: ${{ env.BASE64_CONFIG_OVERRIDE }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-benchmark-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation Benchmark Test - continue-on-error: true + secrets: + QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} + QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} + QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} + GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} + GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_REGION: ${{ secrets.QA_AWS_REGION }} + AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} + AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} + TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }} + SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} + SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} diff --git a/.github/workflows/automation-load-tests.yml b/.github/workflows/automation-load-tests.yml index 95368825e33..23c053203a1 100644 --- a/.github/workflows/automation-load-tests.yml +++ b/.github/workflows/automation-load-tests.yml @@ -2,123 +2,47 @@ name: Automation Load Test on: workflow_dispatch: inputs: - base64Config: - description: base64-ed config - required: true - type: string + test_config_override_path: + description: Path to a test config file used to override the default test config + required: false + type: string + test_secrets_override_key: + description: 'Key to run tests with custom test secrets' + required: false + type: string slackMemberID: description: Notifies test 
results (Not your @) required: true default: U02Q14G80TY - type: string - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string + type: string jobs: - automation_load: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - name: Automation Load Test - runs-on: ubuntu22.04-16cores-64GB - env: - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} + run-e2e-tests-workflow: + name: Run E2E Tests + uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml + with: + test_ids: 'load/automationv2_1/automationv2_1_test.go:TestLogTrigger' + test_config_override_path: ${{ inputs.test_config_override_path }} + SLACK_USER: ${{ inputs.slackMemberID }} SLACK_CHANNEL: C03KJ5S7KEK - CHAINLINK_ENV_USER: ${{ github.actor }} - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Merge Pyrsoscope config - env: - PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - PYROSCOPE_ENVIRONMENT: "automation-load-test" - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - run: | - decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d) - - # use Pyroscope config from GH secrets and merge it with base64 input - cat << EOF > config.toml - server_url="$PYROSCOPE_SERVER" - environment="$PYROSCOPE_ENVIRONMENT" - key_secret="$PYROSCOPE_KEY" - EOF - - echo "$decoded_toml" >> final_config.toml - cat config.toml >> final_config.toml - BASE64_CONFIG_OVERRIDE=$(cat final_config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Test Image - id: build-test-image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: benchmark chaos reorg load - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - RR_CPU: 4000m - RR_MEM: 4Gi - DETACH_RUNNER: true - TEST_SUITE: automationv2_1 - TEST_ARGS: -test.timeout 720h - ENV_JOB_IMAGE: ${{ steps.build-test-image.outputs.test_image }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - 
PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - with: - test_command_to_run: cd integration-tests/load && go test -timeout 1h -v -run TestLogTrigger ./automationv2_1 -count=1 - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - test_config_override_base64: ${{ env.BASE64_CONFIG_OVERRIDE }} - test_download_vendor_packages_command: make gomod - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-load-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation Load Test - continue-on-error: true + secrets: + QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} + QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} + QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} + GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} + GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_REGION: ${{ secrets.QA_AWS_REGION }} + AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} + AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} + TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }} + SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} + SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} diff --git a/.github/workflows/automation-nightly-tests.yml b/.github/workflows/automation-nightly-tests.yml index d73df6a8c18..06dc00b3a14 100644 --- a/.github/workflows/automation-nightly-tests.yml +++ b/.github/workflows/automation-nightly-tests.yml @@ -7,291 +7,34 @@ on: - "*" workflow_dispatch: -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu22.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-nightly-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ 
github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - automation-upgrade-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink] - env: - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: info - SELECTED_NETWORKS: "SIMULATED" - strategy: - fail-fast: false - matrix: - tests: - - name: Upgrade 2.0 - id: upgrade-2-0 - suite: smoke - nodes: 1 - os: ubuntu22.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_0 ./smoke - - name: Upgrade 2.1 - id: upgrade-2-1 - suite: smoke - nodes: 5 - os: ubuntu22.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_1 ./smoke - - name: Upgrade 2.2 - id: upgrade-2-2 - suite: smoke - nodes: 5 - os: ubuntu22.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_2 ./smoke - runs-on: ${{ matrix.tests.os }} - name: Automation ${{ matrix.tests.name }} Test - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - TEST_SUITE: ${{ matrix.tests.suite }} - E2E_TEST_SELECTED_NETWORK: ${{ env.SELECTED_NETWORKS }} - E2E_TEST_CHAINLINK_IMAGE: "public.ecr.aws/chainlink/chainlink" - E2E_TEST_CHAINLINK_VERSION: "latest" - E2E_TEST_CHAINLINK_UPGRADE_IMAGE: ${{ env.CHAINLINK_IMAGE }} - E2E_TEST_CHAINLINK_UPGRADE_VERSION: ${{ github.sha }} - E2E_TEST_LOGGING_RUN_ID: ${{ github.run_id }} - E2E_TEST_LOG_COLLECT: "true" - E2E_TEST_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - E2E_TEST_LOG_STREAM_LOG_TARGETS: ${{ vars.LOGSTREAM_LOG_TARGETS }} - E2E_TEST_GRAFANA_BASE_URL: "http://localhost:8080/primary" - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - with: - test_command_to_run: cd ./integration-tests && go test -timeout 60m -count=1 -json -test.parallel=${{ matrix.tests.nodes }} ${{ matrix.tests.command }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: 'public.ecr.aws/chainlink/chainlink' - cl_image_tag: 'latest' - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_location: ./integration-tests/${{ matrix.tests.suite }}/logs - 
artifacts_name: testcontainers-logs-${{ matrix.tests.name }} - publish_check_name: Automation Results ${{ matrix.tests.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: failure() - with: - name: gotest-logs-${{ matrix.tests.name }} - path: /tmp/gotest.log - retention-days: 7 - continue-on-error: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-nightly-upgrade-tests-${{ matrix.tests.id }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation ${{ matrix.tests.name }} Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - - test-notify: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [ automation-upgrade-tests ] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: C03KJ5S7KEK - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "Automation Nightly Tests ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - test-results: - name: Post Test Results for ${{ matrix.name }} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: test-notify - strategy: - fail-fast: false - matrix: - name: [ Upgrade 2.0, Upgrade 2.1, Upgrade 2.2 ] - steps: - - name: Get Results - id: test-results - run: | - # I feel like there's some clever, fully jq way to do this, but I ain't got the motivation to figure it out - echo "Querying test results" - - PARSED_RESULTS=$(curl \ - -H "Authorization: Bearer ${{ github.token }}" \ - 
'https://api.github.com/repos/${{github.repository}}/actions/runs/${{ github.run_id }}/jobs' \ - | jq -r --arg pattern "${{ matrix.name }} Test" '.jobs[] - | select(.name | test($pattern)) as $job - | $job.steps[] - | select(.name == "Run Tests") - | { conclusion: (if .conclusion == "success" then ":white_check_mark:" else ":x:" end), product: ("*" + ($job.name | capture($pattern).product) + "*") }') - - echo "Parsed Results:" - echo $PARSED_RESULTS - - ALL_SUCCESS=true - for row in $(echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion != ":white_check_mark:")'); do - success=false - break - done - - echo all_success=$ALL_SUCCESS >> $GITHUB_OUTPUT - - FORMATTED_RESULTS=$(echo $PARSED_RESULTS | jq -s '[.[] - | { - conclusion: .conclusion, - product: .product - } - ] - | map("{\"type\": \"section\", \"text\": {\"type\": \"mrkdwn\", \"text\": \"\(.product): \(.conclusion)\"}}") - | join(",")') - - echo "Formatted Results:" - echo $FORMATTED_RESULTS - - # Cleans out backslashes and quotes from jq - CLEAN_RESULTS=$(echo "$FORMATTED_RESULTS" | sed 's/\\\"/"/g' | sed 's/^"//;s/"$//') - - echo "Clean Results" - echo $CLEAN_RESULTS - - echo results=$CLEAN_RESULTS >> $GITHUB_OUTPUT - - - name: Test Details - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - with: - channel-id: C03KJ5S7KEK - payload: | - { - "thread_ts": "${{ needs.test-notify.outputs.thread_ts }}", - "attachments": [ - { - "color": "${{ steps.test-results.outputs.all_success && '#2E7D32' || '#C62828' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "${{ matrix.name }} ${{ steps.test-results.outputs.all_success && ':white_check_mark:' || ':x: Notifying <@U02Q14G80TY>'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - ${{ steps.test-results.outputs.results }} - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} \ No newline at end of file + run-e2e-tests-workflow: + name: Run E2E Tests + uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml + with: + test_workflow: Automation Nightly Tests + chainlink_version: ${{ github.sha }} + slack_notification_after_tests: true + slack_notification_after_tests_channel_id: "#automation-test-notifications" + slack_notification_after_tests_name: Automation Nightly E2E Tests + # slack_notification_after_tests_notify_user_id_on_failure: U0XXXXXXX + secrets: + QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} + QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} + QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} + GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} + GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_REGION: ${{ secrets.QA_AWS_REGION }} + AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} + AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} + TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }} + SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} 
diff --git a/.github/workflows/automation-ondemand-tests.yml b/.github/workflows/automation-ondemand-tests.yml index ed69bd5ac05..a5b26744253 100644 --- a/.github/workflows/automation-ondemand-tests.yml +++ b/.github/workflows/automation-ondemand-tests.yml @@ -171,6 +171,9 @@ jobs: GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} AWS_REGION: ${{ secrets.QA_AWS_REGION }} AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} diff --git a/.github/workflows/ccip-chaos-tests.yml b/.github/workflows/ccip-chaos-tests.yml index 1d37551d9fe..3a6cae796d2 100644 --- a/.github/workflows/ccip-chaos-tests.yml +++ b/.github/workflows/ccip-chaos-tests.yml @@ -34,6 +34,9 @@ jobs: GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} AWS_REGION: ${{ secrets.QA_AWS_REGION }} AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} diff --git a/.github/workflows/ccip-load-tests.yml b/.github/workflows/ccip-load-tests.yml index ec65c2af105..235b9b0f67e 100644 --- a/.github/workflows/ccip-load-tests.yml +++ b/.github/workflows/ccip-load-tests.yml @@ -10,293 +10,54 @@ on: - '*' workflow_dispatch: inputs: - base64_test_input: # base64 encoded toml for test input - description: 'Base64 encoded toml test input' + test_config_override_path: + description: Path to a test config file used to override the default test config required: false + type: string test_secrets_override_key: description: 'Key to run tests with custom test secrets' required: false type: string + chainlink_version: + description: Chainlink image version to use. 
The commit SHA is used if not provided + required: false + type: string # Only run 1 of this workflow at a time per PR concurrency: group: load-ccip-tests-chainlink-${{ github.ref }} cancel-in-progress: true -env: - # TODO: TT-1470 - Update image names as we solidify new release strategy - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - CHAINLINK_VERSION: ${{ github.sha}} - INPUT_CHAINLINK_TEST_VERSION: ${{ github.sha}} - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws - MOD_CACHE_VERSION: 1 - jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - with: - repository: chainlink - tag: ${{ env.CHAINLINK_VERSION }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - env: - GH_TOKEN: ${{ github.token }} - with: - cl_repo: smartcontractkit/chainlink - cl_ref: ${{ env.CHAINLINK_VERSION }} - push_tag: ${{ env.CHAINLINK_IMAGE }}:${{ env.CHAINLINK_VERSION }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-load-test-build-chainlink-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - build-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu20.04-8cores-32GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-load-test-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - tag: ${{ env.INPUT_CHAINLINK_TEST_VERSION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: ccip-load - - ccip-load-test: - environment: integration - needs: [ build-chainlink, build-test-image ] - if: ${{ always() && !contains(needs.*.result, 'failure') }} - permissions: -
issues: read - checks: write - pull-requests: write - id-token: write - contents: read - env: - CHAINLINK_ENV_USER: ${{ github.actor }} + run-e2e-tests-workflow: + name: Run E2E Tests + uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml + with: + test_workflow: E2E CCIP Load Tests + test_config_override_path: ${{ inputs.test_config_override_path }} + chainlink_version: ${{ inputs.chainlink_version || github.sha }} + slack_notification_after_tests: always + slack_notification_after_tests_channel_id: '#ccip-testing' + slack_notification_after_tests_name: CCIP E2E Load Tests + test_image_suites: ccip-load + secrets: + QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} + QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} + QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} + GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} + GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_REGION: ${{ secrets.QA_AWS_REGION }} + AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} + AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} + TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }} + SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }} - TEST_LOG_LEVEL: info - REF_NAME: ${{ github.head_ref || github.ref_name }} - BASE64_NETWORK_CONFIG: ${{ secrets.BASE64_NETWORK_CONFIG }} - strategy: - fail-fast: false - matrix: - type: - - name: stable-load - run: ^TestLoadCCIPStableRPS$ - os: ubuntu-latest - # Enable when CCIP-2277 is resolved - # - name: load-with-arm-curse-uncurse - # run: ^TestLoadCCIPStableRPSAfterARMCurseAndUncurse$ - # config_path: ./integration-tests/ccip-tests/testconfig/tomls/load-with-arm-curse-uncurse.toml - # os: ubuntu-latest - runs-on: ${{ matrix.type.os }} - name: CCIP ${{ matrix.type.name }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-load-test-${{ matrix.type.name }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: CCIP ${{ matrix.type.name }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Sets env vars - id: set_override_config - shell: bash - run: | - # if the matrix.type.config_path is set, use it as the override config - if [ -n "${{ matrix.type.config_path }}" ]; then - BASE64_CONFIG_OVERRIDE=$(base64 -w 0 -i ${{ matrix.type.config_path }}) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - fi - if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; 
then - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - fi - - name: step summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.INPUT_CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ github.sha }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - TEST_SUITE: load - TEST_ARGS: -test.timeout 900h - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - RR_MEM: 8Gi - RR_CPU: 4 - TEST_TRIGGERED_BY: ccip-load-test-ci-${{ matrix.type.name }} - BASE64_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - TEST_BASE64_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - E2E_TEST_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - E2E_TEST_CHAINLINK_VERSION: ${{ env.CHAINLINK_VERSION }} - E2E_TEST_LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - E2E_TEST_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" - E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -json -run ${{ matrix.type.run }} ./load 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - test_config_override_base64: ${{ env.BASE64_CONFIG_OVERRIDE }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - publish_check_name: ${{ matrix.type.name }} - artifacts_location: ./integration-tests/load/logs/payload_ccip.json - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - should_cleanup: "true" - - # Reporting Jobs - start-slack-thread: - name: Start Slack Thread - if: ${{ failure() && needs.ccip-load-test.result != 'skipped' && needs.ccip-load-test.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [ccip-load-test] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - 
name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: "#ccip-testing" - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "CCIP load tests results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/${{contains(github.ref_name, 'release') && 'releases/tag' || 'tree'}}/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results - if: ${{ failure() && needs.start-slack-thread.result != 'skipped' && needs.start-slack-thread.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^CCIP (.*)$ - message_title: CCIP Jobs - slack_channel_id: "#ccip-testing" - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - - # End Reporting Jobs \ No newline at end of file diff --git a/.github/workflows/integration-chaos-tests.yml b/.github/workflows/integration-chaos-tests.yml index 1f7e6f40fb1..c9da7d84381 100644 --- a/.github/workflows/integration-chaos-tests.yml +++ b/.github/workflows/integration-chaos-tests.yml @@ -27,6 +27,9 @@ jobs: GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} AWS_REGION: ${{ secrets.QA_AWS_REGION }} AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} diff --git a/.github/workflows/integration-tests-publish.yml b/.github/workflows/integration-tests-publish.yml index de551fedce1..b74902cdc43 100644 --- a/.github/workflows/integration-tests-publish.yml +++ b/.github/workflows/integration-tests-publish.yml @@ -40,7 +40,7 @@ jobs: run: | echo "other_tags=${ECR_TAG}" >> $GITHUB_OUTPUT - name: Build Image - uses: ./.github/actions/build-test-image + uses: smartcontractkit/.github/actions/ctf-build-test-image@a5e4f4c8fbb8e15ab2ad131552eca6ac83c4f4b3 # ctf-build-test-image@0.1.0 with: other_tags: ${{ steps.tags.outputs.other_tags }} QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} diff --git 
a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index a8884c306f9..032ec40b4ea 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -71,7 +71,7 @@ jobs: echo "should-enforce=$SHOULD_ENFORCE" >> $GITHUB_OUTPUT - name: Enforce CTF Version if: steps.condition-check.outputs.should-enforce == 'true' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/mod-version@fc3e0df622521019f50d772726d6bf8dc919dd38 # v2.3.19 + uses: smartcontractkit/.github/actions/ctf-check-mod-version@21b0189c5fdca0318617d259634b1a91e6d80262 # ctf-check-mod-version@0.0.0 with: go-project-path: ./integration-tests module-name: github.com/smartcontractkit/chainlink-testing-framework/lib @@ -160,7 +160,7 @@ jobs: repository: smartcontractkit/chainlink ref: ${{ inputs.cl_ref }} - name: Setup Go - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@0ce1e67b254a4f041e03cc6f0e3afc987b47c7bd # v2.3.30 + uses: smartcontractkit/.github/actions/ctf-setup-go@b0d756c57fcdbcff187e74166562a029fdd5d1b9 # ctf-setup-go@0.0.0 with: test_download_vendor_packages_command: cd ${{ matrix.project.path }} && go mod download go_mod_path: ${{ matrix.project.path }}/go.mod @@ -232,8 +232,8 @@ jobs: AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} dep_evm_sha: ${{ inputs.evm-ref }} - run-core-e2e-tests-workflow: - name: Run Core E2E Tests + run-core-e2e-tests-for-pr: + name: Run Core E2E Tests For PR permissions: actions: read checks: write @@ -241,10 +241,10 @@ jobs: id-token: write contents: read needs: [build-chainlink, changes] - if: needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true' + if: github.event_name == 'pull_request' && ( needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml with: - workflow_name: Run Core E2E Tests + workflow_name: Run Core E2E Tests For PR chainlink_version: ${{ inputs.evm-ref || github.sha }} chainlink_upgrade_version: ${{ github.sha }} test_workflow: PR E2E Core Tests @@ -262,14 +262,17 @@ jobs: GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} AWS_REGION: ${{ secrets.QA_AWS_REGION }} AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - run-ccip-e2e-tests-workflow: - name: Run CCIP E2E Tests + run-core-e2e-tests-for-merge-queue: + name: Run Core E2E Tests For Merge Queue permissions: actions: read checks: write @@ -277,10 +280,53 @@ jobs: id-token: write contents: read needs: [build-chainlink, changes] - if: needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true' + if: github.event_name == 'merge_group' uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml with: - workflow_name: Run CCIP E2E Tests + workflow_name: Run Core E2E Tests For Merge Queue + chainlink_version: ${{ inputs.evm-ref || github.sha }} + chainlink_upgrade_version: ${{ github.sha }} + test_workflow: Merge Queue 
E2E Core Tests + upload_cl_node_coverage_artifact: true + upload_cl_node_coverage_artifact_prefix: cl_node_coverage_data_ + enable_otel_traces_for_ocr2_plugins: ${{ contains(join(github.event.pull_request.labels.*.name, ' '), 'enable tracing') }} + # Notify Test Tooling team in slack when merge queue tests fail + slack_notification_after_tests: on_failure + slack_notification_after_tests_channel_id: "#team-test-tooling-internal" + slack_notification_after_tests_name: Core E2E Tests In Merge Queue + secrets: + QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} + QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} + QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} + GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} + GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_REGION: ${{ secrets.QA_AWS_REGION }} + AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} + AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} + SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} + + run-ccip-e2e-tests-for-pr: + name: Run CCIP E2E Tests For PR + permissions: + actions: read + checks: write + pull-requests: write + id-token: write + contents: read + needs: [build-chainlink, changes] + if: github.event_name == 'pull_request' && (needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') + uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml + with: + workflow_name: Run CCIP E2E Tests For PR chainlink_version: ${{ inputs.evm-ref || github.sha }} chainlink_upgrade_version: ${{ github.sha }} test_workflow: PR E2E CCIP Tests @@ -298,6 +344,48 @@ jobs: GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_REGION: ${{ secrets.QA_AWS_REGION }} + AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} + AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} + SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} + + run-ccip-e2e-tests-for-merge-queue: + name: Run CCIP E2E Tests For Merge Queue + permissions: + actions: read + checks: write + pull-requests: write + id-token: write + contents: read + needs: [build-chainlink, changes] + if: github.event_name == 'merge_group' && (needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') + uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml + with: + workflow_name: Run CCIP E2E Tests For Merge Queue + chainlink_version: ${{ inputs.evm-ref || github.sha }} + chainlink_upgrade_version: ${{ github.sha }} + test_workflow: Merge Queue E2E CCIP Tests + upload_cl_node_coverage_artifact: true + 
upload_cl_node_coverage_artifact_prefix: cl_node_coverage_data_ + enable_otel_traces_for_ocr2_plugins: ${{ contains(join(github.event.pull_request.labels.*.name, ' '), 'enable tracing') }} + secrets: + QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} + QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} + QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} + GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} + GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + LOKI_URL: ${{ secrets.LOKI_URL }} + LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} AWS_REGION: ${{ secrets.QA_AWS_REGION }} AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} @@ -308,12 +396,12 @@ jobs: if: always() name: ETH Smoke Tests runs-on: ubuntu-latest - needs: [lint-integration-tests, run-core-e2e-tests-workflow, run-ccip-e2e-tests-workflow] + needs: [lint-integration-tests, run-core-e2e-tests-for-pr, run-ccip-e2e-tests-for-pr, run-core-e2e-tests-for-merge-queue, run-ccip-e2e-tests-for-merge-queue] steps: - name: Check Core test results id: check_core_results run: | - results='${{ needs.run-core-e2e-tests-workflow.outputs.test_results }}' + results='${{ needs.run-core-e2e-tests-for-pr.outputs.test_results }}' echo "Core test results:" echo "$results" | jq . @@ -323,8 +411,8 @@ jobs: - name: Check CCIP test results id: check_ccip_results run: | - if [[ '${{ needs.run-ccip-e2e-tests-workflow.result }}' != 'skipped' ]]; then - results='${{ needs.run-ccip-e2e-tests-workflow.outputs.test_results }}' + if [[ '${{ needs.run-ccip-e2e-tests-for-pr.result }}' != 'skipped' ]]; then + results='${{ needs.run-ccip-e2e-tests-for-pr.outputs.test_results }}' echo "CCIP test results:" echo "$results" | jq . 
else @@ -340,10 +428,14 @@ jobs: channel-id: "#team-test-tooling-internal" slack-message: ":x: :mild-panic-intensifies: Node Migration Tests Failed: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}\n${{ format('Notifying ', secrets.GUARDIAN_SLACK_NOTIFICATION_HANDLE) }}" - - name: Fail the job if core tests not successful - if: always() && needs.run-core-e2e-tests-workflow.result == 'failure' + - name: Fail the job if core tests in PR not successful + if: always() && needs.run-core-e2e-tests-for-pr.result == 'failure' run: exit 1 + - name: Fail the job if core tests in merge queue not successful + if: always() && needs.run-core-e2e-tests-for-merge-queue.result == 'failure' + run: exit 1 + - name: Fail the job if lint not successful if: always() && needs.lint-integration-tests.result == 'failure' run: exit 1 @@ -351,7 +443,7 @@ jobs: cleanup: name: Clean up integration environment deployments if: always() - needs: [run-core-e2e-tests-workflow, run-ccip-e2e-tests-workflow] + needs: [run-core-e2e-tests-for-pr, run-ccip-e2e-tests-for-pr, run-core-e2e-tests-for-merge-queue, run-ccip-e2e-tests-for-merge-queue] runs-on: ubuntu-latest steps: - name: Checkout repo @@ -383,7 +475,7 @@ jobs: show-chainlink-node-coverage: name: Show Chainlink Node Go Coverage if: always() - needs: [run-core-e2e-tests-workflow, run-ccip-e2e-tests-workflow] + needs: [run-core-e2e-tests-for-pr, run-ccip-e2e-tests-for-pr, run-core-e2e-tests-for-merge-queue, run-ccip-e2e-tests-for-merge-queue] runs-on: ubuntu-latest steps: - name: Checkout the repo @@ -567,10 +659,9 @@ jobs: ref: ${{ needs.get_solana_sha.outputs.sha }} - name: Build Test Image if: (needs.changes.outputs.core_changes == 'true' || github.event_name == 'workflow_dispatch') && needs.solana-test-image-exists.outputs.exists == 'false' - uses: ./.github/actions/build-test-image + uses: smartcontractkit/.github/actions/ctf-build-test-image@a5e4f4c8fbb8e15ab2ad131552eca6ac83c4f4b3 # ctf-build-test-image@0.1.0 with: tag: ${{ needs.get_solana_sha.outputs.sha }} - artifacts_path: ${{ env.CONTRACT_ARTIFACTS_PATH }} QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} @@ -620,7 +711,7 @@ jobs: ref: ${{ needs.get_solana_sha.outputs.sha }} - name: Run Setup if: needs.changes.outputs.core_changes == 'true' || github.event_name == 'workflow_dispatch' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-run-tests-environment@0ce1e67b254a4f041e03cc6f0e3afc987b47c7bd # v2.3.30 + uses: smartcontractkit/.github/actions/ctf-setup-run-tests-environment@49cb1613e96c9ce17f7290e4dabd38f43aa9bd4d # ctf-setup-run-tests-environment@0.0.0 with: go_mod_path: ./integration-tests/go.mod cache_restore_only: true @@ -672,7 +763,7 @@ jobs: echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - name: Run Tests if: needs.changes.outputs.core_changes == 'true' || github.event_name == 'workflow_dispatch' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@d2f9642bcc24a73400568756f24b72c188ac7a9a # v2.3.31 + uses: smartcontractkit/.github/actions/ctf-run-tests@b8731364b119e88983e94b0c4da87fc27ddb41b8 # ctf-run-tests@0.0.0 with: test_command_to_run: export ENV_JOB_IMAGE=${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-solana-tests:${{ needs.get_solana_sha.outputs.sha }} && make test_smoke test_config_override_base64: 
${{ env.BASE64_CONFIG_OVERRIDE }} diff --git a/.github/workflows/live-testnet-tests.yml b/.github/workflows/live-testnet-tests.yml deleted file mode 100644 index bcf4dfea199..00000000000 --- a/.github/workflows/live-testnet-tests.yml +++ /dev/null @@ -1,1119 +0,0 @@ -# *** -# This workflow is a monstrosity of copy-paste, and that's to increase legibility in reporting and running, so the code be damned. -# I suspect this can be cleaned up significantly with some clever trickery of the GitHub actions matrices, but I am not that clever. -# We want each chain to run in parallel, but each test within the chain needs to be able to run sequentially -# (we're trying to eliminate this as a requirement, should make it a lot easier). -# Each chain can have a variety of tests to run. -# We also want reporting to be clear in the start-slack-thread and post-test-results-to-slack jobs. -# Funding address: 0xC1107e57082945E28d3202A81B1520DEA3AE6AEC -# *** - -name: Live Testnet Tests -on: - # Disable refular runs for now until we can fix some test client flakiness and improve stability - # schedule: - # - cron: "0 5 * * *" # Run every night at midnight EST - # push: - # tags: - # - "*" - workflow_dispatch: - inputs: - slack_user_id: - description: "The Slack member ID to notify" - required: true - type: string - network: - description: "The network to run tests on" - required: true - type: choice - options: - - "All" - - "Sepolia" - - "Optimism Sepolia" - - "Arbitrum Sepolia" - - "Base Sepolia" - - "Polygon Mumbai" - - "Avalanche Fuji" - - "Fantom Testnet" - - "Celo Alfajores" - - "Linea Goerli" - - "BSC Testnet" - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - CHAINLINK_NODE_FUNDING: .5 - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - LOGSTREAM_LOG_TARGETS: loki - GRAFANA_URL: ${{ vars.GRAFANA_URL }} - RUN_ID: ${{ github.run_id }} - - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - -jobs: - - # Build Test Dependencies - - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: live-testnet-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - build-tests: - environment: integration - permissions: - id-token: write - contents: read - name: Build Tests Binary - runs-on: ubuntu-latest - 
steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: live-testnet-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Tests Binary - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-tests@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - go_tags: embed - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - binary_name: tests - - # End Build Test Dependencies - - # Reporting Jobs - - start-slack-thread: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [sepolia-smoke-tests, optimism-sepolia-smoke-tests, arbitrum-sepolia-smoke-tests, base-sepolia-smoke-tests, polygon-mumbai-smoke-tests, avalanche-fuji-smoke-tests, fantom-testnet-smoke-tests, celo-alfajores-smoke-tests, linea-goerli-smoke-tests, bsc-testnet-smoke-tests] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: ${{ secrets.QA_SLACK_CHANNEL }} - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "Live Smoke Test Results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "Notifying <@${{ inputs.slack_user_id }}>" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>\nThe funding address for all tests and networks is `0xC1107e57082945E28d3202A81B1520DEA3AE6AEC`" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results for ${{ matrix.network }} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - strategy: - fail-fast: false - matrix: - network: [Sepolia, Optimism Sepolia, Arbitrum Sepolia, Base Sepolia, Polygon Mumbai, Avalanche Fuji, Fantom 
Testnet, Celo Alfajores, Linea Goerli, BSC Testnet] - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^${{ matrix.network }} (.*?) Tests$ - message_title: ${{ matrix.network }} - slack_channel_id: ${{ secrets.QA_SLACK_CHANNEL }} - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - - # End Reporting Jobs - - sepolia-smoke-tests: - environment: integration - if: ${{ (github.event.inputs.network == 'All' || github.event.inputs.network == 'Sepolia') }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "sepolia" - httpEndpoints: ${{ secrets.QA_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 
-test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directories: "./" - - bsc-testnet-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'BSC Testnet' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: BSC Testnet ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-bsc-testnet - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "bsc_testnet" - httpEndpoints: ${{ secrets.QA_BSC_TESTNET_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_BSC_TESTNET_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - optimism-sepolia-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Optimism Sepolia' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Optimism Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-optimism-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "optimism_sepolia" - httpEndpoints: ${{ secrets.QA_OPTIMISM_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_OPTIMISM_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests 
Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - arbitrum-sepolia-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Arbitrum Sepolia' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Arbitrum Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-arbitrum-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "arbitrum_sepolia" - httpEndpoints: ${{ 
secrets.QA_ARBITRUM_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_ARBITRUM_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - base-sepolia-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Base Sepolia' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Base Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-base-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "base_sepolia" - httpEndpoints: ${{ secrets.QA_BASE_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ 
secrets.QA_BASE_SEPOLIA_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
-
-  polygon-mumbai-smoke-tests:
-    environment: integration
-    if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Polygon Mumbai' }}
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    strategy:
-      max-parallel: 1
-      fail-fast: false
-      matrix:
-        include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations
-          - product: OCR
-            test: TestOCRBasic
-          - product: Automation Conditional
-            test: TestAutomationBasic/registry_2_1_conditional
-          - product: Automation Log Trigger
-            test: TestAutomationBasic/registry_2_1_logtrigger
-    name: Polygon Mumbai ${{ matrix.product }} Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
-      - name: Setup GAP for Grafana
-        uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2
-        with:
-          # aws inputs
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
-          api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
-          # other inputs
-          duplicate-authorization-header: "true"
-      - name: Prepare Base64 TOML override
-        uses: ./.github/actions/setup-create-base64-config-live-testnets
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725
-          pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-polygon-mumbai
-          pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: "http://localhost:8080/primary"
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-          grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
-          network: "polygon_mumbai"
-          httpEndpoints: ${{ secrets.QA_POLYGON_MUMBAI_HTTP_URLS }}
-          wsEndpoints: ${{ secrets.QA_POLYGON_MUMBAI_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
-
-  avalanche-fuji-smoke-tests:
-    environment: integration
-    if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Avalanche Fuji' }}
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    strategy:
-      max-parallel: 1
-      fail-fast: false
-      matrix:
-        include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations
-          - product: OCR
-            test: TestOCRBasic
-          - product: Automation Conditional
-            test: TestAutomationBasic/registry_2_1_conditional
-          - product: Automation Log Trigger
-            test: TestAutomationBasic/registry_2_1_logtrigger
-    name: Avalanche Fuji ${{ matrix.product }} Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
-      - name: Setup GAP for Grafana
-        uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2
-        with:
-          # aws inputs
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
-          api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
-          # other inputs
-          duplicate-authorization-header: "true"
-      - name: Prepare Base64 TOML override
-        uses: ./.github/actions/setup-create-base64-config-live-testnets
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725
-          pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-avalanche-fuji
-          pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: "http://localhost:8080/primary"
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-          grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
-          network: "avalanche_fuji"
-          httpEndpoints: ${{ secrets.QA_AVALANCHE_FUJI_HTTP_URLS }}
-          wsEndpoints: ${{ secrets.QA_AVALANCHE_FUJI_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
-
-  fantom-testnet-smoke-tests:
-    environment: integration
-    if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Fantom Testnet' }}
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    strategy:
-      max-parallel: 1
-      fail-fast: false
-      matrix:
-        include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations
-          - product: OCR
-            test: TestOCRBasic
-          - product: Automation Conditional
-            test: TestAutomationBasic/registry_2_1_conditional
-          - product: Automation Log Trigger
-            test: TestAutomationBasic/registry_2_1_logtrigger
-    name: Fantom Testnet ${{ matrix.product }} Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
-      - name: Setup GAP for Grafana
-        uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2
-        with:
-          # aws inputs
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
-          api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
-          # other inputs
-          duplicate-authorization-header: "true"
-      - name: Prepare Base64 TOML override
-        uses: ./.github/actions/setup-create-base64-config-live-testnets
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725
-          pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-fantom-testnet
-          pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: "http://localhost:8080/primary"
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-          grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
-          network: "fantom_testnet"
-          httpEndpoints: ${{ secrets.QA_FANTOM_TESTNET_HTTP_URLS }}
-          wsEndpoints: ${{ secrets.QA_FANTOM_TESTNET_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
-
-  celo-alfajores-smoke-tests:
-    environment: integration
-    if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Celo Alfajores' }}
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    strategy:
-      max-parallel: 1
-      fail-fast: false
-      matrix:
-        include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations
-          - product: OCR
-            test: TestOCRBasic
-    name: Celo Alfajores ${{ matrix.product }} Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
-      - name: Setup GAP for Grafana
-        uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2
-        with:
-          # aws inputs
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
-          api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
-          # other inputs
-          duplicate-authorization-header: "true"
-      - name: Prepare Base64 TOML override
-        uses: ./.github/actions/setup-create-base64-config-live-testnets
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725
-          pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-celo-alfajores
-          pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: "http://localhost:8080/primary"
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-          grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
-          network: "celo_alfajores"
-          httpEndpoints: ${{ secrets.QA_CELO_ALFAJORES_HTTP_URLS }}
-          wsEndpoints: ${{ secrets.QA_CELO_ALFAJORES_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
-
-  scroll-sepolia-smoke-tests:
-    if: false
-    environment: integration
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    strategy:
-      max-parallel: 1
-      fail-fast: false
-      matrix:
-        include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations
-          - product: OCR
-            test: TestOCRBasic
-    name: Scroll Sepolia ${{ matrix.product }} Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
-      - name: Setup GAP for Grafana
-        uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2
-        with:
-          # aws inputs
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
-          api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
-          # other inputs
-          duplicate-authorization-header: "true"
-      - name: Prepare Base64 TOML override
-        uses: ./.github/actions/setup-create-base64-config-live-testnets
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725
-          pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-scroll-sepolia
-          pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: "http://localhost:8080/primary"
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-          grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
-          network: "scroll_sepolia"
-          httpEndpoints: ${{ secrets.QA_SCROLL_SEPOLIA_HTTP_URLS }}
-          wsEndpoints: ${{ secrets.QA_SCROLL_SEPOLIA_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
-
-  linea-goerli-smoke-tests:
-    environment: integration
-    if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Linea Goerli' }}
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    strategy:
-      max-parallel: 1
-      fail-fast: false
-      matrix:
-        include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations
-          - product: OCR
-            test: TestOCRBasic
-    name: Linea Goerli ${{ matrix.product }} Tests
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
-      - name: Setup GAP for Grafana
-        uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2
-        with:
-          # aws inputs
-          aws-region: ${{ secrets.AWS_REGION }}
-          aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
-          api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
-          # other inputs
-          duplicate-authorization-header: "true"
-      - name: Prepare Base64 TOML override
-        uses: ./.github/actions/setup-create-base64-config-live-testnets
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725
-          pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-linea-goerli
-          pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: "http://localhost:8080/primary"
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-          grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
-          network: "linea_goerli"
-          httpEndpoints: ${{ secrets.QA_LINEA_GOERLI_HTTP_URLS }}
-          wsEndpoints: ${{ secrets.QA_LINEA_GOERLI_URLS }}
-          fundingKeys: ${{ secrets.QA_EVM_KEYS }}
-      - name: Download Tests Binary
-        uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
-        with:
-          name: tests
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Print failed test summary
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25
-        with:
-          test_directory: "./"
diff --git a/.github/workflows/on-demand-ocr-soak-test.yml b/.github/workflows/on-demand-ocr-soak-test.yml
index ffd4f6887a5..cdb0b5da01a 100644
--- a/.github/workflows/on-demand-ocr-soak-test.yml
+++ b/.github/workflows/on-demand-ocr-soak-test.yml
@@ -3,116 +3,66 @@ on:
   workflow_dispatch:
     inputs:
       testToRun:
-        description: Select a test to run
+        description: Select a test to run from .github/e2e-tests.yml
         required: true
         default: TestOCRSoak
         type: choice
         options:
-          - TestOCRv1Soak
-          - TestOCRv2Soak
-          - TestForwarderOCRv1Soak
-          - TestForwarderOCRv2Soak
-          - TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled
-          - TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled
-          - TestOCRSoak_GasSpike
-          - TestOCRSoak_ChangeBlockGasLimit
-          - TestOCRSoak_RPCDownForAllCLNodes
-          - TestOCRSoak_RPCDownForHalfCLNodes
-      base64Config:
-        description: base64-ed config
+          - soak/ocr_test.go:TestOCRv1Soak
+          - soak/ocr_test.go:TestOCRv2Soak
+          - soak/ocr_test.go:TestForwarderOCRv1Soak
+          - soak/ocr_test.go:TestForwarderOCRv2Soak
+          - soak/ocr_test.go:TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled
+          - soak/ocr_test.go:TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled
+          - soak/ocr_test.go:TestOCRSoak_GasSpike
+          - soak/ocr_test.go:TestOCRSoak_ChangeBlockGasLimit
+          - soak/ocr_test.go:TestOCRSoak_RPCDownForAllCLNodes
+          - soak/ocr_test.go:TestOCRSoak_RPCDownForHalfCLNodes
+      test_config_override_path:
+        description: Path to a test config file used to override the default test config
+        required: false
+        type: string
+      test_secrets_override_key:
+        description: 'Key to run tests with custom test secrets'
+        required: false
+        type: string
+      chainlink_version:
+        description: Chainlink image version to use
+        default: develop
         required: true
         type: string
       slackMemberID:
         description: Slack Member ID (Not your @)
         required: true
-        default: U01A2B2C3D4
-        type: string
-      test_secrets_override_key:
-        description: 'Key to run tests with custom test secrets'
-        required: false
-        type: string
+        default: U01A2B2C3D4

 jobs:
-  ocr_soak_test:
-    name: OCR Soak Test
-    environment: integration
-    runs-on: ubuntu-latest
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      CHAINLINK_ENV_USER: ${{ github.actor }}
+  run-e2e-tests-workflow:
+    name: Run E2E Tests
+    uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml
+    with:
+      test_ids: ${{ inputs.testToRun}}
+      test_config_override_path: ${{ inputs.test_config_override_path }}
+      chainlink_version: ${{ inputs.chainlink_version }}
+      SLACK_USER: ${{ inputs.slackMemberID }}
+    secrets:
+      QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
+      QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
+      QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
+      QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }}
+      QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }}
+      QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
+      GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
+      GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
+      GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
+      GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      AWS_REGION: ${{ secrets.QA_AWS_REGION }}
+      AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
+      AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
+      TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }}
       SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
       SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }}
-      TEST_LOG_LEVEL: debug
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1
-        with:
-          id: on-demand-ocr-soak-test
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: ${{ inputs.network }} OCR Soak Test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          ref: ${{ env.REF_NAME }}
-      - name: Get Slack config and mask base64 config
-        run: |
-          SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$SLACK_USER
-          echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV
-
-          BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Parse base64 config
-        uses: ./.github/actions/setup-parse-base64-config
-        with:
-          base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }}
-      - name: Setup Push Tag
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY
-      - name: Build Image
-        id: build-test-image
-        uses: ./.github/actions/build-test-image
-        with:
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32
-        env:
-          DETACH_RUNNER: true
-          TEST_SUITE: soak
-          TEST_ARGS: -test.timeout 900h -test.memprofile memprofile.out -test.cpuprofile profile.out
-          ENV_JOB_IMAGE: ${{ steps.build-test-image.outputs.test_image }}
-          # We can comment these out when we have a stable soak test and aren't worried about resource consumption
-          TEST_UPLOAD_CPU_PROFILE: true
-          TEST_UPLOAD_MEM_PROFILE: true
-        with:
-          test_command_to_run: cd ./integration-tests && go test -v -count=1 -run ^${{ github.event.inputs.testToRun }}$ ./soak
-          test_download_vendor_packages_command: make gomod
-          test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }}
-          test_config_override_base64: ${{ env.BASE64_CONFIG_OVERRIDE }}
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          should_cleanup: false
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
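Reviewer note: the dispatch options above now use `<path>:<TestName>` identifiers that the reusable workflow resolves against `.github/e2e-tests.yml`. A hedged sketch of what a matching registry entry might look like — the field names are illustrative guesses; the authoritative schema is whatever `citool check-tests` validates:

  # Hedged sketch of a hypothetical .github/e2e-tests.yml entry (not in this patch)
  - id: soak/ocr_test.go:TestOCRSoak
    path: integration-tests/soak/ocr_test.go
    test_env_types: [k8s-remote-runner]   # soak tests run via the remote runner
    runs_on: ubuntu-latest
    test_cmd: cd integration-tests && go test -v -count=1 -run ^TestOCRSoak$ ./soak

This replaces the old pattern of each on-demand workflow hand-rolling its own `go test` invocation and base64 config plumbing.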
diff --git a/.github/workflows/on-demand-vrfv2-performance-test.yml b/.github/workflows/on-demand-vrfv2-performance-test.yml
index ad8640ccfac..d5eefcc348f 100644
--- a/.github/workflows/on-demand-vrfv2-performance-test.yml
+++ b/.github/workflows/on-demand-vrfv2-performance-test.yml
@@ -85,6 +85,9 @@ jobs:
       GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
       GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
       GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       AWS_REGION: ${{ secrets.QA_AWS_REGION }}
       AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
diff --git a/.github/workflows/on-demand-vrfv2-smoke-tests.yml b/.github/workflows/on-demand-vrfv2-smoke-tests.yml
index ede71267b47..ea42a9014da 100644
--- a/.github/workflows/on-demand-vrfv2-smoke-tests.yml
+++ b/.github/workflows/on-demand-vrfv2-smoke-tests.yml
@@ -88,6 +88,9 @@ jobs:
       GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
       GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
       GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       AWS_REGION: ${{ secrets.QA_AWS_REGION }}
       AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
diff --git a/.github/workflows/on-demand-vrfv2plus-performance-test.yml b/.github/workflows/on-demand-vrfv2plus-performance-test.yml
index 0744a93b26e..f026086fff7 100644
--- a/.github/workflows/on-demand-vrfv2plus-performance-test.yml
+++ b/.github/workflows/on-demand-vrfv2plus-performance-test.yml
@@ -85,6 +85,9 @@ jobs:
       GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
       GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
       GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       AWS_REGION: ${{ secrets.QA_AWS_REGION }}
       AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
diff --git a/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml b/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml
index 5330edf294b..e1821336c63 100644
--- a/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml
+++ b/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml
@@ -88,6 +88,9 @@ jobs:
       GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
       GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
       GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       AWS_REGION: ${{ secrets.QA_AWS_REGION }}
       AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
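Reviewer note: the same three Loki secrets are threaded through every caller because the reusable workflow (next file) now declares them as required secrets and maps them onto the environment the tests actually read. The mapping, extracted from the hunks below for reference:

  # Extracted from run-e2e-tests-reusable-workflow.yml below; Loki credentials
  # are no longer derived from the GRAFANA_INTERNAL_* secrets.
  env:
    E2E_TEST_LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
    E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }}
    E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}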
Default is "debug"' required: false @@ -116,13 +121,21 @@ on: required: false type: boolean default: false + SLACK_CHANNEL: + description: 'SLACK_CHANNEL env used to send Slack notifications from test code' + required: false + type: string + SLACK_USER: + description: 'SLACK_USER env used to send Slack notifications from test code' + required: false + type: string outputs: test_results: description: 'Test results from all executed tests' value: ${{ jobs.after_tests.outputs.test_results }} secrets: TEST_SECRETS_OVERRIDE_BASE64: - required: false + required: false QA_AWS_REGION: required: true QA_AWS_ROLE_TO_ASSUME: @@ -135,6 +148,12 @@ on: required: true QA_KUBECONFIG: required: true + LOKI_TENANT_ID: + required: true + LOKI_URL: + required: true + LOKI_BASIC_AUTH: + required: true GRAFANA_INTERNAL_TENANT_ID: required: true GRAFANA_INTERNAL_BASIC_AUTH: @@ -166,16 +185,17 @@ on: env: CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink QA_CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - DEFAULT_CHAINLINK_VERSION: ${{ inputs.chainlink_version || github.sha }} - DEFAULT_CHAINLINK_PLUGINS_VERSION: ${{ inputs.chainlink_version != '' && format('{0}-plugins', inputs.chainlink_version) || format('{0}-plugins', github.sha) }} + DEFAULT_CHAINLINK_VERSION: ${{ inputs.chainlink_version }} + DEFAULT_CHAINLINK_PLUGINS_VERSION: ${{ inputs.chainlink_version != '' && format('{0}-plugins', inputs.chainlink_version) }} + DEFAULT_CHAINLINK_UPGRADE_VERSION: ${{ inputs.chainlink_version }} CHAINLINK_ENV_USER: ${{ github.actor }} - CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref || github.sha }} - SELECTED_NETWORKS: SIMULATED + CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref }} MOD_CACHE_VERSION: 1 TEST_LOG_LEVEL: ${{ inputs.test_log_level }} METRICS_COLLECTION_ID: chainlink-e2e-tests SLACK_API_KEY: ${{ secrets.SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }} + SLACK_CHANNEL: ${{ inputs.slack_notification_after_tests_channel_id || inputs.SLACK_CHANNEL || secrets.SLACK_CHANNEL }} + SLACK_USER: ${{ inputs.SLACK_USER }} jobs: validate-inputs: @@ -239,7 +259,7 @@ jobs: path: core - name: Install citool shell: bash - run: go install github.com/smartcontractkit/chainlink-testing-framework/tools/citool@a49f2dff000fcf020ab9978b33e3726d7df5bf96 # v1.34.4 + run: go install github.com/smartcontractkit/chainlink-testing-framework/tools/citool@1d8d5b55bf6379b969dbcde99abc87faa5963ea1 # v1.34.4 - name: Run Check Tests Command run: | if ! 
citool check-tests ${{ github.workspace }}/integration-tests ${{ github.workspace }}/.github/e2e-tests.yml; then @@ -286,7 +306,7 @@ jobs: check-latest: true - name: Install citool shell: bash - run: go install github.com/smartcontractkit/chainlink-testing-framework/tools/citool@a49f2dff000fcf020ab9978b33e3726d7df5bf96 # v1.34.4 + run: go install github.com/smartcontractkit/chainlink-testing-framework/tools/citool@1d8d5b55bf6379b969dbcde99abc87faa5963ea1 # v1.34.4 - name: Install jq run: sudo apt-get install jq @@ -352,29 +372,6 @@ jobs: fi shell: bash - - name: Check if test config override is required for any test - shell: bash - run: | - # Parse the JSON to check for test_config_override_required in Docker matrix - DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE=$(echo '${{ steps.set-docker-matrix.outputs.matrix }}' | jq 'if .tests then .tests[] | select(has("test_config_override_required") and .test_config_override_required) | .id else empty end' -r) - # Parse the JSON to check for test_config_override_required in Kubernetes matrix - K8S_TESTS_REQUIRING_CONFIG_OVERRIDE=$(echo '${{ steps.set-k8s-runner-matrix.outputs.matrix }}' | jq 'if .tests then .tests[] | select(has("test_config_override_required") and .test_config_override_required) | .id else empty end' -r) - - # Determine if any tests require a configuration override - if [ ! -z "$DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE" ] || [ ! -z "$K8S_TESTS_REQUIRING_CONFIG_OVERRIDE" ]; then - echo "Tests in .github/e2e-tests.yml requiring test config override:" - if [ ! -z "$DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE" ]; then - echo $DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE - fi - if [ ! -z "$K8S_TESTS_REQUIRING_CONFIG_OVERRIDE" ]; then - echo $K8S_TESTS_REQUIRING_CONFIG_OVERRIDE - fi - echo "::error::Error: Some of the tests require a test config override. Please see workflow logs and set 'test_config_override_path' to run these tests." - exit 1 - else - echo "No tests require a configuration override. Proceeding without overrides." 
- fi - - name: Check if test secrets are required for any test shell: bash run: | @@ -514,6 +511,7 @@ jobs: echo "[${{ env.TEST_CONFIG_OVERRIDE_PATH }}]($GITHUB_SERVER_URL/$GITHUB_REPOSITORY/blob/${{ github.sha }}/${{ env.TEST_CONFIG_OVERRIDE_PATH }})" >> $GITHUB_STEP_SUMMARY - name: Show chainlink version in summary + if: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_VERSION || env.DEFAULT_CHAINLINK_VERSION }} run: | echo "### Chainlink version" >> $GITHUB_STEP_SUMMARY echo "${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_VERSION || env.DEFAULT_CHAINLINK_VERSION }}" >> $GITHUB_STEP_SUMMARY @@ -559,27 +557,26 @@ jobs: - name: Run tests id: run_tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@4f377a6b1cc07f0eca82745782736b4908a1da30 # v2.3.32 + uses: smartcontractkit/.github/actions/ctf-run-tests@b8731364b119e88983e94b0c4da87fc27ddb41b8 # ctf-run-tests@0.0.0 env: DETACH_RUNNER: true E2E_TEST_CHAINLINK_VERSION: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_VERSION || env.DEFAULT_CHAINLINK_VERSION }} E2E_TEST_CHAINLINK_UPGRADE_VERSION: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_UPGRADE_VERSION }} E2E_TEST_CHAINLINK_POSTGRES_VERSION: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_POSTGRES_VERSION }} - E2E_TEST_SELECTED_NETWORK: ${{ matrix.tests.test_env_vars.E2E_TEST_SELECTED_NETWORK || env.SELECTED_NETWORKS }} + E2E_TEST_SELECTED_NETWORK: ${{ matrix.tests.test_env_vars.E2E_TEST_SELECTED_NETWORK }} E2E_TEST_LOGGING_RUN_ID: ${{ github.run_id }} E2E_TEST_LOG_STREAM_LOG_TARGETS: ${{ vars.LOGSTREAM_LOG_TARGETS }} E2E_TEST_LOG_COLLECT: ${{ vars.TEST_LOG_COLLECT }} - E2E_TEST_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL }} + E2E_TEST_LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} + E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL }} E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} E2E_TEST_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }} E2E_TEST_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_INSTANCE || '' }} E2E_TEST_PYROSCOPE_KEY: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_KEY || '' }} - E2E_TEST_PYROSCOPE_ENABLED: ${{ matrix.tests.pyroscope_env != '' && 'true' || '' }} with: - test_command_to_run: ${{ matrix.tests.test_cmd }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false + test_command_to_run: ${{ matrix.tests.test_cmd }} ${{ matrix.tests.test_cmd_opts || '2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false' }} test_download_vendor_packages_command: cd $(dirname ${{ matrix.tests.path }}) && go mod download test_secrets_override_base64: ${{ secrets.TEST_SECRETS_OVERRIDE_BASE64 }} test_config_override_path: ${{ env.TEST_CONFIG_OVERRIDE_PATH }} @@ -614,14 +611,14 @@ jobs: run: | ls -l ./integration-tests/smoke/traces - - name: Upload trace data as Github artifact + - name: Upload trace data as artifact if: inputs.enable_otel_traces_for_ocr2_plugins && matrix.tests.test_env_vars.ENABLE_OTEL_TRACES == 'true' uses: 
actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 with: name: trace-data path: ./integration-tests/smoke/traces/trace-data.json - - name: Upload test log as Github artifact + - name: Upload test log as artifact uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 if: inputs.test_log_upload_on_failure && failure() with: @@ -630,7 +627,7 @@ jobs: retention-days: ${{ inputs.test_log_upload_retention_days }} continue-on-error: true - - name: Upload cl node coverage data as Github artifact + - name: Upload cl node coverage data as artifact if: inputs.upload_cl_node_coverage_artifact uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 timeout-minutes: 2 @@ -662,17 +659,16 @@ jobs: path: ${{ matrix.tests.test_artifacts_on_failure }} retention-days: 1 - - name: Print failed test summary + - name: Show Grafana url in test summary if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@70ccaef155381025e411cf7cd1fa5ef8f668ed75 # v2.3.25 - + uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@70fcaef0bf3a5a7d8aa681861d2f76e4188863d9 # ctf-show-grafana-in-test-summary@0.0.0 # Run K8s tests using old remote runner - prepare-remote-runner-test-image: - needs: [load-test-configurations, require-chainlink-image-versions-in-qa-ecr, require-chainlink-plugin-versions-in-qa-ecr] + get-remote-runner-test-image: + needs: [load-test-configurations] if: ${{ needs.load-test-configurations.outputs.run-k8s-tests == 'true' && always() && !failure() && !cancelled() }} - name: Prepare remote runner test image + name: Get remote runner test image runs-on: ubuntu-latest environment: integration permissions: @@ -690,12 +686,13 @@ jobs: uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - name: Build Test Runner Image id: build-test-runner-image - uses: ./.github/actions/build-test-image + uses: smartcontractkit/.github/actions/ctf-build-test-image@a5e4f4c8fbb8e15ab2ad131552eca6ac83c4f4b3 # ctf-build-test-image@0.1.0 if: ${{ inputs.with_existing_remote_runner_version == '' }} with: QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} + suites: ${{ inputs.test_image_suites }} - name: Set Remote Runner Version id: set-remote-runner-version run: | @@ -708,7 +705,7 @@ jobs: fi run-k8s-runner-tests: - needs: [load-test-configurations, prepare-remote-runner-test-image, require-chainlink-image-versions-in-qa-ecr, require-chainlink-plugin-versions-in-qa-ecr, get_latest_chainlink_release_version] + needs: [load-test-configurations, get-remote-runner-test-image, require-chainlink-image-versions-in-qa-ecr, require-chainlink-plugin-versions-in-qa-ecr, get_latest_chainlink_release_version] if: ${{ needs.load-test-configurations.outputs.run-k8s-tests == 'true' && always() && !failure() && !cancelled() }} name: ${{ matrix.tests.id }} runs-on: ${{ matrix.tests.runs_on }} @@ -751,51 +748,51 @@ jobs: echo "[${{ env.TEST_CONFIG_OVERRIDE_PATH }}]($GITHUB_SERVER_URL/$GITHUB_REPOSITORY/blob/${{ github.sha }}/${{ env.TEST_CONFIG_OVERRIDE_PATH }})" >> $GITHUB_STEP_SUMMARY - name: Show chainlink version in summary + if: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_VERSION || env.DEFAULT_CHAINLINK_VERSION }} run: | echo "### Chainlink version" >> $GITHUB_STEP_SUMMARY echo "${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_VERSION || env.DEFAULT_CHAINLINK_VERSION 
}}" >> $GITHUB_STEP_SUMMARY - name: Show remote runner version in summary run: | - echo "Remote Runner Version: ${{ needs.prepare-remote-runner-test-image.outputs.remote-runner-version }}" + echo "Remote Runner Version: ${{ needs.get-remote-runner-test-image.outputs.remote-runner-version }}" echo "### Remote Runner Version" >> $GITHUB_STEP_SUMMARY - echo "${{ needs.prepare-remote-runner-test-image.outputs.remote-runner-version }}" >> $GITHUB_STEP_SUMMARY + echo "${{ needs.get-remote-runner-test-image.outputs.remote-runner-version }}" >> $GITHUB_STEP_SUMMARY - name: Show test configuration in logs run: echo '${{ toJson(matrix.tests) }}' | jq . - name: Run tests id: run_tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@4f377a6b1cc07f0eca82745782736b4908a1da30 # v2.3.32 + uses: smartcontractkit/.github/actions/ctf-run-tests@b8731364b119e88983e94b0c4da87fc27ddb41b8 # ctf-run-tests@0.0.0 env: DETACH_RUNNER: true RR_MEM: ${{ matrix.tests.remote_runner_memory }} TEST_ARGS: -test.timeout 900h -test.memprofile memprofile.out -test.cpuprofile profile.out - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ needs.prepare-remote-runner-test-image.outputs.remote-runner-version }} + ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ needs.get-remote-runner-test-image.outputs.remote-runner-version }} INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com # We can comment these out when we have a stable soak test and aren't worried about resource consumption - TEST_UPLOAD_CPU_PROFILE: true - TEST_UPLOAD_MEM_PROFILE: true + TEST_UPLOAD_CPU_PROFILE: ${{ matrix.tests.test_env_vars.TEST_UPLOAD_CPU_PROFILE }} + TEST_UPLOAD_MEM_PROFILE: ${{ matrix.tests.test_env_vars.TEST_UPLOAD_MEM_PROFILE }} REF_NAME: ${{ github.head_ref || github.ref_name }} E2E_TEST_CHAINLINK_VERSION: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_VERSION || env.DEFAULT_CHAINLINK_VERSION }} E2E_TEST_CHAINLINK_UPGRADE_VERSION: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_UPGRADE_VERSION }} E2E_TEST_CHAINLINK_POSTGRES_VERSION: ${{ matrix.tests.test_env_vars.E2E_TEST_CHAINLINK_POSTGRES_VERSION }} - E2E_TEST_SELECTED_NETWORK: ${{ matrix.tests.test_env_vars.E2E_TEST_SELECTED_NETWORK || env.SELECTED_NETWORKS }} + E2E_TEST_SELECTED_NETWORK: ${{ matrix.tests.test_env_vars.E2E_TEST_SELECTED_NETWORK }} E2E_TEST_LOGGING_RUN_ID: ${{ github.run_id }} E2E_TEST_LOG_STREAM_LOG_TARGETS: ${{ vars.LOGSTREAM_LOG_TARGETS }} E2E_TEST_LOG_COLLECT: ${{ vars.TEST_LOG_COLLECT }} - E2E_TEST_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL }} + E2E_TEST_LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} + E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} + E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} + E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL }} E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} E2E_TEST_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }} E2E_TEST_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_INSTANCE || '' }} 
E2E_TEST_PYROSCOPE_KEY: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_KEY || '' }} - E2E_TEST_PYROSCOPE_ENABLED: ${{ matrix.tests.pyroscope_env != '' && 'true' || '' }} DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable with: - test_command_to_run: ${{ matrix.tests.test_cmd }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false + test_command_to_run: ${{ matrix.tests.test_cmd }} ${{ matrix.tests.test_cmd_opts || '2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false' }} test_download_vendor_packages_command: make gomod test_secrets_override_base64: ${{ secrets.TEST_SECRETS_OVERRIDE_BASE64 }} test_config_override_path: ${{ env.TEST_CONFIG_OVERRIDE_PATH }} @@ -811,7 +808,7 @@ jobs: QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log as Github artifact + - name: Upload test log as artifact uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: inputs.test_log_upload_on_failure && failure() with: @@ -828,10 +825,9 @@ jobs: path: ${{ matrix.tests.test_artifacts_on_failure }} retention-days: 1 - # TODO: move to run-tests GHA - - name: Print failed test summary + - name: Show Grafana url in test summary if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@70ccaef155381025e411cf7cd1fa5ef8f668ed75 # v2.3.25 + uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@70fcaef0bf3a5a7d8aa681861d2f76e4188863d9 # ctf-show-grafana-in-test-summary@0.0.0 after_tests: needs: [load-test-configurations, run-docker-tests, run-k8s-runner-tests] @@ -866,6 +862,11 @@ jobs: echo "results=[]" >> $GITHUB_OUTPUT fi + - name: Set short SHA + id: set_short_sha + shell: bash + run: echo "short_sha=$(echo ${{ github.sha }} | cut -c1-7)" >> $GITHUB_OUTPUT + - name: Send Slack notification uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 if: ${{ inputs.slack_notification_after_tests == 'true' || inputs.slack_notification_after_tests == 'always' || (inputs.slack_notification_after_tests == 'on_failure' && contains(join(needs.*.result, ','), 'failure')) }} @@ -878,20 +879,20 @@ jobs: { "attachments": [ { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || contains(join(needs.*.result, ','), 'cancelled') && '#FFA000' || '2E7D32' }}", + "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || contains(join(needs.*.result, ','), 'cancelled') && '#FFA000' || contains(join(needs.*.result, ','), 'skipped') && '#FFA000' || '2E7D32' }}", "blocks": [ { "type": "section", "text": { "type": "mrkdwn", - "text": "${{ inputs.slack_notification_after_tests_name }} - ${{ contains(join(needs.*.result, ','), 'failure') && 'Failed :x:' || contains(join(needs.*.result, ','), 'cancelled') && 'Cancelled :warning:' || 'Passed :white_check_mark:' }}" + "text": "${{ inputs.slack_notification_after_tests_name }} - ${{ contains(join(needs.*.result, ','), 'failure') && 'Failed :x:' || contains(join(needs.*.result, ','), 'cancelled') && 'Cancelled :warning:' || contains(join(needs.*.result, ','), 'skipped') && 'Skipped :warning:' || 'Passed :white_check_mark:' }}" } }, { "type": "section", "text": { "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Build Details>" + "text": "${{ github.ref_name }} | <${{ 
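Reviewer note: the `test_cmd_opts` fallback above means an entry in `.github/e2e-tests.yml` can now replace the default gotestloghelper pipe with its own trailing options. A hedged sketch, reusing the hypothetical entry schema from the earlier note — the id and option values are illustrative, not taken from this patch:

  # Hedged sketch of a hypothetical entry supplying its own command tail
  - id: benchmark/automation_test.go:TestAutomationBenchmark
    test_cmd: cd integration-tests/benchmark && go test -run ^TestAutomationBenchmark$
    test_cmd_opts: -test.timeout 6h 2>&1 | tee /tmp/gotest.log

When `test_cmd_opts` is empty, the `||` fallback keeps today's behavior, so existing entries need no change.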
diff --git a/.github/workflows/run-nightly-e2e-tests.yml b/.github/workflows/run-nightly-e2e-tests.yml
index 6d7056ed04d..151217180b0 100644
--- a/.github/workflows/run-nightly-e2e-tests.yml
+++ b/.github/workflows/run-nightly-e2e-tests.yml
@@ -27,6 +27,9 @@ jobs:
       GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
       GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
       GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       AWS_REGION: ${{ secrets.QA_AWS_REGION }}
       AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
diff --git a/.github/workflows/run-selected-e2e-tests.yml b/.github/workflows/run-selected-e2e-tests.yml
index c204be6a564..2ff3fbb979d 100644
--- a/.github/workflows/run-selected-e2e-tests.yml
+++ b/.github/workflows/run-selected-e2e-tests.yml
@@ -59,10 +59,12 @@ jobs:
       GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
       GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }}
       GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
+      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
+      LOKI_URL: ${{ secrets.LOKI_URL }}
+      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       AWS_REGION: ${{ secrets.QA_AWS_REGION }}
       AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }}
       AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }}
       TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }}
       SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }}
-
diff --git a/integration-tests/scripts/buildTests b/integration-tests/scripts/buildTests
index 04e9cea94b3..b5e8fc3080e 100755
--- a/integration-tests/scripts/buildTests
+++ b/integration-tests/scripts/buildTests
@@ -28,6 +28,7 @@ do
   elif [ "$x" = "ccip-load" ]; then
     echo "Changing directory and executing go test -c ./... for 'ccip-load' package"
     pushd "./ccip-tests/load" && go test -c -tags embed -o ../ ./...
+    mv ../load.test ../ccip-load.test # rename the binary to match the suite name
    popd
  else
    go test -c -tags embed ./"${x}"
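Reviewer note: the rename matters because suite binaries are addressed by suite name, and before this fix both the `load` and `ccip-load` suites produced a binary called `load.test`. Note the default `test_image_suites` input added earlier does not list `ccip-load`, so a caller wanting it in the test image would opt in explicitly. A hedged sketch (input value illustrative):

  # Hedged sketch: caller opting the CCIP load suite into the built test image
  uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml
  with:
    test_image_suites: chaos migration reorg smoke soak benchmark load ccip-load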
diff --git a/integration-tests/testconfig/automation/automation.toml b/integration-tests/testconfig/automation/automation.toml
index f07136e998e..b2e87fa34f9 100644
--- a/integration-tests/testconfig/automation/automation.toml
+++ b/integration-tests/testconfig/automation/automation.toml
@@ -2,6 +2,9 @@
 [Common]
 chainlink_node_funding = 2.0

+[Pyroscope]
+enabled=false
+
 [NodeConfig]
 BaseConfigTOML = """
 [Feature]
@@ -207,6 +210,10 @@ max_perform_data_size=5_000
 max_revert_data_size=5_000

 # load test specific overrides
+[Load.Logging.Grafana]
+base_url="https://grafana.ops.prod.cldev.sh"
+dashboard_url="/d/a4899f53-f709-430a-aec2-24f32198dcc9/chainlink-automation-v2-load-test"
+
 [Load.Seth]
 ephemeral_addresses_number = 100
 root_key_funds_buffer = 1_000_000
@@ -301,6 +308,9 @@ max_revert_data_size=5_000
 enabled=false

 # automation benchmark test specific overrides
+[Benchmark.Logging.Grafana]
+base_url="https://grafana.ops.prod.cldev.sh"
+dashboard_url="/d/Q8n6m1unz/chainlink-automation-benchmark-test"

 # will retry roughly for 1h before giving up (900 * 4s)
 [Benchmark.Automation.Resiliency]
@@ -420,6 +430,9 @@ max_perform_data_size=5_000
 max_revert_data_size=5_000

 # automation soak test specific overrides
+[Soak.Logging.Grafana]
+base_url="https://grafana.ops.prod.cldev.sh"
+dashboard_url="/d/Q8n6m1unz/chainlink-automation-benchmark-test"

 # will retry roughly for 1h before giving up (900 * 4s)
 [Soak.Automation.Resiliency]
diff --git a/integration-tests/testconfig/automation/example.toml b/integration-tests/testconfig/automation/example.toml
index 3b48e89a548..3bbe78d693d 100644
--- a/integration-tests/testconfig/automation/example.toml
+++ b/integration-tests/testconfig/automation/example.toml
@@ -1,7 +1,6 @@
 # Example of full config with all fields
 # General part
 [ChainlinkImage]
-image="public.ecr.aws/chainlink/chainlink"
 version="2.7.0"

 [Logging]
@@ -16,38 +15,10 @@ log_producer_timeout="10s"
 # number of retries before log producer gives up and stops listening to logs
 log_producer_retry_limit=10

-[Logging.Loki]
-tenant_id="tenant_id"
-# full URL of Loki ingest endpoint
-endpoint="https://loki.url/api/v3/push"
-# currently only needed when using public instance
-basic_auth_secret="loki-basic-auth"
-# only needed for cloud grafana
-bearer_token_secret="bearer_token"
-
-# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set)
-[Logging.Grafana]
-# grafana url (trailing "/" will be stripped)
-base_url="http://grafana.url"
-# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard
-dashboard_url="/d/your-dashboard"
-# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model
-dashboard_uid="dashboard-uid-to-annotate"
-bearer_token_secret="my-awesome-token"
-
 # if you want to use polygon_mumbai
 [Network]
 selected_networks=["polygon_mumbai"]

-[Network.RpcHttpUrls]
-polygon_mumbai = ["https://my-rpc-endpoint.io"]
-
-[Network.RpcWsUrls]
-polygon_mumbai = ["https://my-rpc-endpoint.io"]
-
-[Network.WalletKeys]
-polygon_mumbai = ["change-me-to-your-PK"]
-
 [PrivateEthereumNetwork]
 # pos or pow
 consensus_type="pos"
@@ -163,5 +134,4 @@ use_prometheus=false

 # upgrade test specific override
 [TestAutomationNodeUpgrade.ChainlinkUpgradeImage]
-image="public.ecr.aws/chainlink/chainlink"
 version="2.8.0"
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_1.toml b/integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_1.toml
new file mode 100644
index 00000000000..0b704524e9d
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_1.toml
@@ -0,0 +1,15 @@
+[ChainlinkImage]
+version="latest"
+
+[Benchmark.Automation.Benchmark]
+registry_to_test = "2_1"
+number_of_registries = 1
+number_of_nodes = 6
+number_of_upkeeps = 1000
+upkeep_gas_limit = 1500000
+check_gas_to_burn = 10000
+perform_gas_to_burn = 1000
+block_range = 3600
+block_interval = 60
+forces_single_tx_key = false
+delete_jobs_on_end = true
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_3.toml b/integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_3.toml
new file mode 100644
index 00000000000..f00bb5ed47b
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/benchmark/1000Upkeeps-1h-2_3.toml
@@ -0,0 +1,15 @@
+[ChainlinkImage]
+version="latest"
+
+[Benchmark.Automation.Benchmark]
+registry_to_test = "2_3"
+number_of_registries = 1
+number_of_nodes = 6
+number_of_upkeeps = 1000
+upkeep_gas_limit = 1500000
+check_gas_to_burn = 10000
+perform_gas_to_burn = 1000
+block_range = 3600
+block_interval = 60
+forces_single_tx_key = false
+delete_jobs_on_end = true
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/load/500Upkeeps-1x-1h.toml b/integration-tests/testconfig/automation/overrides/load/500Upkeeps-1x-1h.toml
new file mode 100644
index 00000000000..6d58253f526
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/load/500Upkeeps-1x-1h.toml
@@ -0,0 +1,47 @@
+[ChainlinkImage]
+version="latest"
+
+[Load.Seth]
+root_key_funds_buffer = 1_000_000
+ephemeral_addresses_number = 300
+
+[Load.Seth.nonce_manager]
+key_sync_timeout = "100s"
+
+[Load.Common]
+chainlink_node_funding = 1000
+
+[Load.Automation.AutomationConfig]
+use_log_buffer_v1=false
+
+[Load.Automation.AutomationConfig.PluginConfig.LogProviderConfig]
+block_rate=1
+log_limit=2
+
+[Load.Automation]
+[Load.Automation.General]
+number_of_nodes=6
+duration=3600
+block_time=1
+spec_type="recommended"
+chainlink_node_log_level="debug"
+use_prometheus=true
+remove_namespace = true
+
+[Load.Automation.DataStreams]
+enabled=false
+
+[[Load.Automation.Load]]
+number_of_upkeeps=500
+number_of_events = 1
+number_of_spam_matching_events = 0
+number_of_spam_non_matching_events = 0
+check_burn_amount = 0
+perform_burn_amount = 0
+upkeep_gas_limit = 1000000
+shared_trigger = false
+is_streams_lookup = false
+feeds = []
+
+[Pyroscope]
+enabled=false
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-12h.toml b/integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-12h.toml
new file mode 100644
index 00000000000..8991a6eb903
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-12h.toml
@@ -0,0 +1,43 @@
+[ChainlinkImage]
+version="latest"
+
+[Load.Seth]
+root_key_funds_buffer = 1_000_000
+
+[Load.Common]
+chainlink_node_funding = 10000
+
+[Load.Automation.AutomationConfig]
+use_log_buffer_v1=false
+
+[Load.Automation.AutomationConfig.PluginConfig.LogProviderConfig]
+block_rate=1
+log_limit=2
+
+[Load.Automation]
+[Load.Automation.General]
+number_of_nodes=6
+duration=43200
+block_time=1
+spec_type="recommended"
+chainlink_node_log_level="debug"
+use_prometheus=true
+remove_namespace = true
+
+[Load.Automation.DataStreams]
+enabled=false
+
+[[Load.Automation.Load]]
+number_of_upkeeps=50
+number_of_events = 1
+number_of_spam_matching_events = 0
+number_of_spam_non_matching_events = 0
+check_burn_amount = 0
+perform_burn_amount = 0
+upkeep_gas_limit = 1000000
+shared_trigger = false
+is_streams_lookup = false
+feeds = []
+
+[Pyroscope]
+enabled=false
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-1h.toml b/integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-1h.toml
new file mode 100644
index 00000000000..a133fce6dcf
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/load/50Upkeeps-1x-1h.toml
@@ -0,0 +1,43 @@
+[ChainlinkImage]
+version="latest"
+
+[Load.Seth]
+root_key_funds_buffer = 1_000_000
+
+[Load.Common]
+chainlink_node_funding = 1000
+
+[Load.Automation.AutomationConfig]
+use_log_buffer_v1=false
+
+[Load.Automation.AutomationConfig.PluginConfig.LogProviderConfig]
+block_rate=1
+log_limit=2
+
+[Load.Automation]
+[Load.Automation.General]
+number_of_nodes=6
+duration=3600
+block_time=1
+spec_type="recommended"
+chainlink_node_log_level="debug"
+use_prometheus=true
+remove_namespace = true
+
+[Load.Automation.DataStreams]
+enabled=false
+
+[[Load.Automation.Load]]
+number_of_upkeeps=50
+number_of_events = 1
+number_of_spam_matching_events = 0
+number_of_spam_non_matching_events = 0
+check_burn_amount = 0
+perform_burn_amount = 0
+upkeep_gas_limit = 1000000
+shared_trigger = false
+is_streams_lookup = false
+feeds = []
+
+[Pyroscope]
+enabled=false
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_1.toml b/integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_1.toml
new file mode 100644
index 00000000000..fda5cb6ea29
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_1.toml
@@ -0,0 +1,15 @@
+[ChainlinkImage]
+version="latest"
+
+[Soak.Automation.Benchmark]
+registry_to_test = "2_1"
+number_of_registries = 1
+number_of_nodes = 6
+number_of_upkeeps = 50
+upkeep_gas_limit = 1500000
+check_gas_to_burn = 10000
+perform_gas_to_burn = 1000
+block_range = 28800
+block_interval = 300
+forces_single_tx_key = false
+delete_jobs_on_end = true
\ No newline at end of file
diff --git a/integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_3.toml b/integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_3.toml
new file mode 100644
index 00000000000..46ba1ad3e85
--- /dev/null
+++ b/integration-tests/testconfig/automation/overrides/soak/50Upkeeps-8h-2_3.toml
@@ -0,0 +1,15 @@
+[ChainlinkImage]
+version="latest"
+
+[Soak.Automation.Benchmark]
+registry_to_test = "2_3"
+number_of_registries = 1
+number_of_nodes = 6
+number_of_upkeeps = 50
+upkeep_gas_limit = 1500000
+check_gas_to_burn = 10000
+perform_gas_to_burn = 1000
+block_range = 28800
+block_interval = 300
+forces_single_tx_key = false
+delete_jobs_on_end = true
\ No newline at end of file
manual settings, used when gas_price_estimation_enabled is false or when it fails -# legacy transactions -gas_price = 1_800_000_000 - -# EIP-1559 transactions -gas_fee_cap = 3_800_000_000 -gas_tip_cap = 1_800_000_000 +gas_price = 105_000_000_000 +gas_fee_cap = 150_312_843_059 +gas_tip_cap = 40_416_094 +gas_price_estimation_enabled = true +gas_price_estimation_blocks = 1000 +gas_price_estimation_tx_priority = "standard" [[Seth.networks]] name = "Polygon Amoy" @@ -352,38 +300,6 @@ gas_price = 200_000_000_000 gas_fee_cap = 200_000_000_000 gas_tip_cap = 25_000_000_000 -[[Seth.networks]] -name = "Polygon zkEVM Goerli" -transaction_timeout = "3m" -eip_1559_dynamic_fees = false - -# automated gas estimation for live networks -# if set to true we will dynamically estimate gas for every transaction (based on suggested values, priority and congestion rate for last X blocks) -# gas_price_estimation_enabled = true -# number of blocks to use for congestion rate estimation (it will determine buffer added on top of suggested values) -# gas_price_estimation_blocks = 100 -# transaction priority, which determines adjustment factor multiplier applied to suggested values (fast - 1.2x, standard - 1x, slow - 0.8x) -# gas_price_estimation_tx_priority = "standard" - -# URLs -# if set they will overwrite URLs from EVMNetwork that Seth uses, can be either WS(S) or HTTP(S) -# urls_secret = ["ws://your-ws-url:8546"] - -# gas_limits -# gas limit should be explicitly set only if you are connecting to a node that's incapable of estimating gas limit itself (should only happen for very old versions) -# gas_limit = 9_000_000 -# transfer_gas_fee is gas limit that will be used, when funding CL nodes and returning funds from there and when funding and returning funds from ephemeral keys -# we use hardcoded value in order to be estimate how much funds are available for sending or returning after tx costs have been paid -transfer_gas_fee = 21_000 - -# manual settings, used when gas_price_estimation_enabled is false or when it fails -# legacy transactions -gas_price = 50_000_000 - -# EIP-1559 transactions -gas_fee_cap = 3_800_000_000 -gas_tip_cap = 1_800_000_000 - [[Seth.networks]] name = "Optimism Sepolia" transaction_timeout = "3m" @@ -686,7 +602,6 @@ gas_price_estimation_tx_priority = "standard" [[Seth.networks]] name = "TREASURE_RUBY" -chain_id = "978657" transaction_timeout = "10m" transfer_gas_fee = 21_000 gas_price = 100_000_000 @@ -699,7 +614,6 @@ gas_price_estimation_tx_priority = "standard" [[Seth.networks]] name = "XLAYER_MAINNET" -chain_id = "196" transaction_timeout = "10m" transfer_gas_fee = 21_000 gas_price = 13_400_000_000 @@ -712,7 +626,6 @@ gas_price_estimation_tx_priority = "standard" [[Seth.networks]] name = "XLAYER_SEPOLIA" -chain_id = "195" transaction_timeout = "10m" transfer_gas_fee = 21_000 gas_price = 200_000_000_000 @@ -723,6 +636,28 @@ gas_price_estimation_enabled = true gas_price_estimation_blocks = 500 gas_price_estimation_tx_priority = "standard" +[[Seth.networks]] +name = "POLYGON_MAINNET" +transaction_timeout = "3m" +transfer_gas_fee = 21_000 +gas_price = 31_000_000_000 +eip_1559_dynamic_fees = true +gas_fee_cap = 61_000_000_000 +gas_tip_cap = 30_000_000_000 +gas_price_estimation_enabled = true +gas_price_estimation_blocks = 1000 + +[[Seth.networks]] +name = "ARBITRUM_MAINNET" +transaction_timeout = "10m" +transfer_gas_fee = 21_000 +gas_price = 10_000_000 +eip_1559_dynamic_fees = true +gas_fee_cap = 10_000_000 +gas_tip_cap = 0 +gas_price_estimation_enabled = true +gas_price_estimation_blocks = 
1000 + #### [Network.EVMNetworks.SONEIUM_SEPOLIA] @@ -731,4 +666,4 @@ evm_chain_id = 1946 client_implementation = "Optimism" evm_simulated = false -#### \ No newline at end of file +#### diff --git a/integration-tests/testconfig/forwarder_ocr/example.toml b/integration-tests/testconfig/forwarder_ocr/example.toml index 75143d7b77f..517a341f803 100644 --- a/integration-tests/testconfig/forwarder_ocr/example.toml +++ b/integration-tests/testconfig/forwarder_ocr/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -39,15 +38,6 @@ bearer_token_secret="my-awesome-token" [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/forwarder_ocr2/example.toml b/integration-tests/testconfig/forwarder_ocr2/example.toml index 4941c49a983..3ec3e4c690a 100644 --- a/integration-tests/testconfig/forwarder_ocr2/example.toml +++ b/integration-tests/testconfig/forwarder_ocr2/example.toml @@ -39,15 +39,6 @@ bearer_token_secret="my-awesome-token" [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/functions/example.toml b/integration-tests/testconfig/functions/example.toml index 7502a6fc440..74d931632a8 100644 --- a/integration-tests/testconfig/functions/example.toml +++ b/integration-tests/testconfig/functions/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -16,38 +15,10 @@ log_producer_timeout="10s" # number of retries before log producer gives up and stops listening to logs log_producer_retry_limit=10 -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. 
Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use simulated network [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/keeper/example.toml b/integration-tests/testconfig/keeper/example.toml index 5abb5835629..4efbf974827 100644 --- a/integration-tests/testconfig/keeper/example.toml +++ b/integration-tests/testconfig/keeper/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -16,38 +15,10 @@ log_producer_timeout="10s" # number of retries before log producer gives up and stops listening to logs log_producer_retry_limit=10 -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/keeper/keeper.toml b/integration-tests/testconfig/keeper/keeper.toml index b4f544165a9..d483d6df493 100644 --- a/integration-tests/testconfig/keeper/keeper.toml +++ b/integration-tests/testconfig/keeper/keeper.toml @@ -32,4 +32,4 @@ HTTPSPort = 0 [Keeper] TurnLookBack = 0 -""" +""" \ No newline at end of file diff --git a/integration-tests/testconfig/log_poller/example.toml b/integration-tests/testconfig/log_poller/example.toml index c28d36ae12f..78f3b5482d9 100644 --- a/integration-tests/testconfig/log_poller/example.toml +++ b/integration-tests/testconfig/log_poller/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -16,38 +15,10 @@ log_producer_timeout="10s" # number of retries before log producer gives up and stops listening to logs log_producer_retry_limit=10 -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be 
stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/node/example.toml b/integration-tests/testconfig/node/example.toml index 510379b4f05..bc5628e46b3 100644 --- a/integration-tests/testconfig/node/example.toml +++ b/integration-tests/testconfig/node/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -16,38 +15,10 @@ log_producer_timeout="10s" # number of retries before log producer gives up and stops listening to logs log_producer_retry_limit=10 -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. 
Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" @@ -77,5 +48,4 @@ chainlink_node_funding = 0.5 # Test-specific part [ChainlinkUpgradeImage] -image="public.ecr.aws/chainlink/chainlink" version="2.8.0" \ No newline at end of file diff --git a/integration-tests/testconfig/ocr/example.toml b/integration-tests/testconfig/ocr/example.toml index 26f0dd5a84e..7c1c755567f 100644 --- a/integration-tests/testconfig/ocr/example.toml +++ b/integration-tests/testconfig/ocr/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -39,15 +38,6 @@ bearer_token_secret="my-awesome-token" [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/ocr/overrides/arbitrum_mainnet.toml b/integration-tests/testconfig/ocr/overrides/arbitrum_mainnet.toml new file mode 100644 index 00000000000..953d9f351a4 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/arbitrum_mainnet.toml @@ -0,0 +1,18 @@ +[Network] +selected_networks = ["ARBITRUM_MAINNET"] + +[Soak.Common] +chainlink_node_funding = 0.1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "10m" + +[OCR.Common] +number_of_contracts = 2 + +[Seth] +experiments_enabled = ["slow_funds_return"] diff --git a/integration-tests/testconfig/ocr/overrides/arbitrum_sepolia.toml b/integration-tests/testconfig/ocr/overrides/arbitrum_sepolia.toml new file mode 100644 index 00000000000..1428e50b0e3 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/arbitrum_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["ARBITRUM_SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/base_mainnet.toml b/integration-tests/testconfig/ocr/overrides/base_mainnet.toml new file mode 100644 index 00000000000..a285c23758e --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/base_mainnet.toml @@ -0,0 +1,18 @@ +[Network] +selected_networks = ["BASE_MAINNET"] + +[Soak.Common] +chainlink_node_funding = 5 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "10m" + +[OCR.Common] +number_of_contracts = 2 + +[Seth] +experiments_enabled = ["slow_funds_return"] diff --git a/integration-tests/testconfig/ocr/overrides/base_sepolia.toml b/integration-tests/testconfig/ocr/overrides/base_sepolia.toml new file mode 100644 index 00000000000..3dbbadcbef2 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/base_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["BASE_SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + 
+[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/celo_alfajores.toml b/integration-tests/testconfig/ocr/overrides/celo_alfajores.toml new file mode 100644 index 00000000000..37c4a8cf16d --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/celo_alfajores.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["CELO_ALFAJORES"] + +[Soak.Common] +chainlink_node_funding = 10 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/ethereum_sepolia.toml b/integration-tests/testconfig/ocr/overrides/ethereum_sepolia.toml new file mode 100644 index 00000000000..612e47506a7 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/ethereum_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/linea_sepolia.toml b/integration-tests/testconfig/ocr/overrides/linea_sepolia.toml new file mode 100644 index 00000000000..6fa6218d541 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/linea_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["Linea_Sepolia"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/optimism_mainnet.toml b/integration-tests/testconfig/ocr/overrides/optimism_mainnet.toml new file mode 100644 index 00000000000..eec0640baa3 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/optimism_mainnet.toml @@ -0,0 +1,18 @@ +[Network] +selected_networks = ["OPTIMISM_MAINNET"] + +[Soak.Common] +chainlink_node_funding = 0.1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "10m" + +[OCR.Common] +number_of_contracts = 2 + +[Seth] +experiments_enabled = ["slow_funds_return"] diff --git a/integration-tests/testconfig/ocr/overrides/optimism_sepolia.toml b/integration-tests/testconfig/ocr/overrides/optimism_sepolia.toml new file mode 100644 index 00000000000..fe666b6aa9e --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/optimism_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["OPTIMISM_SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/scroll_sepolia.toml b/integration-tests/testconfig/ocr/overrides/scroll_sepolia.toml new file mode 100644 index 00000000000..0beedfd05ea --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/scroll_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["SCROLL_SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "2m" + +[OCR.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr/overrides/wemix_mainnet.toml b/integration-tests/testconfig/ocr/overrides/wemix_mainnet.toml new 
file mode 100644 index 00000000000..4a7859db3c9 --- /dev/null +++ b/integration-tests/testconfig/ocr/overrides/wemix_mainnet.toml @@ -0,0 +1,18 @@ +[Network] +selected_networks = ["WEMIX_MAINNET"] + +[Soak.Common] +chainlink_node_funding = 5 + +[Soak.OCR] +[Soak.OCR.Common] +test_duration = "24h" + +[Soak.OCR.Soak] +time_between_rounds = "10m" + +[OCR.Common] +number_of_contracts = 2 + +[Seth] +experiments_enabled = ["slow_funds_return"] diff --git a/integration-tests/testconfig/ocr2/example.toml b/integration-tests/testconfig/ocr2/example.toml index 624c3b77752..319f64d2580 100644 --- a/integration-tests/testconfig/ocr2/example.toml +++ b/integration-tests/testconfig/ocr2/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -39,15 +38,6 @@ bearer_token_secret="my-awesome-token" [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/ocr2/overrides/base_sepolia.toml b/integration-tests/testconfig/ocr2/overrides/base_sepolia.toml new file mode 100644 index 00000000000..76e4fc4722d --- /dev/null +++ b/integration-tests/testconfig/ocr2/overrides/base_sepolia.toml @@ -0,0 +1,18 @@ +[Network] +selected_networks = ["BASE_SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR2] +[Soak.OCR2.Common] +test_duration = "24h" + +[Soak.OCR2.Soak] +time_between_rounds = "2m" + +[OCR2.Common] +number_of_contracts = 2 + +[Seth] +experiments_enabled = ["slow_funds_return"] diff --git a/integration-tests/testconfig/ocr2/overrides/ethereum_sepolia.toml b/integration-tests/testconfig/ocr2/overrides/ethereum_sepolia.toml new file mode 100644 index 00000000000..f7e02407808 --- /dev/null +++ b/integration-tests/testconfig/ocr2/overrides/ethereum_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["SEPOLIA"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR2] +[Soak.OCR2.Common] +test_duration = "24h" + +[Soak.OCR2.Soak] +time_between_rounds = "2m" + +[OCR2.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr2/overrides/polygon_amoy.toml b/integration-tests/testconfig/ocr2/overrides/polygon_amoy.toml new file mode 100644 index 00000000000..41a31897c5f --- /dev/null +++ b/integration-tests/testconfig/ocr2/overrides/polygon_amoy.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["POLYGON_AMOY"] + +[Soak.Common] +chainlink_node_funding = 20 + +[Soak.OCR2] +[Soak.OCR2.Common] +test_duration = "24h" + +[Soak.OCR2.Soak] +time_between_rounds = "2m" + +[OCR2.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr2/overrides/polygon_mainnet.toml b/integration-tests/testconfig/ocr2/overrides/polygon_mainnet.toml new file mode 100644 index 00000000000..51da2793133 --- /dev/null +++ b/integration-tests/testconfig/ocr2/overrides/polygon_mainnet.toml @@ -0,0 +1,18 @@ +[Network] +selected_networks = ["POLYGON_MAINNET"] + +[Soak.Common] +chainlink_node_funding = 5 + +[Soak.OCR2] +[Soak.OCR2.Common] +test_duration = "24h" + +[Soak.OCR2.Soak] +time_between_rounds = "10m" + +[OCR2.Common] +number_of_contracts = 2 + +[Seth] +experiments_enabled = ["slow_funds_return"] diff --git 
a/integration-tests/testconfig/ocr2/overrides/wemix_testnet.toml b/integration-tests/testconfig/ocr2/overrides/wemix_testnet.toml new file mode 100644 index 00000000000..82bc06c17ee --- /dev/null +++ b/integration-tests/testconfig/ocr2/overrides/wemix_testnet.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["WEMIX_TESTNET"] + +[Soak.Common] +chainlink_node_funding = 10 + +[Soak.OCR2] +[Soak.OCR2.Common] +test_duration = "24h" + +[Soak.OCR2.Soak] +time_between_rounds = "2m" + +[OCR2.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/ocr2/overrides/xlayer_sepolia.toml b/integration-tests/testconfig/ocr2/overrides/xlayer_sepolia.toml new file mode 100644 index 00000000000..d58098c5b0f --- /dev/null +++ b/integration-tests/testconfig/ocr2/overrides/xlayer_sepolia.toml @@ -0,0 +1,15 @@ +[Network] +selected_networks = ["xlayer_sepolia"] + +[Soak.Common] +chainlink_node_funding = 1 + +[Soak.OCR2] +[Soak.OCR2.Common] +test_duration = "24h" + +[Soak.OCR2.Soak] +time_between_rounds = "2m" + +[OCR2.Common] +number_of_contracts = 2 diff --git a/integration-tests/testconfig/vrfv2/example.toml b/integration-tests/testconfig/vrfv2/example.toml index 9417a422cf0..13af6dee620 100644 --- a/integration-tests/testconfig/vrfv2/example.toml +++ b/integration-tests/testconfig/vrfv2/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -16,38 +15,10 @@ log_producer_timeout="10s" # number of retries before log producer gives up and stops listening to logs log_producer_retry_limit=10 -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. 
Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" diff --git a/integration-tests/testconfig/vrfv2plus/example.toml b/integration-tests/testconfig/vrfv2plus/example.toml index d6e7a3f28d5..160e9ba03a9 100644 --- a/integration-tests/testconfig/vrfv2plus/example.toml +++ b/integration-tests/testconfig/vrfv2plus/example.toml @@ -1,7 +1,6 @@ # Example of full config with all fields # General part [ChainlinkImage] -image="public.ecr.aws/chainlink/chainlink" version="2.7.0" [Logging] @@ -16,38 +15,10 @@ log_producer_timeout="10s" # number of retries before log producer gives up and stops listening to logs log_producer_retry_limit=10 -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] -[Network.RpcHttpUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.RpcWsUrls] -polygon_mumbai = ["https://my-rpc-endpoint.io"] - -[Network.WalletKeys] -polygon_mumbai = ["change-me-to-your-PK"] - [PrivateEthereumNetwork] # pos or pow consensus_type="pos" From 0799e6eddabd7717aad3624622dbe4054f03cacc Mon Sep 17 00:00:00 2001 From: Lukasz <120112546+lukaszcl@users.noreply.github.com> Date: Mon, 23 Sep 2024 11:42:12 +0200 Subject: [PATCH 04/14] Bump CTF (#14518) --- integration-tests/go.mod | 2 +- integration-tests/go.sum | 4 ++-- integration-tests/load/go.mod | 2 +- integration-tests/load/go.sum | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/integration-tests/go.mod b/integration-tests/go.mod index 5ea0f78f6b4..8387c80782d 100644 --- a/integration-tests/go.mod +++ b/integration-tests/go.mod @@ -42,7 +42,7 @@ require ( github.com/smartcontractkit/chainlink-ccip v0.0.0-20240920150748-cf2125c094fe github.com/smartcontractkit/chainlink-common v0.2.3-0.20240919092417-53e784c2e420 github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0 - github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.6 + github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 github.com/smartcontractkit/chainlink-testing-framework/wasp v1.50.0 diff --git a/integration-tests/go.sum b/integration-tests/go.sum index bdc7ebb362a..effab5e3da3 100644 --- a/integration-tests/go.sum +++ b/integration-tests/go.sum @@ 
-1439,8 +1439,8 @@ github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.202409 github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240911194142-506bc469d8ae/go.mod h1:ec/a20UZ7YRK4oxJcnTBFzp1+DBcJcwqEaerUMsktMs= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0 h1:mgjBQIEy+3V3G6K8e+6by3xndgsXdYYsdy+7kzQZwSk= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0/go.mod h1:pdIxrooP5CFGmC0p5NTOBiZAFtMw+5pTT4de5GY3ywA= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.6 h1:Pzr5VAMdI2CjFftodGkilMTFlIjCHJ7oqWAD7aZvFeI= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.6/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 h1:IzDNN3YvQL0yAFLj7fDJqGUDR76ewGhVJx5RiovKDI4= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 h1:VIxK8u0Jd0Q/VuhmsNm6Bls6Tb31H/sA3A/rbc5hnhg= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0/go.mod h1:lyAu+oMXdNUzEDScj2DXB2IueY+SDXPPfyl/kb63tMM= github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 h1:2OxnPfvjC+zs0ZokSsRTRnJrEGJ4NVJwZgfroS1lPHs= diff --git a/integration-tests/load/go.mod b/integration-tests/load/go.mod index e9e5c6ee4ea..46824e50fbb 100644 --- a/integration-tests/load/go.mod +++ b/integration-tests/load/go.mod @@ -16,7 +16,7 @@ require ( github.com/rs/zerolog v1.33.0 github.com/slack-go/slack v0.12.2 github.com/smartcontractkit/chainlink-common v0.2.3-0.20240919092417-53e784c2e420 - github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.6 + github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 github.com/smartcontractkit/chainlink-testing-framework/wasp v1.50.0 github.com/smartcontractkit/chainlink/integration-tests v0.0.0-20240214231432-4ad5eb95178c diff --git a/integration-tests/load/go.sum b/integration-tests/load/go.sum index 96f1ed8aea5..eb4f92d14eb 100644 --- a/integration-tests/load/go.sum +++ b/integration-tests/load/go.sum @@ -1413,8 +1413,8 @@ github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.202409 github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240911194142-506bc469d8ae/go.mod h1:ec/a20UZ7YRK4oxJcnTBFzp1+DBcJcwqEaerUMsktMs= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0 h1:mgjBQIEy+3V3G6K8e+6by3xndgsXdYYsdy+7kzQZwSk= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0/go.mod h1:pdIxrooP5CFGmC0p5NTOBiZAFtMw+5pTT4de5GY3ywA= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.6 h1:Pzr5VAMdI2CjFftodGkilMTFlIjCHJ7oqWAD7aZvFeI= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.6/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 h1:IzDNN3YvQL0yAFLj7fDJqGUDR76ewGhVJx5RiovKDI4= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 h1:VIxK8u0Jd0Q/VuhmsNm6Bls6Tb31H/sA3A/rbc5hnhg= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0/go.mod h1:lyAu+oMXdNUzEDScj2DXB2IueY+SDXPPfyl/kb63tMM= 
github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 h1:2OxnPfvjC+zs0ZokSsRTRnJrEGJ4NVJwZgfroS1lPHs= From 5acca3719ecd7a3189db3a8a8d09418ed8423016 Mon Sep 17 00:00:00 2001 From: Joe Huang Date: Mon, 23 Sep 2024 04:58:44 -0500 Subject: [PATCH 05/14] support new heads polling over http rpc client (#14373) * add functions for newHead polling * add new flag, NewHeadsPollInterval * fix test * fix test * step for adding polling new head implementation and sync+outOfSync loop handling logic. To add unit test * fix lint * add unit test * update changeset * update comments * simplify step 1, need to fix test * remove tests, no longer needed * fix lint * fix mock * fix simulated client * update interface * rm more * temp update for testing log level * temp update * revert modification to polygon toml * enable heap monitoring * enable for testing * revert * Dmytro's comments * make func private * fix test * add polling support for SubscribeNewHead * update log level --- .changeset/happy-feet-rhyme.md | 11 +++ common/client/node.go | 1 + common/client/node_test.go | 5 ++ core/chains/evm/client/config_builder.go | 3 +- core/chains/evm/client/config_builder_test.go | 5 +- core/chains/evm/client/evm_client.go | 4 +- core/chains/evm/client/evm_client_test.go | 4 +- core/chains/evm/client/helpers_test.go | 9 ++- core/chains/evm/client/rpc_client.go | 32 +++++++++ core/chains/evm/client/rpc_client_test.go | 22 +++--- .../evm/config/chain_scoped_node_pool.go | 4 ++ core/chains/evm/config/config.go | 1 + core/chains/evm/config/toml/config.go | 6 ++ .../evm/config/toml/defaults/fallback.toml | 1 + core/config/docs/chains-evm.toml | 4 ++ core/services/chainlink/config_test.go | 2 + .../chainlink/testdata/config-full.toml | 1 + .../config-multi-chain-effective.toml | 3 + core/web/resolver/testdata/config-full.toml | 1 + .../config-multi-chain-effective.toml | 3 + docs/CONFIG.md | 72 +++++++++++++++++++ .../node/validate/defaults-override.txtar | 1 + .../disk-based-logging-disabled.txtar | 1 + .../validate/disk-based-logging-no-dir.txtar | 1 + .../node/validate/disk-based-logging.txtar | 1 + testdata/scripts/node/validate/invalid.txtar | 1 + testdata/scripts/node/validate/valid.txtar | 1 + 27 files changed, 182 insertions(+), 18 deletions(-) create mode 100644 .changeset/happy-feet-rhyme.md diff --git a/.changeset/happy-feet-rhyme.md b/.changeset/happy-feet-rhyme.md new file mode 100644 index 00000000000..6e1697d96ae --- /dev/null +++ b/.changeset/happy-feet-rhyme.md @@ -0,0 +1,11 @@ +--- +"chainlink": minor +--- + +This PR introduces a few changes: +- Add a new config option `EVM.NodePool.NewHeadsPollInterval` (0 by default, which disables it): an interval for periodically polling new blocks using the HTTP client rather than subscribing to the WS feed. +- Update the new-heads handler to poll new heads over HTTP, and register the subscription in the node lifecycle logic. +- If new-heads polling is enabled, the WS new-heads subscription is replaced with the new HTTP-based polling. + +Note: There will be another PR making the WS URL optional under some extra conditions.
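For illustration only (an editor's sketch, not part of this patch's diffs; the chain ID and the 10s value are arbitrary example values), enabling the new HTTP-based head polling in a node's TOML config would look like this:

```toml
[[EVM]]
ChainID = '1'

[EVM.NodePool]
# Any nonzero duration switches new-head tracking from the WS "newHeads"
# subscription to polling the latest block over HTTP at this interval.
# The default of '0s' keeps the existing WS subscription behavior.
NewHeadsPollInterval = '10s'
```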
+#added diff --git a/common/client/node.go b/common/client/node.go index d6543c772a8..1f55e69cacc 100644 --- a/common/client/node.go +++ b/common/client/node.go @@ -45,6 +45,7 @@ type NodeConfig interface { FinalizedBlockPollInterval() time.Duration EnforceRepeatableRead() bool DeathDeclarationDelay() time.Duration + NewHeadsPollInterval() time.Duration } type ChainConfig interface { diff --git a/common/client/node_test.go b/common/client/node_test.go index 3b971e84902..66bb50fc94f 100644 --- a/common/client/node_test.go +++ b/common/client/node_test.go @@ -20,6 +20,11 @@ type testNodeConfig struct { enforceRepeatableRead bool finalizedBlockPollInterval time.Duration deathDeclarationDelay time.Duration + newHeadsPollInterval time.Duration +} + +func (n testNodeConfig) NewHeadsPollInterval() time.Duration { + return n.newHeadsPollInterval } func (n testNodeConfig) PollFailureThreshold() uint32 { diff --git a/core/chains/evm/client/config_builder.go b/core/chains/evm/client/config_builder.go index e713ec9be24..9a31f9e4b40 100644 --- a/core/chains/evm/client/config_builder.go +++ b/core/chains/evm/client/config_builder.go @@ -43,7 +43,7 @@ func NewClientConfigs( deathDeclarationDelay time.Duration, noNewFinalizedHeadsThreshold time.Duration, finalizedBlockPollInterval time.Duration, - + newHeadsPollInterval time.Duration, ) (commonclient.ChainConfig, evmconfig.NodePool, []*toml.Node, error) { nodes, err := parseNodeConfigs(nodeCfgs) if err != nil { @@ -59,6 +59,7 @@ func NewClientConfigs( EnforceRepeatableRead: enforceRepeatableRead, DeathDeclarationDelay: commonconfig.MustNewDuration(deathDeclarationDelay), FinalizedBlockPollInterval: commonconfig.MustNewDuration(finalizedBlockPollInterval), + NewHeadsPollInterval: commonconfig.MustNewDuration(newHeadsPollInterval), } nodePoolCfg := &evmconfig.NodePoolConfig{C: nodePool} chainConfig := &evmconfig.EVMConfig{ diff --git a/core/chains/evm/client/config_builder_test.go b/core/chains/evm/client/config_builder_test.go index 403c6c2d619..28620ac6ca9 100644 --- a/core/chains/evm/client/config_builder_test.go +++ b/core/chains/evm/client/config_builder_test.go @@ -37,9 +37,11 @@ func TestClientConfigBuilder(t *testing.T) { finalityDepth := ptr(uint32(10)) finalityTagEnabled := ptr(true) noNewHeadsThreshold := time.Second + newHeadsPollInterval := 0 * time.Second chainCfg, nodePool, nodes, err := client.NewClientConfigs(selectionMode, leaseDuration, chainTypeStr, nodeConfigs, pollFailureThreshold, pollInterval, syncThreshold, nodeIsSyncingEnabled, noNewHeadsThreshold, finalityDepth, - finalityTagEnabled, finalizedBlockOffset, enforceRepeatableRead, deathDeclarationDelay, noNewFinalizedBlocksThreshold, pollInterval) + finalityTagEnabled, finalizedBlockOffset, enforceRepeatableRead, deathDeclarationDelay, noNewFinalizedBlocksThreshold, + pollInterval, newHeadsPollInterval) require.NoError(t, err) // Validate node pool configs @@ -52,6 +54,7 @@ func TestClientConfigBuilder(t *testing.T) { require.Equal(t, *enforceRepeatableRead, nodePool.EnforceRepeatableRead()) require.Equal(t, deathDeclarationDelay, nodePool.DeathDeclarationDelay()) require.Equal(t, pollInterval, nodePool.FinalizedBlockPollInterval()) + require.Equal(t, newHeadsPollInterval, nodePool.NewHeadsPollInterval()) // Validate node configs require.Equal(t, *nodeConfigs[0].Name, *nodes[0].Name) diff --git a/core/chains/evm/client/evm_client.go b/core/chains/evm/client/evm_client.go index 1fd533d6aab..c26362d6351 100644 --- a/core/chains/evm/client/evm_client.go +++ 
b/core/chains/evm/client/evm_client.go @@ -22,13 +22,13 @@ func NewEvmClient(cfg evmconfig.NodePool, chainCfg commonclient.ChainConfig, cli for i, node := range nodes { if node.SendOnly != nil && *node.SendOnly { rpc := NewRPCClient(lggr, empty, (*url.URL)(node.HTTPURL), *node.Name, int32(i), chainID, - commonclient.Secondary, cfg.FinalizedBlockPollInterval(), largePayloadRPCTimeout, defaultRPCTimeout, chainType) + commonclient.Secondary, cfg.FinalizedBlockPollInterval(), cfg.NewHeadsPollInterval(), largePayloadRPCTimeout, defaultRPCTimeout, chainType) sendonly := commonclient.NewSendOnlyNode(lggr, (url.URL)(*node.HTTPURL), *node.Name, chainID, rpc) sendonlys = append(sendonlys, sendonly) } else { rpc := NewRPCClient(lggr, (url.URL)(*node.WSURL), (*url.URL)(node.HTTPURL), *node.Name, int32(i), - chainID, commonclient.Primary, cfg.FinalizedBlockPollInterval(), largePayloadRPCTimeout, defaultRPCTimeout, chainType) + chainID, commonclient.Primary, cfg.FinalizedBlockPollInterval(), cfg.NewHeadsPollInterval(), largePayloadRPCTimeout, defaultRPCTimeout, chainType) primaryNode := commonclient.NewNode(cfg, chainCfg, lggr, (url.URL)(*node.WSURL), (*url.URL)(node.HTTPURL), *node.Name, int32(i), chainID, *node.Order, rpc, "EVM") diff --git a/core/chains/evm/client/evm_client_test.go b/core/chains/evm/client/evm_client_test.go index bdfcf426744..b762c14653c 100644 --- a/core/chains/evm/client/evm_client_test.go +++ b/core/chains/evm/client/evm_client_test.go @@ -29,6 +29,7 @@ func TestNewEvmClient(t *testing.T) { deathDeclarationDelay := time.Second * 3 noNewFinalizedBlocksThreshold := time.Second * 5 finalizedBlockPollInterval := time.Second * 4 + newHeadsPollInterval := time.Second * 4 nodeConfigs := []client.NodeConfig{ { Name: ptr("foo"), @@ -40,7 +41,8 @@ func TestNewEvmClient(t *testing.T) { finalityTagEnabled := ptr(true) chainCfg, nodePool, nodes, err := client.NewClientConfigs(selectionMode, leaseDuration, chainTypeStr, nodeConfigs, pollFailureThreshold, pollInterval, syncThreshold, nodeIsSyncingEnabled, noNewHeadsThreshold, finalityDepth, - finalityTagEnabled, finalizedBlockOffset, enforceRepeatableRead, deathDeclarationDelay, noNewFinalizedBlocksThreshold, finalizedBlockPollInterval) + finalityTagEnabled, finalizedBlockOffset, enforceRepeatableRead, deathDeclarationDelay, noNewFinalizedBlocksThreshold, + finalizedBlockPollInterval, newHeadsPollInterval) require.NoError(t, err) client := client.NewEvmClient(nodePool, chainCfg, nil, logger.Test(t), testutils.FixtureChainID, nodes, chaintype.ChainType(chainTypeStr)) diff --git a/core/chains/evm/client/helpers_test.go b/core/chains/evm/client/helpers_test.go index 67977b180ed..031f6481574 100644 --- a/core/chains/evm/client/helpers_test.go +++ b/core/chains/evm/client/helpers_test.go @@ -92,6 +92,7 @@ type TestNodePoolConfig struct { NodeErrors config.ClientErrors EnforceRepeatableReadVal bool NodeDeathDeclarationDelay time.Duration + NodeNewHeadsPollInterval time.Duration } func (tc TestNodePoolConfig) PollFailureThreshold() uint32 { return tc.NodePollFailureThreshold } @@ -110,6 +111,10 @@ func (tc TestNodePoolConfig) FinalizedBlockPollInterval() time.Duration { return tc.NodeFinalizedBlockPollInterval } +func (tc TestNodePoolConfig) NewHeadsPollInterval() time.Duration { + return tc.NodeNewHeadsPollInterval +} + func (tc TestNodePoolConfig) Errors() config.ClientErrors { return tc.NodeErrors } @@ -143,7 +148,7 @@ func NewChainClientWithTestNode( } lggr := logger.Test(t) - rpc := NewRPCClient(lggr, *parsed, rpcHTTPURL, "eth-primary-rpc-0", 
id, chainID, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := NewRPCClient(lggr, *parsed, rpcHTTPURL, "eth-primary-rpc-0", id, chainID, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") n := commonclient.NewNode[*big.Int, *evmtypes.Head, RPCClient]( nodeCfg, clientMocks.ChainConfig{NoNewHeadsThresholdVal: noNewHeadsThreshold}, lggr, *parsed, rpcHTTPURL, "eth-primary-node-0", id, chainID, 1, rpc, "EVM") @@ -155,7 +160,7 @@ return nil, pkgerrors.Errorf("sendonly ethereum rpc url scheme must be http(s): %s", u.String()) } var empty url.URL - rpc := NewRPCClient(lggr, empty, &sendonlyRPCURLs[i], fmt.Sprintf("eth-sendonly-rpc-%d", i), id, chainID, commonclient.Secondary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := NewRPCClient(lggr, empty, &sendonlyRPCURLs[i], fmt.Sprintf("eth-sendonly-rpc-%d", i), id, chainID, commonclient.Secondary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") s := commonclient.NewSendOnlyNode[*big.Int, RPCClient]( lggr, u, fmt.Sprintf("eth-sendonly-%d", i), chainID, rpc) sendonlys = append(sendonlys, s) diff --git a/core/chains/evm/client/rpc_client.go index 0752def9949..763348173aa 100644 --- a/core/chains/evm/client/rpc_client.go +++ b/core/chains/evm/client/rpc_client.go @@ -123,6 +123,7 @@ type rpcClient struct { largePayloadRpcTimeout time.Duration rpcTimeout time.Duration finalizedBlockPollInterval time.Duration + newHeadsPollInterval time.Duration chainType chaintype.ChainType ws rawclient @@ -159,6 +160,7 @@ func NewRPCClient( chainID *big.Int, tier commonclient.NodeTier, finalizedBlockPollInterval time.Duration, + newHeadsPollInterval time.Duration, largePayloadRpcTimeout time.Duration, rpcTimeout time.Duration, chainType chaintype.ChainType, @@ -174,6 +176,7 @@ func NewRPCClient( r.tier = tier r.ws.uri = wsuri r.finalizedBlockPollInterval = finalizedBlockPollInterval + r.newHeadsPollInterval = newHeadsPollInterval if httpuri != nil { r.http = &rawclient{uri: *httpuri} } @@ -490,6 +493,18 @@ func (r *rpcClient) SubscribeNewHead(ctx context.Context, channel chan<- *evmtyp args := []interface{}{"newHeads"} lggr := r.newRqLggr().With("args", args) + if r.newHeadsPollInterval > 0 { + interval := r.newHeadsPollInterval + timeout := interval + poller, _ := commonclient.NewPoller[*evmtypes.Head](interval, r.latestBlock, timeout, r.rpcLog) + if err = poller.Start(ctx); err != nil { + return nil, err + } + + lggr.Debugf("Polling new heads over http") + return &poller, nil + } + lggr.Debug("RPC call: evmclient.Client#EthSubscribe") start := time.Now() defer func() { @@ -523,6 +538,19 @@ func (r *rpcClient) SubscribeToHeads(ctx context.Context) (ch <-chan *evmtypes.H start := time.Now() lggr := r.newRqLggr().With("args", args) + // if new-heads polling over HTTP is enabled, it replaces the WS newHeads subscription + if r.newHeadsPollInterval > 0 { + interval := r.newHeadsPollInterval + timeout := interval + poller, channel := commonclient.NewPoller[*evmtypes.Head](interval, r.latestBlock, timeout, r.rpcLog) + if err = poller.Start(ctx); err != nil { + return nil, nil, err + } + + lggr.Debugf("Polling new heads over http") + return channel, &poller, nil + } + lggr.Debug("RPC call: evmclient.Client#EthSubscribe") defer func() { duration := time.Since(start) @@ -695,6 +723,10 @@ func (r *rpcClient) LatestFinalizedBlock(ctx context.Context) (head *evmtypes.He return } +func (r
*rpcClient) latestBlock(ctx context.Context) (head *evmtypes.Head, err error) { + return r.BlockByNumber(ctx, nil) +} + func (r *rpcClient) astarLatestFinalizedBlock(ctx context.Context, result interface{}) (err error) { var hashResult string err = r.CallContext(ctx, &hashResult, "chain_getFinalizedHead") diff --git a/core/chains/evm/client/rpc_client_test.go b/core/chains/evm/client/rpc_client_test.go index 07e097727a3..b594a0ca166 100644 --- a/core/chains/evm/client/rpc_client_test.go +++ b/core/chains/evm/client/rpc_client_test.go @@ -61,7 +61,7 @@ func TestRPCClient_SubscribeNewHead(t *testing.T) { server := testutils.NewWSServer(t, chainId, serverCallBack) wsURL := server.WSURL() - rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") defer rpc.Close() require.NoError(t, rpc.Dial(ctx)) // set to default values @@ -111,7 +111,7 @@ func TestRPCClient_SubscribeNewHead(t *testing.T) { server := testutils.NewWSServer(t, chainId, serverCallBack) wsURL := server.WSURL() - rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") defer rpc.Close() require.NoError(t, rpc.Dial(ctx)) ch := make(chan *evmtypes.Head) @@ -136,7 +136,7 @@ func TestRPCClient_SubscribeNewHead(t *testing.T) { server := testutils.NewWSServer(t, chainId, serverCallBack) wsURL := server.WSURL() - rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") defer rpc.Close() require.NoError(t, rpc.Dial(ctx)) var wg sync.WaitGroup @@ -160,7 +160,7 @@ func TestRPCClient_SubscribeNewHead(t *testing.T) { t.Run("Block's chain ID matched configured", func(t *testing.T) { server := testutils.NewWSServer(t, chainId, serverCallBack) wsURL := server.WSURL() - rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") defer rpc.Close() require.NoError(t, rpc.Dial(ctx)) ch := make(chan *evmtypes.Head) @@ -177,7 +177,7 @@ func TestRPCClient_SubscribeNewHead(t *testing.T) { }) wsURL := server.WSURL() observedLggr, observed := logger.TestObserved(t, zap.DebugLevel) - rpc := client.NewRPCClient(observedLggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(observedLggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") require.NoError(t, rpc.Dial(ctx)) server.Close() _, err := rpc.SubscribeNewHead(ctx, make(chan *evmtypes.Head)) @@ -187,7 +187,7 @@ func TestRPCClient_SubscribeNewHead(t *testing.T) { t.Run("Subscription error is properly wrapper", func(t *testing.T) { server := testutils.NewWSServer(t, chainId, serverCallBack) 
wsURL := server.WSURL() - rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") defer rpc.Close() require.NoError(t, rpc.Dial(ctx)) sub, err := rpc.SubscribeNewHead(ctx, make(chan *evmtypes.Head)) @@ -215,7 +215,7 @@ func TestRPCClient_SubscribeFilterLogs(t *testing.T) { }) wsURL := server.WSURL() observedLggr, observed := logger.TestObserved(t, zap.DebugLevel) - rpc := client.NewRPCClient(observedLggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(observedLggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") require.NoError(t, rpc.Dial(ctx)) server.Close() _, err := rpc.SubscribeFilterLogs(ctx, ethereum.FilterQuery{}, make(chan types.Log)) @@ -232,7 +232,7 @@ func TestRPCClient_SubscribeFilterLogs(t *testing.T) { return resp }) wsURL := server.WSURL() - rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") defer rpc.Close() require.NoError(t, rpc.Dial(ctx)) sub, err := rpc.SubscribeFilterLogs(ctx, ethereum.FilterQuery{}, make(chan types.Log)) @@ -281,7 +281,7 @@ func TestRPCClient_LatestFinalizedBlock(t *testing.T) { } server := createRPCServer() - rpc := client.NewRPCClient(lggr, *server.URL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + rpc := client.NewRPCClient(lggr, *server.URL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, "") require.NoError(t, rpc.Dial(ctx)) defer rpc.Close() server.Head = &evmtypes.Head{Number: 128} @@ -391,7 +391,7 @@ func TestRpcClientLargePayloadTimeout(t *testing.T) { // use something unreasonably large for RPC timeout to ensure that we use largePayloadRPCTimeout const rpcTimeout = time.Hour const largePayloadRPCTimeout = tests.TestInterval - rpc := client.NewRPCClient(logger.Test(t), *rpcURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, largePayloadRPCTimeout, rpcTimeout, "") + rpc := client.NewRPCClient(logger.Test(t), *rpcURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, largePayloadRPCTimeout, rpcTimeout, "") require.NoError(t, rpc.Dial(ctx)) defer rpc.Close() err := testCase.Fn(ctx, rpc) @@ -431,7 +431,7 @@ func TestAstarCustomFinality(t *testing.T) { const expectedFinalizedBlockNumber = int64(4) const expectedFinalizedBlockHash = "0x7441e97acf83f555e0deefef86db636bc8a37eb84747603412884e4df4d22804" - rpcClient := client.NewRPCClient(logger.Test(t), *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, chaintype.ChainAstar) + rpcClient := client.NewRPCClient(logger.Test(t), *wsURL, nil, "rpc", 1, chainId, commonclient.Primary, 0, 0, commonclient.QueryTimeout, commonclient.QueryTimeout, chaintype.ChainAstar) defer rpcClient.Close() err := rpcClient.Dial(tests.Context(t)) require.NoError(t, err) diff --git a/core/chains/evm/config/chain_scoped_node_pool.go b/core/chains/evm/config/chain_scoped_node_pool.go index 
a4974366486..4b1d02d148e 100644 --- a/core/chains/evm/config/chain_scoped_node_pool.go +++ b/core/chains/evm/config/chain_scoped_node_pool.go @@ -38,6 +38,10 @@ func (n *NodePoolConfig) FinalizedBlockPollInterval() time.Duration { return n.C.FinalizedBlockPollInterval.Duration() } +func (n *NodePoolConfig) NewHeadsPollInterval() time.Duration { + return n.C.NewHeadsPollInterval.Duration() +} + func (n *NodePoolConfig) Errors() ClientErrors { return &clientErrorsConfig{c: n.C.Errors} } func (n *NodePoolConfig) EnforceRepeatableRead() bool { diff --git a/core/chains/evm/config/config.go index 943616d9630..e5b806aa58c 100644 --- a/core/chains/evm/config/config.go +++ b/core/chains/evm/config/config.go @@ -182,6 +182,7 @@ type NodePool interface { Errors() ClientErrors EnforceRepeatableRead() bool DeathDeclarationDelay() time.Duration + NewHeadsPollInterval() time.Duration } // TODO BCF-2509 does the chainscopedconfig really need the entire app config? diff --git a/core/chains/evm/config/toml/config.go index 9fb418cc8df..fd4039f5ea1 100644 --- a/core/chains/evm/config/toml/config.go +++ b/core/chains/evm/config/toml/config.go @@ -885,6 +885,7 @@ type NodePool struct { Errors ClientErrors `toml:",omitempty"` EnforceRepeatableRead *bool DeathDeclarationDelay *commonconfig.Duration + NewHeadsPollInterval *commonconfig.Duration } func (p *NodePool) setFrom(f *NodePool) { @@ -917,6 +918,11 @@ func (p *NodePool) setFrom(f *NodePool) { if v := f.DeathDeclarationDelay; v != nil { p.DeathDeclarationDelay = v } + + if v := f.NewHeadsPollInterval; v != nil { + p.NewHeadsPollInterval = v + } + p.Errors.setFrom(&f.Errors) } diff --git a/core/chains/evm/config/toml/defaults/fallback.toml index 2f9af4f85a8..6f43f956faf 100644 --- a/core/chains/evm/config/toml/defaults/fallback.toml +++ b/core/chains/evm/config/toml/defaults/fallback.toml @@ -77,6 +77,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 diff --git a/core/config/docs/chains-evm.toml index ce85f242f5d..bab4385f9fb 100644 --- a/core/config/docs/chains-evm.toml +++ b/core/config/docs/chains-evm.toml @@ -409,6 +409,10 @@ EnforceRepeatableRead = false # Default # trigger declaration of `FinalizedBlockOutOfSync` due to insignificant network delays in broadcasting of the finalized state among RPCs. # RPC will not be picked to handle a request even if this option is set to a nonzero value. DeathDeclarationDelay = '10s' # Default +# NewHeadsPollInterval defines an interval for periodically polling new blocks using the HTTP client rather than subscribing to the WS feed +# +# Set to 0 to disable. +NewHeadsPollInterval = '0s' # Default # **ADVANCED** # Errors enable the node to provide custom regex patterns to match against error messages from RPCs.
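The code path added in rpc_client.go above swaps a push-style WS subscription for pull-based polling. As a rough, self-contained sketch of that pattern (an editor's illustration, not the actual `commonclient.Poller`; the `Head` type, `headPoller`, and the fetch callback are invented stand-ins), the following Go program fetches the latest head on a fixed interval and forwards it on a channel until unsubscribed:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// Head stands in for evmtypes.Head.
type Head struct{ Number int64 }

// headPoller emulates the poller the diff wires in when
// newHeadsPollInterval > 0: a ticker-driven fetch loop that feeds a
// channel, in place of a WS "newHeads" subscription.
type headPoller struct {
	interval time.Duration
	fetch    func(context.Context) (*Head, error)
	ch       chan *Head
	cancel   context.CancelFunc
}

func newHeadPoller(interval time.Duration, fetch func(context.Context) (*Head, error)) (*headPoller, <-chan *Head) {
	p := &headPoller{interval: interval, fetch: fetch, ch: make(chan *Head)}
	return p, p.ch
}

func (p *headPoller) Start(ctx context.Context) {
	ctx, p.cancel = context.WithCancel(ctx)
	go func() {
		ticker := time.NewTicker(p.interval)
		defer ticker.Stop()
		defer close(p.ch)
		for {
			select {
			case <-ctx.Done():
				return
			case <-ticker.C:
				// One HTTP fetch per tick, bounded by the interval so slow
				// requests cannot pile up (the diff likewise sets the poll
				// timeout equal to the interval).
				reqCtx, cancelReq := context.WithTimeout(ctx, p.interval)
				head, err := p.fetch(reqCtx)
				cancelReq()
				if err != nil {
					continue // skip this tick; the next tick retries
				}
				select {
				case p.ch <- head:
				case <-ctx.Done():
					return
				}
			}
		}
	}()
}

// Unsubscribe stops the polling loop, mirroring the subscription
// interface that SubscribeNewHead callers expect.
func (p *headPoller) Unsubscribe() { p.cancel() }

func main() {
	var n int64
	fetch := func(ctx context.Context) (*Head, error) {
		n++ // stand-in for BlockByNumber(ctx, nil) over HTTP
		return &Head{Number: n}, nil
	}
	p, heads := newHeadPoller(100*time.Millisecond, fetch)
	p.Start(context.Background())
	for i := 0; i < 3; i++ {
		fmt.Println("new head:", (<-heads).Number)
	}
	p.Unsubscribe()
}
```

In the actual diff, the fetch function is `r.latestBlock`, a thin wrapper over `BlockByNumber(ctx, nil)`, and the started poller is returned to callers in place of the WS subscription object.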
[EVM.NodePool.Errors] diff --git a/core/services/chainlink/config_test.go b/core/services/chainlink/config_test.go index 44dc9f98ab0..ed69475a54e 100644 --- a/core/services/chainlink/config_test.go +++ b/core/services/chainlink/config_test.go @@ -634,6 +634,7 @@ func TestConfig_Marshal(t *testing.T) { FinalizedBlockPollInterval: &second, EnforceRepeatableRead: ptr(true), DeathDeclarationDelay: &minute, + NewHeadsPollInterval: &zeroSeconds, Errors: evmcfg.ClientErrors{ NonceTooLow: ptr[string]("(: |^)nonce too low"), NonceTooHigh: ptr[string]("(: |^)nonce too high"), @@ -1117,6 +1118,7 @@ NodeIsSyncingEnabled = true FinalizedBlockPollInterval = '1s' EnforceRepeatableRead = true DeathDeclarationDelay = '1m0s' +NewHeadsPollInterval = '0s' [EVM.NodePool.Errors] NonceTooLow = '(: |^)nonce too low' diff --git a/core/services/chainlink/testdata/config-full.toml b/core/services/chainlink/testdata/config-full.toml index eb69cee8b72..36002f46c5f 100644 --- a/core/services/chainlink/testdata/config-full.toml +++ b/core/services/chainlink/testdata/config-full.toml @@ -403,6 +403,7 @@ NodeIsSyncingEnabled = true FinalizedBlockPollInterval = '1s' EnforceRepeatableRead = true DeathDeclarationDelay = '1m0s' +NewHeadsPollInterval = '0s' [EVM.NodePool.Errors] NonceTooLow = '(: |^)nonce too low' diff --git a/core/services/chainlink/testdata/config-multi-chain-effective.toml b/core/services/chainlink/testdata/config-multi-chain-effective.toml index 776481ed6a9..5d16b9b87cb 100644 --- a/core/services/chainlink/testdata/config-multi-chain-effective.toml +++ b/core/services/chainlink/testdata/config-multi-chain-effective.toml @@ -370,6 +370,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 @@ -478,6 +479,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 @@ -580,6 +582,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/core/web/resolver/testdata/config-full.toml b/core/web/resolver/testdata/config-full.toml index ee413a610e4..971036991f2 100644 --- a/core/web/resolver/testdata/config-full.toml +++ b/core/web/resolver/testdata/config-full.toml @@ -402,6 +402,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.NodePool.Errors] NonceTooLow = '(: |^)nonce too low' diff --git a/core/web/resolver/testdata/config-multi-chain-effective.toml b/core/web/resolver/testdata/config-multi-chain-effective.toml index 210894d6162..480fc1b4c09 100644 --- a/core/web/resolver/testdata/config-multi-chain-effective.toml +++ b/core/web/resolver/testdata/config-multi-chain-effective.toml @@ -370,6 +370,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 @@ -478,6 +479,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 @@ -580,6 +582,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' 
EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/docs/CONFIG.md b/docs/CONFIG.md index c3eeaac2827..1512b33cad4 100644 --- a/docs/CONFIG.md +++ b/docs/CONFIG.md @@ -2042,6 +2042,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2144,6 +2145,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2246,6 +2248,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2348,6 +2351,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2451,6 +2455,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -2553,6 +2558,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2655,6 +2661,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2758,6 +2765,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2860,6 +2868,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -2961,6 +2970,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3062,6 +3072,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3164,6 +3175,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3267,6 +3279,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3369,6 +3382,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3471,6 +3485,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3573,6 +3588,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ 
-3675,6 +3691,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -3777,6 +3794,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -3879,6 +3897,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -3981,6 +4000,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -4083,6 +4103,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -4185,6 +4206,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -4287,6 +4309,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -4389,6 +4412,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -4492,6 +4516,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -4594,6 +4619,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -4695,6 +4721,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -4797,6 +4824,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -4899,6 +4927,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5001,6 +5030,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5103,6 +5133,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5204,6 +5235,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5306,6 +5338,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5408,6 +5441,7 @@ NodeIsSyncingEnabled = false 
FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5510,6 +5544,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5612,6 +5647,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -5713,6 +5749,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5815,6 +5852,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -5917,6 +5955,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -6020,6 +6059,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6123,6 +6163,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6226,6 +6267,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6328,6 +6370,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6430,6 +6473,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6532,6 +6576,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6634,6 +6679,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -6735,6 +6781,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -6836,6 +6883,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -6937,6 +6985,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -7039,6 +7088,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7141,6 +7191,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' 
EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -7242,6 +7293,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -7344,6 +7396,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7446,6 +7499,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7549,6 +7603,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7652,6 +7707,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7754,6 +7810,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7856,6 +7913,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -7958,6 +8016,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -8060,6 +8119,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -8162,6 +8222,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 1 @@ -8264,6 +8325,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -8366,6 +8428,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [OCR] ContractConfirmations = 4 @@ -9122,6 +9185,7 @@ NodeIsSyncingEnabled = false # Default FinalizedBlockPollInterval = '5s' # Default EnforceRepeatableRead = false # Default DeathDeclarationDelay = '10s' # Default +NewHeadsPollInterval = '0s' # Default ``` The node pool manages multiple RPC endpoints. @@ -9214,6 +9278,14 @@ Larger values might be helpful to reduce the noisiness of health checks like `En trigger declaration of `FinalizedBlockOutOfSync` due to insignificant network delays in broadcasting of the finalized state among RPCs. RPC will not be picked to handle a request even if this option is set to a nonzero value. +### NewHeadsPollInterval +```toml +NewHeadsPollInterval = '0s' # Default +``` +NewHeadsPollInterval defines the interval for polling new blocks periodically using the HTTP client rather than subscribing to the WS feed + +Set to 0 to disable.
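The poller that a non-zero `NewHeadsPollInterval` drives is not part of this excerpt (the extra `0` argument threaded through the `client.NewRPCClient` test calls earlier in the series appears to be its wiring), so the following is only a minimal sketch of the pattern it enables, under stated assumptions: a stand-in `Head` type and an assumed latest-head fetch callback replace the real RPC client, and error backoff and resubscription handling are omitted.

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// Head is a stand-in for evmtypes.Head; only the block number matters here.
type Head struct{ Number int64 }

// pollNewHeads emulates a "new heads" subscription over HTTP: every interval
// it fetches the latest head and forwards it when the chain has advanced.
// A zero or negative interval disables polling, matching the '0s' default.
func pollNewHeads(ctx context.Context, interval time.Duration,
	latest func(context.Context) (*Head, error), ch chan<- *Head) {
	defer close(ch)
	if interval <= 0 {
		return
	}
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	last := int64(-1)
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			h, err := latest(ctx)
			if err != nil || h == nil || h.Number <= last {
				continue // skip errors and stale duplicates
			}
			last = h.Number
			ch <- h
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 55*time.Millisecond)
	defer cancel()
	var n int64
	ch := make(chan *Head, 8)
	go pollNewHeads(ctx, 10*time.Millisecond, func(context.Context) (*Head, error) {
		n++ // fake RPC: the chain advances one block per call
		return &Head{Number: n}, nil
	}, ch)
	for h := range ch {
		fmt.Println("new head", h.Number)
	}
}
```

Operators would enable it per chain under `[EVM.NodePool]`, e.g. `NewHeadsPollInterval = '10s'`, presumably for RPCs whose WS head feeds are unreliable or unavailable.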
+ ## EVM.NodePool.Errors :warning: **_ADVANCED_**: _Do not change these settings unless you know what you are doing._ ```toml diff --git a/testdata/scripts/node/validate/defaults-override.txtar b/testdata/scripts/node/validate/defaults-override.txtar index 52eb86b3e6d..85228771ce1 100644 --- a/testdata/scripts/node/validate/defaults-override.txtar +++ b/testdata/scripts/node/validate/defaults-override.txtar @@ -443,6 +443,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/testdata/scripts/node/validate/disk-based-logging-disabled.txtar b/testdata/scripts/node/validate/disk-based-logging-disabled.txtar index b9f456c4882..75ab3bb55c7 100644 --- a/testdata/scripts/node/validate/disk-based-logging-disabled.txtar +++ b/testdata/scripts/node/validate/disk-based-logging-disabled.txtar @@ -426,6 +426,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/testdata/scripts/node/validate/disk-based-logging-no-dir.txtar b/testdata/scripts/node/validate/disk-based-logging-no-dir.txtar index 9457ae718c7..de2bdf3209e 100644 --- a/testdata/scripts/node/validate/disk-based-logging-no-dir.txtar +++ b/testdata/scripts/node/validate/disk-based-logging-no-dir.txtar @@ -426,6 +426,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/testdata/scripts/node/validate/disk-based-logging.txtar b/testdata/scripts/node/validate/disk-based-logging.txtar index 9a77ab1eac0..ec0c4423928 100644 --- a/testdata/scripts/node/validate/disk-based-logging.txtar +++ b/testdata/scripts/node/validate/disk-based-logging.txtar @@ -426,6 +426,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/testdata/scripts/node/validate/invalid.txtar b/testdata/scripts/node/validate/invalid.txtar index af8fe17b877..dbef892b275 100644 --- a/testdata/scripts/node/validate/invalid.txtar +++ b/testdata/scripts/node/validate/invalid.txtar @@ -416,6 +416,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 diff --git a/testdata/scripts/node/validate/valid.txtar b/testdata/scripts/node/validate/valid.txtar index c020664740c..83cb6f451af 100644 --- a/testdata/scripts/node/validate/valid.txtar +++ b/testdata/scripts/node/validate/valid.txtar @@ -423,6 +423,7 @@ NodeIsSyncingEnabled = false FinalizedBlockPollInterval = '5s' EnforceRepeatableRead = false DeathDeclarationDelay = '10s' +NewHeadsPollInterval = '0s' [EVM.OCR] ContractConfirmations = 4 From 27d5cbf5787531d541ba774397b3abdfcb8b20a7 Mon Sep 17 00:00:00 2001 From: Dmytro Haidashenko <34754799+dhaidashenko@users.noreply.github.com> Date: Mon, 23 Sep 2024 12:27:46 +0200 Subject: [PATCH 06/14] BCFR-888 LP support chains that have not reached finality yet (#14366) * LP support chains that have not reached finality yet * changelog * fix test * ensure Test_ContractTransmitter_TransmitWithoutSignatures do not run in parallel to prevent db deadlock --- 
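The heart of this patch is small: `logPoller.run` no longer refuses the first poll on a chain that has no finalized block yet, and `latestBlocks` clamps a finalized block number of 0 up to 1 (both hunks below). A self-contained sketch of that clamp, with an assumed function name and the surrounding plumbing stripped:

```go
package main

import "fmt"

// clampFinalizedBN mirrors the one-liner added to latestBlocks below: a chain
// that has not yet produced a finalized block is treated as if block 1 were
// finalized, which keeps the LogPollerBlock.FinalizedBlockNumber > 0 database
// restriction satisfiable without a schema migration. Sketch only; the real
// method also returns the latest head and wraps HeadTracker errors.
func clampFinalizedBN(finalizedBN int64) int64 {
	if finalizedBN == 0 {
		return 1
	}
	return finalizedBN
}

func main() {
	fmt.Println(clampFinalizedBN(0), clampFinalizedBN(42)) // prints: 1 42
}
```

As the in-code comment below spells out, the alternative (dropping the `> 0` restriction) was rejected because the restriction also catches genuine bugs where `FinalizedBlockNumber` was never populated.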
.changeset/famous-goats-hug.md | 5 +++++ .../ocrimpls/contract_transmitter_test.go | 1 - core/chains/evm/logpoller/log_poller.go | 21 ++++++++++--------- core/chains/evm/logpoller/log_poller_test.go | 2 +- 4 files changed, 17 insertions(+), 12 deletions(-) create mode 100644 .changeset/famous-goats-hug.md diff --git a/.changeset/famous-goats-hug.md b/.changeset/famous-goats-hug.md new file mode 100644 index 00000000000..986f86bab28 --- /dev/null +++ b/.changeset/famous-goats-hug.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +LogPoller polls logs even if the chain has not reached finality #internal diff --git a/core/capabilities/ccip/ocrimpls/contract_transmitter_test.go b/core/capabilities/ccip/ocrimpls/contract_transmitter_test.go index eae7abae9d5..43081413ceb 100644 --- a/core/capabilities/ccip/ocrimpls/contract_transmitter_test.go +++ b/core/capabilities/ccip/ocrimpls/contract_transmitter_test.go @@ -90,7 +90,6 @@ func Test_ContractTransmitter_TransmitWithoutSignatures(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { tc := tc - t.Parallel() testTransmitter(t, tc.pluginType, tc.withSigs, tc.expectedSigsEnabled, tc.report) }) } diff --git a/core/chains/evm/logpoller/log_poller.go b/core/chains/evm/logpoller/log_poller.go index 2a551c12378..360511951ee 100644 --- a/core/chains/evm/logpoller/log_poller.go +++ b/core/chains/evm/logpoller/log_poller.go @@ -598,18 +598,11 @@ func (lp *logPoller) run() { } // Otherwise this is the first poll _ever_ on a new chain. // Only safe thing to do is to start at the first finalized block. - latestBlock, latestFinalizedBlockNumber, err := lp.latestBlocks(ctx) + _, latestFinalizedBlockNumber, err := lp.latestBlocks(ctx) if err != nil { lp.lggr.Warnw("Unable to get latest for first poll", "err", err) continue } - // Do not support polling chains which don't even have finality depth worth of blocks. - // Could conceivably support this but not worth the effort. - // Need last finalized block number to be higher than 0 - if latestFinalizedBlockNumber <= 0 { - lp.lggr.Warnw("Insufficient number of blocks on chain, waiting for finality depth", "err", err, "latest", latestBlock.Number) - continue - } - // Starting at the first finalized block. We do not backfill the first finalized block. start = latestFinalizedBlockNumber } else { @@ -1023,8 +1016,16 @@ func (lp *logPoller) latestBlocks(ctx context.Context) (*evmtypes.Head, int64, e return nil, 0, fmt.Errorf("failed to get latest and latest finalized block from HeadTracker: %w", err) } - lp.lggr.Debugw("Latest blocks read from chain", "latest", latest.Number, "finalized", finalized.BlockNumber()) - return latest, finalized.BlockNumber(), nil + finalizedBN := finalized.BlockNumber() + // This is a dirty trick that allows LogPoller to function properly in tests where the chain needs significant time to + // reach finality depth. An alternative to this one-liner is a database migration that drops the restriction + // LogPollerBlock.FinalizedBlockNumber > 0 (which we actually want to keep to spot cases when FinalizedBlockNumber was simply not populated) + // and refactoring of queries that assume that restriction still holds.
+ if finalizedBN == 0 { + finalizedBN = 1 + } + lp.lggr.Debugw("Latest blocks read from chain", "latest", latest.Number, "finalized", finalizedBN) + return latest, finalizedBN, nil } // Find the first place where our chain and their chain have the same block, diff --git a/core/chains/evm/logpoller/log_poller_test.go b/core/chains/evm/logpoller/log_poller_test.go index 6ad76030bb5..1ab548063a7 100644 --- a/core/chains/evm/logpoller/log_poller_test.go +++ b/core/chains/evm/logpoller/log_poller_test.go @@ -1774,7 +1774,7 @@ func Test_PollAndSavePersistsFinalityInBlocks(t *testing.T) { name: "setting last finalized block number to 0 if finality is too deep", useFinalityTag: false, finalityDepth: 20, - expectedFinalizedBlock: 0, + expectedFinalizedBlock: 1, }, { name: "using finality from chain", From 31874ba5a4abbc2dca7b985f04019485a339a71c Mon Sep 17 00:00:00 2001 From: Dmytro Haidashenko <34754799+dhaidashenko@users.noreply.github.com> Date: Mon, 23 Sep 2024 13:07:00 +0200 Subject: [PATCH 07/14] BCI-3668 Optimise HeadTracker's memory usage (#14130) * base benchmarks * Reduce allocs noise from mocks in Backfill Benchmark * Optimize HeadTracker's memory usage * avoid .Parent race in confirmer * optimise block history estimator heads usage * update telemetry test to use new Parent and IsFinalized * avoid redundant allocations * Revert "avoid redundant allocations" This reverts commit 76343e573980ac81df6d6e3b3409a3d164aeb579. * lint fixes * remove redundant files * drop cycle detection logic from head methods. Heads cycle prevention should be enough * drop unused test * nits --- .changeset/short-shoes-crash.md | 5 + common/txmgr/confirmer.go | 5 +- .../evm/client/simulated_backend_client.go | 10 +- .../chains/evm/gas/block_history_estimator.go | 3 +- .../evm/gas/block_history_estimator_test.go | 6 +- core/chains/evm/headtracker/head_saver.go | 12 +- core/chains/evm/headtracker/head_tracker.go | 3 +- .../evm/headtracker/head_tracker_test.go | 299 +++++++----------- core/chains/evm/headtracker/heads.go | 163 ++++++---- core/chains/evm/headtracker/heads_test.go | 125 ++++++-- core/chains/evm/headtracker/heap.go | 35 ++ core/chains/evm/headtracker/types/types.go | 4 + core/chains/evm/log/broadcaster.go | 4 +- core/chains/evm/log/registrations.go | 5 +- core/chains/evm/logpoller/log_poller.go | 6 +- .../evm/logpoller/log_poller_internal_test.go | 3 +- core/chains/evm/txmgr/confirmer_test.go | 115 +++---- core/chains/evm/txmgr/evm_tx_store_test.go | 42 +-- core/chains/evm/txmgr/finalizer_test.go | 35 +- core/chains/evm/txmgr/txmgr_test.go | 15 +- core/chains/evm/types/head_test.go | 51 ++- core/chains/evm/types/models.go | 97 ++---- core/chains/evm/types/models_test.go | 97 +++--- core/internal/cltest/cltest.go | 6 +- core/internal/cltest/factories.go | 5 +- .../headreporter/telemetry_reporter_test.go | 35 +- .../evmregistry/v21/block_subscriber.go | 2 +- .../evmregistry/v21/block_subscriber_test.go | 17 +- .../arbitrum_block_translator_test.go | 53 ++-- .../relay/evm/mercury/v1/data_source_test.go | 7 +- 30 files changed, 681 insertions(+), 584 deletions(-) create mode 100644 .changeset/short-shoes-crash.md create mode 100644 core/chains/evm/headtracker/heap.go diff --git a/.changeset/short-shoes-crash.md b/.changeset/short-shoes-crash.md new file mode 100644 index 00000000000..30431916241 --- /dev/null +++ b/.changeset/short-shoes-crash.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +Optimize HeadTracker's memory usage #internal diff --git a/common/txmgr/confirmer.go b/common/txmgr/confirmer.go 
index bbf1d3b27b7..4eaa6739d58 100644 --- a/common/txmgr/confirmer.go +++ b/common/txmgr/confirmer.go @@ -1148,10 +1148,11 @@ func hasReceiptInLongestChain[ } } } - if head.GetParent() == nil { + + head = head.GetParent() + if head == nil { return false } - head = head.GetParent() } } diff --git a/core/chains/evm/client/simulated_backend_client.go b/core/chains/evm/client/simulated_backend_client.go index 6c569b16b85..11828e58710 100644 --- a/core/chains/evm/client/simulated_backend_client.go +++ b/core/chains/evm/client/simulated_backend_client.go @@ -320,7 +320,15 @@ func (c *SimulatedBackendClient) SubscribeNewHead( case h := <-ch: var head *evmtypes.Head if h != nil { - head = &evmtypes.Head{Difficulty: h.Difficulty, Timestamp: time.Unix(int64(h.Time), 0), Number: h.Number.Int64(), Hash: h.Hash(), ParentHash: h.ParentHash, Parent: lastHead, EVMChainID: ubig.New(c.chainId)} + head = &evmtypes.Head{ + Difficulty: h.Difficulty, + Timestamp: time.Unix(int64(h.Time), 0), //nolint:gosec + Number: h.Number.Int64(), + Hash: h.Hash(), + ParentHash: h.ParentHash, + EVMChainID: ubig.New(c.chainId), + } + head.Parent.Store(lastHead) lastHead = head } select { diff --git a/core/chains/evm/gas/block_history_estimator.go b/core/chains/evm/gas/block_history_estimator.go index b933ea23825..0386d92a0d6 100644 --- a/core/chains/evm/gas/block_history_estimator.go +++ b/core/chains/evm/gas/block_history_estimator.go @@ -655,12 +655,13 @@ func (b *BlockHistoryEstimator) FetchBlocks(ctx context.Context, head *evmtypes. } blocks := make(map[int64]evmtypes.Block) + earliestInChain := head.EarliestInChain() for _, block := range b.getBlocks() { // Make a best-effort to be re-org resistant using the head // chain, refetch blocks that got re-org'd out. // NOTE: Any blocks in the history that are older than the oldest block // in the provided chain will be assumed final. 
- if block.Number < head.EarliestInChain().BlockNumber() { + if block.Number < earliestInChain.BlockNumber() { blocks[block.Number] = block } else if head.IsInChain(block.Hash) { blocks[block.Number] = block diff --git a/core/chains/evm/gas/block_history_estimator_test.go b/core/chains/evm/gas/block_history_estimator_test.go index c2f4a2219cb..d84137cb7cf 100644 --- a/core/chains/evm/gas/block_history_estimator_test.go +++ b/core/chains/evm/gas/block_history_estimator_test.go @@ -515,7 +515,7 @@ func TestBlockHistoryEstimator_FetchBlocks(t *testing.T) { head2 := evmtypes.NewHead(big.NewInt(2), b2.Hash, b1.Hash, uint64(time.Now().Unix()), ubig.New(testutils.FixtureChainID)) head3 := evmtypes.NewHead(big.NewInt(3), b3.Hash, b2.Hash, uint64(time.Now().Unix()), ubig.New(testutils.FixtureChainID)) - head3.Parent = &head2 + head3.Parent.Store(&head2) err := bhe.FetchBlocks(tests.Context(t), &head3) require.NoError(t, err) @@ -570,7 +570,7 @@ func TestBlockHistoryEstimator_FetchBlocks(t *testing.T) { // RE-ORG, head2 and head3 have different hash than saved b2 and b3 head2 := evmtypes.NewHead(big.NewInt(2), utils.NewHash(), b1.Hash, uint64(time.Now().Unix()), ubig.New(testutils.FixtureChainID)) head3 := evmtypes.NewHead(big.NewInt(3), utils.NewHash(), head2.Hash, uint64(time.Now().Unix()), ubig.New(testutils.FixtureChainID)) - head3.Parent = &head2 + head3.Parent.Store(&head2) ethClient.On("BatchCallContext", mock.Anything, mock.MatchedBy(func(b []rpc.BatchElem) bool { return len(b) == 2 && @@ -643,7 +643,7 @@ func TestBlockHistoryEstimator_FetchBlocks(t *testing.T) { // head2 and head3 have identical hash to saved blocks head2 := evmtypes.NewHead(big.NewInt(2), b2.Hash, b1.Hash, uint64(time.Now().Unix()), ubig.New(testutils.FixtureChainID)) head3 := evmtypes.NewHead(big.NewInt(3), b3.Hash, head2.Hash, uint64(time.Now().Unix()), ubig.New(testutils.FixtureChainID)) - head3.Parent = &head2 + head3.Parent.Store(&head2) err := bhe.FetchBlocks(tests.Context(t), &head3) require.NoError(t, err) diff --git a/core/chains/evm/headtracker/head_saver.go b/core/chains/evm/headtracker/head_saver.go index 320e88a19bc..f2613334c49 100644 --- a/core/chains/evm/headtracker/head_saver.go +++ b/core/chains/evm/headtracker/head_saver.go @@ -35,13 +35,12 @@ func NewHeadSaver(lggr logger.Logger, orm ORM, config commontypes.Config, htConf } func (hs *headSaver) Save(ctx context.Context, head *evmtypes.Head) error { - if err := hs.orm.IdempotentInsertHead(ctx, head); err != nil { + // adding new head might form a cycle, so it's better to validate cached chain before persisting it + if err := hs.heads.AddHeads(head); err != nil { return err } - hs.heads.AddHeads(head) - - return nil + return hs.orm.IdempotentInsertHead(ctx, head) } func (hs *headSaver) Load(ctx context.Context, latestFinalized int64) (chain *evmtypes.Head, err error) { @@ -51,7 +50,10 @@ func (hs *headSaver) Load(ctx context.Context, latestFinalized int64) (chain *ev return nil, err } - hs.heads.AddHeads(heads...) + err = hs.heads.AddHeads(heads...) 
+ if err != nil { + return nil, fmt.Errorf("failed to populate cache with loaded heads: %w", err) + } return hs.heads.LatestHead(), nil } diff --git a/core/chains/evm/headtracker/head_tracker.go b/core/chains/evm/headtracker/head_tracker.go index f7607189f7e..bb39b3b5c79 100644 --- a/core/chains/evm/headtracker/head_tracker.go +++ b/core/chains/evm/headtracker/head_tracker.go @@ -11,14 +11,13 @@ import ( "github.com/smartcontractkit/chainlink/v2/common/headtracker" commontypes "github.com/smartcontractkit/chainlink/v2/common/headtracker/types" - evmclient "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client" httypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/headtracker/types" evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" ) func NewHeadTracker( lggr logger.Logger, - ethClient evmclient.Client, + ethClient httypes.Client, config commontypes.Config, htConfig commontypes.HeadTrackerConfig, headBroadcaster httypes.HeadBroadcaster, diff --git a/core/chains/evm/headtracker/head_tracker_test.go b/core/chains/evm/headtracker/head_tracker_test.go index 21ff1b1a929..de54f12ff96 100644 --- a/core/chains/evm/headtracker/head_tracker_test.go +++ b/core/chains/evm/headtracker/head_tracker_test.go @@ -12,7 +12,6 @@ import ( "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" - gethTypes "github.com/ethereum/go-ethereum/core/types" "github.com/onsi/gomega" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -21,21 +20,19 @@ import ( "go.uber.org/zap/zaptest/observer" "golang.org/x/exp/maps" + commonconfig "github.com/smartcontractkit/chainlink-common/pkg/config" + "github.com/smartcontractkit/chainlink-common/pkg/services" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/utils/mailbox/mailboxtest" "github.com/jmoiron/sqlx" - commonconfig "github.com/smartcontractkit/chainlink-common/pkg/config" - "github.com/smartcontractkit/chainlink-common/pkg/services" "github.com/smartcontractkit/chainlink-common/pkg/utils/mailbox" "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" htmocks "github.com/smartcontractkit/chainlink/v2/common/headtracker/mocks" commontypes "github.com/smartcontractkit/chainlink/v2/common/headtracker/types" - "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/evmtest" - - evmclient "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client" evmclimocks "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client/mocks" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/config/toml" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/headtracker" @@ -45,11 +42,13 @@ import ( evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils" ubig "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils/big" + "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/evmtest" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" ) -func firstHead(t *testing.T, db *sqlx.DB) (h evmtypes.Head) { - if err := db.Get(&h, `SELECT * FROM evm.heads ORDER BY number ASC LIMIT 1`); err != nil { +func firstHead(t *testing.T, db *sqlx.DB) *evmtypes.Head { + h := new(evmtypes.Head) + if err := db.Get(h, `SELECT * FROM evm.heads ORDER BY number ASC LIMIT 1`); err != nil { t.Fatal(err) } return h @@ -578,27 +577,10 @@ func TestHeadTracker_SwitchesToLongestChainWithHeadSamplingEnabled(t *testing.T) 
checker.On("OnNewLongestChain", mock.Anything, mock.Anything). Run(func(args mock.Arguments) { h := args.Get(1).(*evmtypes.Head) + // This is the new longest chain [0, 5], check that it came with its parents + assert.Equal(t, uint32(6), h.ChainLength()) + assertChainWithParents(t, blocksForked, 5, 1, h) - assert.Equal(t, int64(5), h.Number) - assert.Equal(t, blocksForked.Head(5).Hash, h.Hash) - - // This is the new longest chain, check that it came with its parents - if !assert.NotNil(t, h.Parent) { - return - } - assert.Equal(t, h.Parent.Hash, blocksForked.Head(4).Hash) - if !assert.NotNil(t, h.Parent.Parent) { - return - } - assert.Equal(t, h.Parent.Parent.Hash, blocksForked.Head(3).Hash) - if !assert.NotNil(t, h.Parent.Parent.Parent) { - return - } - assert.Equal(t, h.Parent.Parent.Parent.Hash, blocksForked.Head(2).Hash) - if !assert.NotNil(t, h.Parent.Parent.Parent.Parent) { - return - } - assert.Equal(t, h.Parent.Parent.Parent.Parent.Hash, blocksForked.Head(1).Hash) lastLongestChainAwaiter.ItHappened() }).Return().Once() @@ -639,6 +621,16 @@ func TestHeadTracker_SwitchesToLongestChainWithHeadSamplingEnabled(t *testing.T) } } +func assertChainWithParents(t testing.TB, blocks *blocks, startBN, endBN uint64, h *evmtypes.Head) { + for blockNumber := startBN; blockNumber >= endBN; blockNumber-- { + assert.NotNil(t, h) + assert.Equal(t, blockNumber, uint64(h.Number)) + assert.Equal(t, blocks.Head(blockNumber).Hash, h.Hash) + // move to parent + h = h.Parent.Load() + } +} + func TestHeadTracker_SwitchesToLongestChainWithHeadSamplingDisabled(t *testing.T) { t.Parallel() @@ -725,34 +717,13 @@ func TestHeadTracker_SwitchesToLongestChainWithHeadSamplingDisabled(t *testing.T checker.On("OnNewLongestChain", mock.Anything, mock.Anything). Run(func(args mock.Arguments) { h := args.Get(1).(*evmtypes.Head) - require.Equal(t, int64(4), h.Number) - require.Equal(t, blocks.Head(4).Hash, h.Hash) - - // Check that the block came with its parents - require.NotNil(t, h.Parent) - require.Equal(t, h.Parent.Hash, blocks.Head(3).Hash) - require.NotNil(t, h.Parent.Parent.Hash) - require.Equal(t, h.Parent.Parent.Hash, blocks.Head(2).Hash) - require.NotNil(t, h.Parent.Parent.Parent) - require.Equal(t, h.Parent.Parent.Parent.Hash, blocks.Head(1).Hash) + assertChainWithParents(t, blocks, 4, 1, h) }).Return().Once() checker.On("OnNewLongestChain", mock.Anything, mock.Anything). 
Run(func(args mock.Arguments) { h := args.Get(1).(*evmtypes.Head) - - require.Equal(t, int64(5), h.Number) - require.Equal(t, blocksForked.Head(5).Hash, h.Hash) - - // This is the new longest chain, check that it came with its parents - require.NotNil(t, h.Parent) - require.Equal(t, h.Parent.Hash, blocksForked.Head(4).Hash) - require.NotNil(t, h.Parent.Parent) - require.Equal(t, h.Parent.Parent.Hash, blocksForked.Head(3).Hash) - require.NotNil(t, h.Parent.Parent.Parent) - require.Equal(t, h.Parent.Parent.Parent.Hash, blocksForked.Head(2).Hash) - require.NotNil(t, h.Parent.Parent.Parent.Parent) - require.Equal(t, h.Parent.Parent.Parent.Parent.Hash, blocksForked.Head(1).Hash) + assertChainWithParents(t, blocksForked, 5, 1, h) lastLongestChainAwaiter.ItHappened() }).Return().Once() @@ -811,52 +782,37 @@ func TestHeadTracker_Backfill(t *testing.T) { now := uint64(time.Now().UTC().Unix()) - gethHead0 := &gethTypes.Header{ - Number: big.NewInt(0), - ParentHash: common.BigToHash(big.NewInt(0)), - Time: now, - } - head0 := evmtypes.NewHead(gethHead0.Number, utils.NewHash(), gethHead0.ParentHash, gethHead0.Time, ubig.New(testutils.FixtureChainID)) + head0 := evmtypes.NewHead(big.NewInt(0), utils.NewHash(), common.BigToHash(big.NewInt(0)), now, ubig.New(testutils.FixtureChainID)) - h1 := *testutils.Head(1) + h1 := testutils.Head(1) h1.ParentHash = head0.Hash - gethHead8 := &gethTypes.Header{ - Number: big.NewInt(8), - ParentHash: utils.NewHash(), - Time: now, - } - head8 := evmtypes.NewHead(gethHead8.Number, utils.NewHash(), gethHead8.ParentHash, gethHead8.Time, ubig.New(testutils.FixtureChainID)) + head8 := evmtypes.NewHead(big.NewInt(8), utils.NewHash(), utils.NewHash(), now, ubig.New(testutils.FixtureChainID)) - h9 := *testutils.Head(9) + h9 := testutils.Head(9) h9.ParentHash = head8.Hash - gethHead10 := &gethTypes.Header{ - Number: big.NewInt(10), - ParentHash: h9.Hash, - Time: now, - } - head10 := evmtypes.NewHead(gethHead10.Number, utils.NewHash(), gethHead10.ParentHash, gethHead10.Time, ubig.New(testutils.FixtureChainID)) + head10 := evmtypes.NewHead(big.NewInt(10), utils.NewHash(), h9.Hash, now, ubig.New(testutils.FixtureChainID)) - h11 := *testutils.Head(11) + h11 := testutils.Head(11) h11.ParentHash = head10.Hash - h12 := *testutils.Head(12) + h12 := testutils.Head(12) h12.ParentHash = h11.Hash - h13 := *testutils.Head(13) + h13 := testutils.Head(13) h13.ParentHash = h12.Hash - h14Orphaned := *testutils.Head(14) + h14Orphaned := testutils.Head(14) h14Orphaned.ParentHash = h13.Hash - h14 := *testutils.Head(14) + h14 := testutils.Head(14) h14.ParentHash = h13.Hash - h15 := *testutils.Head(15) + h15 := testutils.Head(15) h15.ParentHash = h14.Hash - heads := []evmtypes.Head{ + heads := []*evmtypes.Head{ h9, h11, h12, @@ -869,7 +825,7 @@ func TestHeadTracker_Backfill(t *testing.T) { ctx := tests.Context(t) type opts struct { - Heads []evmtypes.Head + Heads []*evmtypes.Head FinalityTagEnabled bool FinalizedBlockOffset uint32 FinalityDepth uint32 @@ -889,7 +845,7 @@ func TestHeadTracker_Backfill(t *testing.T) { db := pgtest.NewSqlxDB(t) orm := headtracker.NewORM(*testutils.FixtureChainID, db) for i := range opts.Heads { - require.NoError(t, orm.IdempotentInsertHead(tests.Context(t), &opts.Heads[i])) + require.NoError(t, orm.IdempotentInsertHead(tests.Context(t), opts.Heads[i])) } ethClient := testutils.NewEthClientMock(t) ethClient.On("ConfiguredChainID", mock.Anything).Return(evmcfg.EVM().ChainID(), nil) @@ -904,72 +860,70 @@ func TestHeadTracker_Backfill(t *testing.T) { const expectedError = 
"failed to fetch latest finalized block" htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(nil, errors.New(expectedError)).Once() - err := htu.headTracker.Backfill(ctx, &h12) + err := htu.headTracker.Backfill(ctx, h12) require.ErrorContains(t, err, expectedError) }) t.Run("returns error if latestFinalized is not valid", func(t *testing.T) { htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true}) htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(nil, nil).Once() - err := htu.headTracker.Backfill(ctx, &h12) + err := htu.headTracker.Backfill(ctx, h12) require.EqualError(t, err, "failed to calculate finalized block: failed to get valid latest finalized block") }) t.Run("Returns error if finality gap is too big", func(t *testing.T) { htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true, MaxAllowedFinalityDepth: 2}) - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h9, nil).Once() + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h9, nil).Once() - err := htu.headTracker.Backfill(ctx, &h12) + err := htu.headTracker.Backfill(ctx, h12) require.EqualError(t, err, "gap between latest finalized block (9) and current head (12) is too large (> 2)") }) t.Run("Returns error if finalized head is ahead of canonical", func(t *testing.T) { htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true}) - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h14Orphaned, nil).Once() + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h14Orphaned, nil).Once() - err := htu.headTracker.Backfill(ctx, &h12) + err := htu.headTracker.Backfill(ctx, h12) require.EqualError(t, err, "invariant violation: expected head of canonical chain to be ahead of the latestFinalized") }) t.Run("Returns error if finalizedHead is not present in the canonical chain", func(t *testing.T) { htu := newHeadTrackerUniverse(t, opts{Heads: heads, FinalityTagEnabled: true}) - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h14Orphaned, nil).Once() + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h14Orphaned, nil).Once() - err := htu.headTracker.Backfill(ctx, &h15) + err := htu.headTracker.Backfill(ctx, h15) require.EqualError(t, err, "expected finalized block to be present in canonical chain") }) t.Run("Marks all blocks in chain that are older than finalized", func(t *testing.T) { htu := newHeadTrackerUniverse(t, opts{Heads: heads, FinalityTagEnabled: true}) - assertFinalized := func(expectedFinalized bool, msg string, heads ...evmtypes.Head) { + assertFinalized := func(expectedFinalized bool, msg string, heads ...*evmtypes.Head) { for _, h := range heads { storedHead := htu.headSaver.Chain(h.Hash) - assert.Equal(t, expectedFinalized, storedHead != nil && storedHead.IsFinalized, msg, "block_number", h.Number) + assert.Equal(t, expectedFinalized, storedHead != nil && storedHead.IsFinalized.Load(), msg, "block_number", h.Number) } } - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h14, nil).Once() - err := htu.headTracker.Backfill(ctx, &h15) + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h14, nil).Once() + err := htu.headTracker.Backfill(ctx, h15) require.NoError(t, err) assertFinalized(true, "expected heads to be marked as finalized after backfill", h14, h13, h12, h11) - assertFinalized(false, "expected heads to remain unfinalized", h15, head10) + assertFinalized(false, "expected heads to remain unfinalized", h15, &head10) }) t.Run("fetches a missing head", func(t *testing.T) { htu 
:= newHeadTrackerUniverse(t, opts{Heads: heads, FinalityTagEnabled: true}) - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h9, nil).Once() + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h9, nil).Once() htu.ethClient.On("HeadByHash", mock.Anything, head10.Hash). Return(&head10, nil) - err := htu.headTracker.Backfill(ctx, &h12) + err := htu.headTracker.Backfill(ctx, h12) require.NoError(t, err) h := htu.headSaver.Chain(h12.Hash) - assert.Equal(t, int64(12), h.Number) - require.NotNil(t, h.Parent) - assert.Equal(t, int64(11), h.Parent.Number) - require.NotNil(t, h.Parent.Parent) - assert.Equal(t, int64(10), h.Parent.Parent.Number) - require.NotNil(t, h.Parent.Parent.Parent) - assert.Equal(t, int64(9), h.Parent.Parent.Parent.Number) + for expectedBlockNumber := int64(12); expectedBlockNumber >= 9; expectedBlockNumber-- { + require.NotNil(t, h) + assert.Equal(t, expectedBlockNumber, h.Number) + h = h.Parent.Load() + } writtenHead, err := htu.orm.HeadByHash(tests.Context(t), head10.Hash) require.NoError(t, err) @@ -984,7 +938,7 @@ func TestHeadTracker_Backfill(t *testing.T) { htu.ethClient.On("HeadByHash", mock.Anything, head8.Hash). Return(&head8, nil) - err := htu.headTracker.Backfill(ctx, &h15) + err := htu.headTracker.Backfill(ctx, h15) require.NoError(t, err) h := htu.headSaver.Chain(h15.Hash) @@ -1005,7 +959,7 @@ func TestHeadTracker_Backfill(t *testing.T) { Return(nil, ethereum.NotFound). Once() - err := htu.headTracker.Backfill(ctx, &h12) + err := htu.headTracker.Backfill(ctx, h12) require.Error(t, err) require.ErrorContains(t, err, "fetchAndSaveHead failed: not found") @@ -1027,7 +981,7 @@ func TestHeadTracker_Backfill(t *testing.T) { cancel() }) - err := htu.headTracker.Backfill(lctx, &h12) + err := htu.headTracker.Backfill(lctx, h12) require.Error(t, err) require.ErrorContains(t, err, "fetchAndSaveHead failed: context canceled") @@ -1039,12 +993,12 @@ func TestHeadTracker_Backfill(t *testing.T) { }) t.Run("abandons backfill and returns error when fetching a block by hash fails, indicating a reorg", func(t *testing.T) { htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true}) - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h11, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(&h14, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(&h13, nil).Once() + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h11, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(h14, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(h13, nil).Once() htu.ethClient.On("HeadByHash", mock.Anything, h12.Hash).Return(nil, errors.New("not found")).Once() - err := htu.headTracker.Backfill(ctx, &h15) + err := htu.headTracker.Backfill(ctx, h15) require.Error(t, err) require.ErrorContains(t, err, "fetchAndSaveHead failed: not found") @@ -1056,83 +1010,83 @@ func TestHeadTracker_Backfill(t *testing.T) { assert.Equal(t, int64(13), h.EarliestInChain().BlockNumber()) }) t.Run("marks head as finalized, if latestHead = finalizedHead (0 finality depth)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{Heads: []evmtypes.Head{h15}, FinalityTagEnabled: true}) + htu := newHeadTrackerUniverse(t, opts{Heads: []*evmtypes.Head{h15}, FinalityTagEnabled: true}) finalizedH15 := h15 // copy h15 to have different addresses - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&finalizedH15, nil).Once() - err := 
htu.headTracker.Backfill(ctx, &h15) + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(finalizedH15, nil).Once() + err := htu.headTracker.Backfill(ctx, h15) require.NoError(t, err) h := htu.headSaver.LatestChain() // Should contain 14, 13 (15 was never added). When trying to get the parent of h13 by hash, a reorg happened and backfill exited. assert.Equal(t, 1, int(h.ChainLength())) - assert.True(t, h.IsFinalized) + assert.True(t, h.IsFinalized.Load()) assert.Equal(t, h15.BlockNumber(), h.BlockNumber()) assert.Equal(t, h15.Hash, h.Hash) }) t.Run("marks block as finalized according to FinalizedBlockOffset (finality tag)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{Heads: []evmtypes.Head{h15}, FinalityTagEnabled: true, FinalizedBlockOffset: 2}) - htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&h14, nil).Once() + htu := newHeadTrackerUniverse(t, opts{Heads: []*evmtypes.Head{h15}, FinalityTagEnabled: true, FinalizedBlockOffset: 2}) + htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h14, nil).Once() // calculateLatestFinalizedBlock fetches blocks at LatestFinalized - FinalizedBlockOffset - htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(h12.Number)).Return(&h12, nil).Once() + htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(h12.Number)).Return(h12, nil).Once() // backfill from 15 to 12 - htu.ethClient.On("HeadByHash", mock.Anything, h12.Hash).Return(&h12, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(&h13, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(&h14, nil).Once() - err := htu.headTracker.Backfill(ctx, &h15) + htu.ethClient.On("HeadByHash", mock.Anything, h12.Hash).Return(h12, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(h13, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(h14, nil).Once() + err := htu.headTracker.Backfill(ctx, h15) require.NoError(t, err) h := htu.headSaver.LatestChain() // h - must contain 15, 14, 13, 12 and only 12 is finalized assert.Equal(t, 4, int(h.ChainLength())) - for ; h.Hash != h12.Hash; h = h.Parent { - assert.False(t, h.IsFinalized) + for ; h.Hash != h12.Hash; h = h.Parent.Load() { + assert.False(t, h.IsFinalized.Load()) } - assert.True(t, h.IsFinalized) + assert.True(t, h.IsFinalized.Load()) assert.Equal(t, h12.BlockNumber(), h.BlockNumber()) assert.Equal(t, h12.Hash, h.Hash) }) t.Run("marks block as finalized according to FinalizedBlockOffset (finality depth)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{Heads: []evmtypes.Head{h15}, FinalityDepth: 1, FinalizedBlockOffset: 2}) - htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(12)).Return(&h12, nil).Once() + htu := newHeadTrackerUniverse(t, opts{Heads: []*evmtypes.Head{h15}, FinalityDepth: 1, FinalizedBlockOffset: 2}) + htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(12)).Return(h12, nil).Once() // backfill from 15 to 12 - htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(&h14, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(&h13, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h12.Hash).Return(&h12, nil).Once() - err := htu.headTracker.Backfill(ctx, &h15) + htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(h14, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(h13, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h12.Hash).Return(h12, nil).Once() + err := 
htu.headTracker.Backfill(ctx, h15) require.NoError(t, err) h := htu.headSaver.LatestChain() // h - must contain 15, 14, 13, 12 and only 12 is finalized assert.Equal(t, 4, int(h.ChainLength())) - for ; h.Hash != h12.Hash; h = h.Parent { - assert.False(t, h.IsFinalized) + for ; h.Hash != h12.Hash; h = h.Parent.Load() { + assert.False(t, h.IsFinalized.Load()) } - assert.True(t, h.IsFinalized) + assert.True(t, h.IsFinalized.Load()) assert.Equal(t, h12.BlockNumber(), h.BlockNumber()) assert.Equal(t, h12.Hash, h.Hash) }) t.Run("marks block as finalized according to FinalizedBlockOffset even with instant finality", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{Heads: []evmtypes.Head{h15}, FinalityDepth: 0, FinalizedBlockOffset: 2}) - htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(13)).Return(&h13, nil).Once() + htu := newHeadTrackerUniverse(t, opts{Heads: []*evmtypes.Head{h15}, FinalityDepth: 0, FinalizedBlockOffset: 2}) + htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(13)).Return(h13, nil).Once() // backfill from 15 to 13 - htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(&h14, nil).Once() - htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(&h13, nil).Once() - err := htu.headTracker.Backfill(ctx, &h15) + htu.ethClient.On("HeadByHash", mock.Anything, h14.Hash).Return(h14, nil).Once() + htu.ethClient.On("HeadByHash", mock.Anything, h13.Hash).Return(h13, nil).Once() + err := htu.headTracker.Backfill(ctx, h15) require.NoError(t, err) h := htu.headSaver.LatestChain() // h - must contain 15, 14, 13, only 13 is finalized assert.Equal(t, 3, int(h.ChainLength())) - for ; h.Hash != h13.Hash; h = h.Parent { - assert.False(t, h.IsFinalized) + for ; h.Hash != h13.Hash; h = h.Parent.Load() { + assert.False(t, h.IsFinalized.Load()) } - assert.True(t, h.IsFinalized) + assert.True(t, h.IsFinalized.Load()) assert.Equal(t, h13.BlockNumber(), h.BlockNumber()) assert.Equal(t, h13.Hash, h.Hash) }) @@ -1153,7 +1107,7 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { h13.ParentHash = h12.Hash type opts struct { - Heads []evmtypes.Head + Heads []*evmtypes.Head FinalityTagEnabled bool FinalizedBlockOffset uint32 FinalityDepth uint32 @@ -1169,7 +1123,7 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { db := pgtest.NewSqlxDB(t) orm := headtracker.NewORM(*testutils.FixtureChainID, db) for i := range opts.Heads { - require.NoError(t, orm.IdempotentInsertHead(tests.Context(t), &opts.Heads[i])) + require.NoError(t, orm.IdempotentInsertHead(tests.Context(t), opts.Heads[i])) } ethClient := evmtest.NewEthClientMock(t) ethClient.On("ConfiguredChainID", mock.Anything).Return(testutils.FixtureChainID, nil) @@ -1221,7 +1175,7 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { assert.Equal(t, actualLF, h11) }) t.Run("returns latest finalized block with offset from cache (finality tag)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true, FinalizedBlockOffset: 1, Heads: []evmtypes.Head{*h13, *h12, *h11}}) + htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true, FinalizedBlockOffset: 1, Heads: []*evmtypes.Head{h13, h12, h11}}) htu.ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(h13, nil).Once() htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h12, nil).Once() @@ -1231,7 +1185,7 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { assert.Equal(t, actualLF.Number, h11.Number) }) t.Run("returns latest finalized block with offset from RPC (finality 
tag)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true, FinalizedBlockOffset: 2, Heads: []evmtypes.Head{*h13, *h12, *h11}}) + htu := newHeadTrackerUniverse(t, opts{FinalityTagEnabled: true, FinalizedBlockOffset: 2, Heads: []*evmtypes.Head{h13, h12, h11}}) htu.ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(h13, nil).Once() htu.ethClient.On("LatestFinalizedBlock", mock.Anything).Return(h12, nil).Once() h10 := testutils.Head(10) @@ -1252,7 +1206,7 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { assert.Equal(t, actualLF.Number, h13.Number) }) t.Run("returns latest finalized block with offset from cache (finality depth)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{FinalityDepth: 1, FinalizedBlockOffset: 1, Heads: []evmtypes.Head{*h13, *h12, *h11}}) + htu := newHeadTrackerUniverse(t, opts{FinalityDepth: 1, FinalizedBlockOffset: 1, Heads: []*evmtypes.Head{h13, h12, h11}}) htu.ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(h13, nil).Once() actualL, actualLF, err := htu.headTracker.LatestAndFinalizedBlock(ctx) @@ -1261,7 +1215,7 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { assert.Equal(t, actualLF.Number, h11.Number) }) t.Run("returns latest finalized block with offset from RPC (finality depth)", func(t *testing.T) { - htu := newHeadTrackerUniverse(t, opts{FinalityDepth: 1, FinalizedBlockOffset: 2, Heads: []evmtypes.Head{*h13, *h12, *h11}}) + htu := newHeadTrackerUniverse(t, opts{FinalityDepth: 1, FinalizedBlockOffset: 2, Heads: []*evmtypes.Head{h13, h12, h11}}) htu.ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(h13, nil).Once() h10 := testutils.Head(10) htu.ethClient.On("HeadByNumber", mock.Anything, big.NewInt(10)).Return(h10, nil).Once() @@ -1273,47 +1227,6 @@ func TestHeadTracker_LatestAndFinalizedBlock(t *testing.T) { }) } -// BenchmarkHeadTracker_Backfill - benchmarks HeadTracker's Backfill with focus on efficiency after initial -// backfill on start up -func BenchmarkHeadTracker_Backfill(b *testing.B) { - evmcfg := testutils.NewTestChainScopedConfig(b, func(c *toml.EVMConfig) { - c.FinalityTagEnabled = ptr(true) - }) - db := pgtest.NewSqlxDB(b) - chainID := big.NewInt(evmclient.NullClientChainID) - orm := headtracker.NewORM(*chainID, db) - ethClient := evmclimocks.NewClient(b) - ethClient.On("ConfiguredChainID").Return(chainID) - ht := createHeadTracker(b, ethClient, evmcfg.EVM(), evmcfg.EVM().HeadTracker(), orm) - ctx := tests.Context(b) - makeHash := func(n int64) common.Hash { - return common.BigToHash(big.NewInt(n)) - } - const finalityDepth = 12000 // observed value on Arbitrum - makeBlock := func(n int64) *evmtypes.Head { - return &evmtypes.Head{Number: n, Hash: makeHash(n), ParentHash: makeHash(n - 1)} - } - latest := makeBlock(finalityDepth) - finalized := makeBlock(1) - ethClient.On("HeadByHash", mock.Anything, mock.Anything).Return(func(_ context.Context, hash common.Hash) (*evmtypes.Head, error) { - number := hash.Big().Int64() - return makeBlock(number), nil - }) - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(finalized, nil).Once() - // run initial backfill to populate the database - err := ht.headTracker.Backfill(ctx, latest) - require.NoError(b, err) - b.ResetTimer() - // focus benchmark on processing of a new latest block - for i := 0; i < b.N; i++ { - latest = makeBlock(int64(finalityDepth + i)) - finalized = makeBlock(int64(i + 1)) - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(finalized, 
nil).Once() - err := ht.headTracker.Backfill(ctx, latest) - require.NoError(b, err) - } -} - func createHeadTracker(t testing.TB, ethClient *evmclimocks.Client, config commontypes.Config, htConfig commontypes.HeadTrackerConfig, orm headtracker.ORM) *headTrackerUniverse { lggr, ob := logger.TestObserved(t, zap.DebugLevel) hb := headtracker.NewHeadBroadcaster(lggr) @@ -1417,15 +1330,15 @@ func (hb *headBuffer) Append(head *evmtypes.Head) { Number: head.Number, Hash: head.Hash, ParentHash: head.ParentHash, - Parent: head.Parent, Timestamp: time.Unix(int64(len(hb.Heads)), 0), EVMChainID: head.EVMChainID, } + cloned.Parent.Store(head.Parent.Load()) hb.Heads = append(hb.Heads, cloned) } type blocks struct { - t *testing.T + t testing.TB Hashes []common.Hash mHashes map[int64]common.Hash Heads map[int64]*evmtypes.Head @@ -1435,7 +1348,7 @@ func (b *blocks) Head(number uint64) *evmtypes.Head { return b.Heads[int64(number)] } -func NewBlocks(t *testing.T, numHashes int) *blocks { +func NewBlocks(t testing.TB, numHashes int) *blocks { hashes := make([]common.Hash, 0) heads := make(map[int64]*evmtypes.Head) for i := int64(0); i < int64(numHashes); i++ { @@ -1445,7 +1358,7 @@ func NewBlocks(t *testing.T, numHashes int) *blocks { heads[i] = &evmtypes.Head{Hash: hash, Number: i, Timestamp: time.Unix(i, 0), EVMChainID: ubig.New(testutils.FixtureChainID)} if i > 0 { parent := heads[i-1] - heads[i].Parent = parent + heads[i].Parent.Store(parent) heads[i].ParentHash = parent.Hash } } @@ -1474,7 +1387,7 @@ func (b *blocks) ForkAt(t *testing.T, blockNum int64, numHashes int) *blocks { } forked.Heads[blockNum].ParentHash = b.Heads[blockNum].ParentHash - forked.Heads[blockNum].Parent = b.Heads[blockNum].Parent + forked.Heads[blockNum].Parent.Store(b.Heads[blockNum].Parent.Load()) return forked } @@ -1488,9 +1401,9 @@ func (b *blocks) NewHead(number uint64) *evmtypes.Head { Number: parent.Number + 1, Hash: testutils.NewHash(), ParentHash: parent.Hash, - Parent: parent, Timestamp: time.Unix(parent.Number+1, 0), EVMChainID: ubig.New(testutils.FixtureChainID), } + head.Parent.Store(parent) return head } diff --git a/core/chains/evm/headtracker/heads.go b/core/chains/evm/headtracker/heads.go index a61e55dcd28..c3492f9a595 100644 --- a/core/chains/evm/headtracker/heads.go +++ b/core/chains/evm/headtracker/heads.go @@ -1,7 +1,8 @@ package headtracker import ( - "sort" + "container/heap" + "fmt" "sync" "github.com/ethereum/go-ethereum/common" @@ -17,7 +18,7 @@ type Heads interface { HeadByHash(hash common.Hash) *evmtypes.Head // AddHeads adds newHeads to the collection, eliminates duplicates, // sorts by head number, fixes parents and cuts off old heads (historyDepth). - AddHeads(newHeads ...*evmtypes.Head) + AddHeads(newHeads ...*evmtypes.Head) error // Count returns number of heads in the collection. Count() int // MarkFinalized - finds `finalized` in the LatestHead and marks it and all direct ancestors as finalized. 
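For context on the next hunk: the rewritten `heads` collection replaces the old sort-and-deep-copy approach with three indexes kept consistent under one mutex: a min-heap ordered by block number (`headsAsc`), a by-hash map (`headsByHash`), and a parent-hash-to-children map (`headsByParent`). The sketch below illustrates only the heap-driven pruning idea using Go's container/heap; the `block` type and all names in it are simplified stand-ins for `evmtypes.Head`, not code from this patch.

package main

import (
	"container/heap"
	"fmt"
)

// block is an illustrative stand-in for evmtypes.Head; only Number matters here.
type block struct{ Number int64 }

// minHeap plays the role of headsHeap: ascending by Number, so the oldest
// block sits at the root and is the first candidate for pruning.
type minHeap []*block

func (h minHeap) Len() int           { return len(h) }
func (h minHeap) Less(i, j int) bool { return h[i].Number < h[j].Number }
func (h minHeap) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }
func (h *minHeap) Push(v any)        { *h = append(*h, v.(*block)) }
func (h *minHeap) Pop() any {
	old := *h
	n := len(old) - 1
	v := old[n]
	old[n] = nil // drop the reference so the backing array does not retain it
	*h = old[:n]
	return v
}

func main() {
	h := &minHeap{}
	for _, n := range []int64{5, 1, 4, 2, 3} {
		heap.Push(h, &block{Number: n})
	}
	// Prune everything older than minBlockToKeep, as MarkFinalized does:
	// peeking at (*h)[0] is O(1) and each Pop is O(log n).
	const minBlockToKeep = 3
	for h.Len() > 0 && (*h)[0].Number < minBlockToKeep {
		fmt.Println("pruned block", heap.Pop(h).(*block).Number) // 1, then 2
	}
	fmt.Println("remaining:", h.Len()) // 3
}

Compared with re-sorting a slice on every insertion, this keeps each AddHeads at O(log n) and makes trimming in MarkFinalized proportional to the number of blocks actually removed.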
@@ -26,114 +27,158 @@ type Heads interface { } type heads struct { - heads []*evmtypes.Head - headsMap map[common.Hash]*evmtypes.Head - mu sync.RWMutex + highest *evmtypes.Head + headsAsc *headsHeap + headsByHash map[common.Hash]*evmtypes.Head + headsByParent map[common.Hash]map[common.Hash]*evmtypes.Head + mu sync.RWMutex } func NewHeads() Heads { - return &heads{} + return &heads{ + headsAsc: &headsHeap{}, + headsByHash: make(map[common.Hash]*evmtypes.Head), + headsByParent: map[common.Hash]map[common.Hash]*evmtypes.Head{}, + } } func (h *heads) LatestHead() *evmtypes.Head { h.mu.RLock() defer h.mu.RUnlock() - if len(h.heads) == 0 { - return nil - } - return h.heads[0] + return h.highest } func (h *heads) HeadByHash(hash common.Hash) *evmtypes.Head { h.mu.RLock() defer h.mu.RUnlock() - if h.headsMap == nil { + if h.headsByHash == nil { return nil } - return h.headsMap[hash] + return h.headsByHash[hash] } func (h *heads) Count() int { h.mu.RLock() defer h.mu.RUnlock() - return len(h.heads) + return h.headsAsc.Len() } -// MarkFinalized - marks block with has equal to finalized and all it's direct ancestors as finalized. +// MarkFinalized - marks block with hash equal to finalized and all its direct ancestors as finalized. // Trims old blocks whose height is smaller than minBlockToKeep func (h *heads) MarkFinalized(finalized common.Hash, minBlockToKeep int64) bool { h.mu.Lock() defer h.mu.Unlock() - if len(h.heads) == 0 { + if len(h.headsByHash) == 0 { return false } - // deep copy to avoid race on head.Parent - h.heads, h.headsMap = deepCopy(h.heads, minBlockToKeep) - - finalizedHead, ok := h.headsMap[finalized] + finalizedHead, ok := h.headsByHash[finalized] if !ok { return false } - for finalizedHead != nil { - finalizedHead.IsFinalized = true - finalizedHead = finalizedHead.Parent + + markFinalized(finalizedHead) + + // remove all blocks that are older than minBlockToKeep + for h.headsAsc.Len() > 0 && h.headsAsc.Peek().Number < minBlockToKeep { + oldBlock := heap.Pop(h.headsAsc).(*evmtypes.Head) + delete(h.headsByHash, oldBlock.Hash) + // clear .Parent in oldBlock's children + for _, oldBlockChildren := range h.headsByParent[oldBlock.Hash] { + oldBlockChildren.Parent.Store(nil) + } + // heads sharing the same parent are expected to be of the same height, so we can remove the whole sibling set at once + delete(h.headsByParent, oldBlock.ParentHash) + } + + if h.highest.Number < minBlockToKeep { + h.highest = nil } return true } -func deepCopy(oldHeads []*evmtypes.Head, minBlockToKeep int64) ([]*evmtypes.Head, map[common.Hash]*evmtypes.Head) { - headsMap := make(map[common.Hash]*evmtypes.Head, len(oldHeads)) - heads := make([]*evmtypes.Head, 0, len(headsMap)) - for _, head := range oldHeads { - if head.Hash == head.ParentHash { - // shouldn't happen but it is untrusted input - continue - } - if head.BlockNumber() < minBlockToKeep { - // trim redundant blocks - continue - } - // copy all head objects to avoid races when a previous head chain is used - // elsewhere (since we mutate Parent here) - headCopy := *head - headCopy.Parent = nil // always build it from scratch in case it points to a head too old to be included - // map eliminates duplicates - // prefer head that was already in heads as it might have been marked as finalized on previous run - if _, ok := headsMap[head.Hash]; !ok { - headsMap[head.Hash] = &headCopy - heads = append(heads, &headCopy) +func markFinalized(head *evmtypes.Head) { + // we can assume that if a head was previously marked as finalized all its ancestors were marked as finalized + for head
!= nil && !head.IsFinalized.Load() { + head.IsFinalized.Store(true) + head = head.Parent.Load() + } +} + +func (h *heads) ensureNoCycles(newHead *evmtypes.Head) error { + if newHead.ParentHash == newHead.Hash { + return fmt.Errorf("cycle detected: newHeads reference itself newHead(%s)", newHead.String()) + } + if parent, ok := h.headsByHash[newHead.ParentHash]; ok { + if parent.Number >= newHead.Number { + return fmt.Errorf("potential cycle detected while adding newHead as child: %w", newPotentialCycleError(parent, newHead)) } } - // sort the heads as original slice might be out of order - sort.SliceStable(heads, func(i, j int) bool { - // sorting from the highest number to lowest - return heads[i].Number > heads[j].Number - }) - - // assign parents - for i := 0; i < len(heads); i++ { - head := heads[i] - parent, exists := headsMap[head.ParentHash] - if exists { - head.Parent = parent + for _, child := range h.headsByParent[newHead.Hash] { + if newHead.Number >= child.Number { + return fmt.Errorf("potential cycle detected while adding newHead as parent: %w", newPotentialCycleError(newHead, child)) } } - return heads, headsMap + return nil } -func (h *heads) AddHeads(newHeads ...*evmtypes.Head) { +func (h *heads) AddHeads(newHeads ...*evmtypes.Head) error { h.mu.Lock() defer h.mu.Unlock() - // deep copy to avoid race on head.Parent - h.heads, h.headsMap = deepCopy(append(h.heads, newHeads...), 0) + for _, newHead := range newHeads { + // skip blocks that were previously added + if _, ok := h.headsByHash[newHead.Hash]; ok { + continue + } + + if err := h.ensureNoCycles(newHead); err != nil { + return err + } + + // heads now owns the newHead - reset values that are populated by heads + newHead.IsFinalized.Store(false) + newHead.Parent.Store(nil) + + // prefer newer head to set as highest + if h.highest == nil || h.highest.Number <= newHead.Number { + h.highest = newHead + } + + heap.Push(h.headsAsc, newHead) + h.headsByHash[newHead.Hash] = newHead + siblings, ok := h.headsByParent[newHead.ParentHash] + if !ok { + siblings = make(map[common.Hash]*evmtypes.Head) + h.headsByParent[newHead.ParentHash] = siblings + } + siblings[newHead.Hash] = newHead + // populate reference to parent + if parent, ok := h.headsByHash[newHead.ParentHash]; ok { + newHead.Parent.Store(parent) + } + for _, child := range h.headsByParent[newHead.Hash] { + // ensure all children have reference to newHead + child.Parent.Store(newHead) + if child.IsFinalized.Load() { + // mark newHead as finalized if any of its children is finalized + markFinalized(newHead) + } + } + } + + return nil +} + +func newPotentialCycleError(parent, child *evmtypes.Head) error { + return fmt.Errorf("expected head number to strictly decrease in 'child -> parent' relation: "+ + "child(%s), parent(%s)", child.String(), parent.String()) } diff --git a/core/chains/evm/headtracker/heads_test.go b/core/chains/evm/headtracker/heads_test.go index 6c02c528ba2..92e4015d8c3 100644 --- a/core/chains/evm/headtracker/heads_test.go +++ b/core/chains/evm/headtracker/heads_test.go @@ -20,21 +20,29 @@ func TestHeads_LatestHead(t *testing.T) { t.Parallel() heads := headtracker.NewHeads() - heads.AddHeads(testutils.Head(100), testutils.Head(200), testutils.Head(300)) + assert.NoError(t, heads.AddHeads(testutils.Head(100), testutils.Head(200), testutils.Head(300))) latest := heads.LatestHead() require.NotNil(t, latest) require.Equal(t, int64(300), latest.Number) - heads.AddHeads(testutils.Head(250)) + assert.NoError(t, heads.AddHeads(testutils.Head(250))) latest = 
heads.LatestHead() require.NotNil(t, latest) require.Equal(t, int64(300), latest.Number) - heads.AddHeads(testutils.Head(400)) + assert.NoError(t, heads.AddHeads(testutils.Head(400))) latest = heads.LatestHead() require.NotNil(t, latest) require.Equal(t, int64(400), latest.Number) + + // if heads have the same height, LatestHead prefers most recent + newerH400 := testutils.Head(400) + assert.NoError(t, heads.AddHeads(newerH400)) + latest = heads.LatestHead() + require.NotNil(t, latest) + require.Equal(t, int64(400), latest.Number) + require.Equal(t, newerH400.Hash, latest.Hash) } func TestHeads_HeadByHash(t *testing.T) { @@ -46,7 +54,7 @@ func TestHeads_HeadByHash(t *testing.T) { testutils.Head(300), } heads := headtracker.NewHeads() - heads.AddHeads(testHeads...) + assert.NoError(t, heads.AddHeads(testHeads...)) head := heads.HeadByHash(testHeads[1].Hash) require.NotNil(t, head) @@ -62,10 +70,10 @@ func TestHeads_Count(t *testing.T) { heads := headtracker.NewHeads() require.Zero(t, heads.Count()) - heads.AddHeads(testutils.Head(100), testutils.Head(200), testutils.Head(300)) + assert.NoError(t, heads.AddHeads(testutils.Head(100), testutils.Head(200), testutils.Head(300))) require.Equal(t, 3, heads.Count()) - heads.AddHeads(testutils.Head(400)) + assert.NoError(t, heads.AddHeads(testutils.Head(400))) require.Equal(t, 4, heads.Count()) } @@ -77,11 +85,11 @@ func TestHeads_AddHeads(t *testing.T) { var testHeads []*evmtypes.Head var parentHash common.Hash - for i := 0; i < 5; i++ { - hash := utils.NewHash() + for i := 1; i < 6; i++ { + hash := common.BigToHash(big.NewInt(int64(i))) h := evmtypes.NewHead(big.NewInt(int64(i)), hash, parentHash, uint64(time.Now().Unix()), ubig.NewI(0)) testHeads = append(testHeads, &h) - if i == 2 { + if i == 3 { // uncled block h := evmtypes.NewHead(big.NewInt(int64(i)), uncleHash, parentHash, uint64(time.Now().Unix()), ubig.NewI(0)) testHeads = append(testHeads, &h) @@ -89,10 +97,10 @@ func TestHeads_AddHeads(t *testing.T) { parentHash = hash } - heads.AddHeads(testHeads...) + assert.NoError(t, heads.AddHeads(testHeads...)) require.Equal(t, 6, heads.Count()) // Add duplicates (should be ignored) - heads.AddHeads(testHeads[2:5]...) + assert.NoError(t, heads.AddHeads(testHeads[2:5]...)) require.Equal(t, 6, heads.Count()) head := heads.LatestHead() @@ -102,6 +110,26 @@ func TestHeads_AddHeads(t *testing.T) { head = heads.HeadByHash(uncleHash) require.NotNil(t, head) require.Equal(t, 3, int(head.ChainLength())) + // returns an error, if newHead creates cycle + t.Run("Returns an error, if newHead create cycle", func(t *testing.T) { + cycleHead := &evmtypes.Head{ + Hash: heads.LatestHead().EarliestInChain().ParentHash, + ParentHash: heads.LatestHead().Hash, + } + // 1. try adding in front + cycleHead.Number = heads.LatestHead().Number + 1 + assert.EqualError(t, heads.AddHeads(cycleHead), "potential cycle detected while adding newHead as parent: expected head number to strictly decrease in 'child -> parent' relation: child(Head{Number: 1, Hash: 0x0000000000000000000000000000000000000000000000000000000000000001, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000000}), parent(Head{Number: 6, Hash: 0x0000000000000000000000000000000000000000000000000000000000000000, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000005})") + // 2. 
try adding to back + cycleHead.Number = heads.LatestHead().EarliestInChain().Number - 1 + assert.EqualError(t, heads.AddHeads(cycleHead), "potential cycle detected while adding newHead as child: expected head number to strictly decrease in 'child -> parent' relation: child(Head{Number: 0, Hash: 0x0000000000000000000000000000000000000000000000000000000000000000, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000005}), parent(Head{Number: 5, Hash: 0x0000000000000000000000000000000000000000000000000000000000000005, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000004})") + // 3. try adding to back with reference to self + cycleHead = &evmtypes.Head{ + Number: 1000, + Hash: common.BigToHash(big.NewInt(1000)), + ParentHash: common.BigToHash(big.NewInt(1000)), + } + assert.EqualError(t, heads.AddHeads(cycleHead), "cycle detected: newHeads reference itself newHead(Head{Number: 1000, Hash: 0x00000000000000000000000000000000000000000000000000000000000003e8, ParentHash: 0x00000000000000000000000000000000000000000000000000000000000003e8})") + }) } func TestHeads_MarkFinalized(t *testing.T) { @@ -110,7 +138,7 @@ func TestHeads_MarkFinalized(t *testing.T) { heads := headtracker.NewHeads() // create chain - // H0 <- H1 <- H2 <- H3 <- H4 <- H5 + // H0 <- H1 <- H2 <- H3 <- H4 <- H5 - Canonical // \ \ // H1Uncle H2Uncle // @@ -127,35 +155,80 @@ func TestHeads_MarkFinalized(t *testing.T) { h5 := newHead(5, h4.Hash) h2Uncle := newHead(2, h1.Hash) - allHeads := []*evmtypes.Head{h0, h1, h1Uncle, h2, h2Uncle, h3, h4, h5} - heads.AddHeads(allHeads...) + assert.NoError(t, heads.AddHeads(h0, h1, h1Uncle, h2, h2Uncle, h3, h4, h5)) // mark h3 and all ancestors as finalized require.True(t, heads.MarkFinalized(h3.Hash, h1.BlockNumber()), "expected MarkFinalized succeed") - // original heads remain unchanged - for _, h := range allHeads { - assert.False(t, h.IsFinalized, "expected original heads to remain unfinalized") - } - // h0 is too old. 
It should not be available directly or through its children assert.Nil(t, heads.HeadByHash(h0.Hash)) - assert.Nil(t, heads.HeadByHash(h1.Hash).Parent) - assert.Nil(t, heads.HeadByHash(h1Uncle.Hash).Parent) - assert.Nil(t, heads.HeadByHash(h2Uncle.Hash).Parent.Parent) + assert.Nil(t, heads.HeadByHash(h1.Hash).Parent.Load()) + assert.Nil(t, heads.HeadByHash(h1Uncle.Hash).Parent.Load()) + assert.Nil(t, heads.HeadByHash(h2Uncle.Hash).Parent.Load().Parent.Load()) require.False(t, heads.MarkFinalized(utils.NewHash(), 0), "expected false if finalized hash was not found in existing LatestHead chain") ensureProperFinalization := func(t *testing.T) { t.Helper() for _, head := range []*evmtypes.Head{h5, h4} { - require.False(t, heads.HeadByHash(head.Hash).IsFinalized, "expected h4-h5 not to be finalized", head.BlockNumber()) + require.False(t, heads.HeadByHash(head.Hash).IsFinalized.Load(), "expected h4-h5 not to be finalized", head.BlockNumber()) } for _, head := range []*evmtypes.Head{h3, h2, h1} { - require.True(t, heads.HeadByHash(head.Hash).IsFinalized, "expected h3 and all ancestors to be finalized", head.BlockNumber()) + require.True(t, heads.HeadByHash(head.Hash).IsFinalized.Load(), "expected h3 and all ancestors to be finalized", head.BlockNumber()) } - require.False(t, heads.HeadByHash(h2Uncle.Hash).IsFinalized, "expected uncle block not to be marked as finalized") + require.False(t, heads.HeadByHash(h2Uncle.Hash).IsFinalized.Load(), "expected uncle block not to be marked as finalized") } t.Run("blocks were correctly marked as finalized", ensureProperFinalization) - heads.AddHeads(h0, h1, h2, h2Uncle, h3, h4, h5) + assert.NoError(t, heads.AddHeads(h0, h1, h2, h2Uncle, h3, h4, h5)) t.Run("blocks remain finalized after re adding them to the Heads", ensureProperFinalization) + + // ensure that IsFinalized is propagated when older blocks are added + // 1. remove all blocks older than 3 + heads.MarkFinalized(h3.Hash, 3) + // 2. ensure that h2 and h1 are no longer present + assert.Nil(t, heads.HeadByHash(h2.Hash)) + assert.Nil(t, heads.HeadByHash(h1.Hash)) + // 3. add blocks back, starting from the oldest + assert.NoError(t, heads.AddHeads(h1)) + assert.False(t, heads.HeadByHash(h1.Hash).IsFinalized.Load(), "expected h1 to not be finalized as it was not explicitly marked and there is no path to h3") + assert.NoError(t, heads.AddHeads(h2)) + // 4.
now h2 and h1 must be marked as finalized + assert.True(t, heads.HeadByHash(h1.Hash).IsFinalized.Load()) + assert.True(t, heads.HeadByHash(h2.Hash).IsFinalized.Load()) +} + +func BenchmarkEarliestHeadInChain(b *testing.B) { + const latestBlockNum = 200_000 + blocks := NewBlocks(b, latestBlockNum+1) + b.ResetTimer() + for i := 0; i < b.N; i++ { + latest := blocks.Head(latestBlockNum) + earliest := latest.EarliestHeadInChain() + // perform sanity check + assert.NotEqual(b, latest.BlockNumber(), earliest.BlockNumber()) + assert.NotEqual(b, latest.BlockHash(), earliest.BlockHash()) + } +} + +// BenchmarkHeads_SimulatedBackfill - benchmarks AddHeads & MarkFinalized as if they were performed by HeadTracker's backfill +func BenchmarkHeads_SimulatedBackfill(b *testing.B) { + makeHash := func(n int64) common.Hash { + return common.BigToHash(big.NewInt(n)) + } + makeHead := func(n int64) *evmtypes.Head { + return &evmtypes.Head{Number: n, Hash: makeHash(n), ParentHash: makeHash(n - 1)} + } + + const finalityDepth = 16_000 // observed value on Arbitrum + // populate with initial values + heads := headtracker.NewHeads() + for i := int64(1); i <= finalityDepth; i++ { + assert.NoError(b, heads.AddHeads(makeHead(i))) + } + heads.MarkFinalized(makeHash(1), 1) + // focus benchmark on processing of a new latest block + b.ResetTimer() + for i := int64(1); i <= int64(b.N); i++ { + assert.NoError(b, heads.AddHeads(makeHead(finalityDepth+i))) + heads.MarkFinalized(makeHash(i), i) + } } diff --git a/core/chains/evm/headtracker/heap.go b/core/chains/evm/headtracker/heap.go new file mode 100644 index 00000000000..572ed541dfa --- /dev/null +++ b/core/chains/evm/headtracker/heap.go @@ -0,0 +1,35 @@ +package headtracker + +import evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" + +type headsHeap struct { + values []*evmtypes.Head +} + +func (h *headsHeap) Len() int { + return len(h.values) +} + +func (h *headsHeap) Swap(i, j int) { + h.values[i], h.values[j] = h.values[j], h.values[i] +} + +func (h *headsHeap) Less(i, j int) bool { + return h.values[i].Number < h.values[j].Number +} + +func (h *headsHeap) Pop() any { + n := len(h.values) - 1 + old := h.values[n] + h.values[n] = nil + h.values = h.values[:n] + return old +} + +func (h *headsHeap) Push(v any) { + h.values = append(h.values, v.(*evmtypes.Head)) +} + +func (h *headsHeap) Peek() *evmtypes.Head { + return h.values[0] +} diff --git a/core/chains/evm/headtracker/types/types.go b/core/chains/evm/headtracker/types/types.go index 1a03f3cec6f..ca5a79fc68d 100644 --- a/core/chains/evm/headtracker/types/types.go +++ b/core/chains/evm/headtracker/types/types.go @@ -2,10 +2,13 @@ package types import ( "context" + "math/big" + "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/chainlink/v2/common/headtracker" + htrktypes "github.com/smartcontractkit/chainlink/v2/common/headtracker/types" evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" ) @@ -22,4 +25,5 @@ type ( HeadTrackable = headtracker.HeadTrackable[*evmtypes.Head, common.Hash] HeadListener = headtracker.HeadListener[*evmtypes.Head, common.Hash] HeadBroadcaster = headtracker.HeadBroadcaster[*evmtypes.Head, common.Hash] + Client = htrktypes.Client[*evmtypes.Head, ethereum.Subscription, *big.Int, common.Hash] ) diff --git a/core/chains/evm/log/broadcaster.go b/core/chains/evm/log/broadcaster.go index e7f02d1199c..3e37678bee3 100644 --- a/core/chains/evm/log/broadcaster.go +++ b/core/chains/evm/log/broadcaster.go @@
-590,7 +590,7 @@ func (b *broadcaster) onNewHeads() { b.logger.Errorf("Failed to query for log broadcasts, %v", err) return } - b.registrations.sendLogs(ctx, logs, *latestHead, broadcasts, b.orm) + b.registrations.sendLogs(ctx, logs, latestHead, broadcasts, b.orm) if err := b.orm.SetPendingMinBlock(ctx, nil); err != nil { b.logger.Errorw("Failed to set pending broadcasts number null", "err", err) } @@ -605,7 +605,7 @@ func (b *broadcaster) onNewHeads() { return } - b.registrations.sendLogs(ctx, logs, *latestHead, broadcasts, b.orm) + b.registrations.sendLogs(ctx, logs, latestHead, broadcasts, b.orm) } newMin := b.logPool.deleteOlderLogs(keptDepth) if err := b.orm.SetPendingMinBlock(ctx, newMin); err != nil { diff --git a/core/chains/evm/log/registrations.go b/core/chains/evm/log/registrations.go index 68dd93b9d88..01104349a6f 100644 --- a/core/chains/evm/log/registrations.go +++ b/core/chains/evm/log/registrations.go @@ -11,6 +11,7 @@ import ( pkgerrors "github.com/pkg/errors" "github.com/smartcontractkit/chainlink-common/pkg/logger" + evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated" @@ -215,7 +216,7 @@ func (r *registrations) isAddressRegistered(address common.Address) bool { return false } -func (r *registrations) sendLogs(ctx context.Context, logsToSend []logsOnBlock, latestHead evmtypes.Head, broadcasts []LogBroadcast, bc broadcastCreator) { +func (r *registrations) sendLogs(ctx context.Context, logsToSend []logsOnBlock, latestHead *evmtypes.Head, broadcasts []LogBroadcast, bc broadcastCreator) { broadcastsExisting := make(map[LogBroadcastAsKey]bool) for _, b := range broadcasts { broadcastsExisting[b.AsKey()] = b.Consumed @@ -387,7 +388,7 @@ type broadcastCreator interface { CreateBroadcast(ctx context.Context, blockHash common.Hash, blockNumber uint64, logIndex uint, jobID int32) error } -func (r *handler) sendLog(ctx context.Context, log types.Log, latestHead evmtypes.Head, +func (r *handler) sendLog(ctx context.Context, log types.Log, latestHead *evmtypes.Head, broadcasts map[LogBroadcastAsKey]bool, bc broadcastCreator, logger logger.Logger) { diff --git a/core/chains/evm/logpoller/log_poller.go b/core/chains/evm/logpoller/log_poller.go index 360511951ee..dd7e0c5242b 100644 --- a/core/chains/evm/logpoller/log_poller.go +++ b/core/chains/evm/logpoller/log_poller.go @@ -1037,7 +1037,7 @@ func (lp *logPoller) findBlockAfterLCA(ctx context.Context, current *evmtypes.He if err != nil { return nil, err } - blockAfterLCA := *current + blockAfterLCA := current // We expect reorgs up to the block after latestFinalizedBlock // We loop via parent instead of current so current always holds the LCA+1. // If the parent block number becomes < the first finalized block our reorg is too deep. @@ -1049,10 +1049,10 @@ func (lp *logPoller) findBlockAfterLCA(ctx context.Context, current *evmtypes.He } if parent.Hash == ourParentBlockHash.BlockHash { // If we do have the blockhash, return blockAfterLCA - return &blockAfterLCA, nil + return blockAfterLCA, nil } // Otherwise get a new parent and update blockAfterLCA. 
- blockAfterLCA = *parent + blockAfterLCA = parent parent, err = lp.ec.HeadByHash(ctx, parent.ParentHash) if err != nil { return nil, err diff --git a/core/chains/evm/logpoller/log_poller_internal_test.go b/core/chains/evm/logpoller/log_poller_internal_test.go index ca1bd72dd6c..620bbf14f41 100644 --- a/core/chains/evm/logpoller/log_poller_internal_test.go +++ b/core/chains/evm/logpoller/log_poller_internal_test.go @@ -569,7 +569,8 @@ func Test_latestBlockAndFinalityDepth(t *testing.T) { }) t.Run("headTracker returns valid chain", func(t *testing.T) { headTracker := htMocks.NewHeadTracker[*evmtypes.Head, common.Hash](t) - finalizedBlock := &evmtypes.Head{Number: 2, IsFinalized: true} + finalizedBlock := &evmtypes.Head{Number: 2} + finalizedBlock.IsFinalized.Store(true) head := &evmtypes.Head{Number: 10} headTracker.On("LatestAndFinalizedBlock", mock.Anything).Return(head, finalizedBlock, nil) diff --git a/core/chains/evm/txmgr/confirmer_test.go b/core/chains/evm/txmgr/confirmer_test.go index dc8e30f5f4b..d63f0cf1de0 100644 --- a/core/chains/evm/txmgr/confirmer_test.go +++ b/core/chains/evm/txmgr/confirmer_test.go @@ -148,27 +148,28 @@ func TestEthConfirmer_Lifecycle(t *testing.T) { err = ec.Start(ctx) require.Error(t, err) - latestFinalizedHead := evmtypes.Head{ - Number: 8, - Hash: testutils.NewHash(), - Parent: nil, - IsFinalized: true, // We are guaranteed to receive a latestFinalizedHead. + latestFinalizedHead := &evmtypes.Head{ + Number: 8, + Hash: testutils.NewHash(), } + // We are guaranteed to receive a latestFinalizedHead. + latestFinalizedHead.IsFinalized.Store(true) - head := evmtypes.Head{ + h9 := &evmtypes.Head{ + Hash: testutils.NewHash(), + Number: 9, + } + h9.Parent.Store(latestFinalizedHead) + head := &evmtypes.Head{ Hash: testutils.NewHash(), Number: 10, - Parent: &evmtypes.Head{ - Hash: testutils.NewHash(), - Number: 9, - Parent: &latestFinalizedHead, - }, } + head.Parent.Store(h9) - ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(&head, nil).Once() - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&latestFinalizedHead, nil).Once() + ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(head, nil).Once() + ethClient.On("LatestFinalizedBlock", mock.Anything).Return(latestFinalizedHead, nil).Once() - err = ec.ProcessHead(ctx, &head) + err = ec.ProcessHead(ctx, head) require.NoError(t, err) // Can successfully close once err = ec.Close() @@ -2742,34 +2743,33 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { ec := newEthConfirmer(t, txStore, ethClient, gconfig, config, ethKeyStore, nil) latestFinalizedHead := evmtypes.Head{ - Number: 8, - Hash: testutils.NewHash(), - Parent: nil, - IsFinalized: false, // We are guaranteed to receive a latestFinalizedHead. 
+ Number: 8, + Hash: testutils.NewHash(), } - head := evmtypes.Head{ + h8 := &evmtypes.Head{ + Number: 8, + Hash: testutils.NewHash(), + } + h9 := &evmtypes.Head{ + Hash: testutils.NewHash(), + Number: 9, + } + h9.Parent.Store(h8) + head := &evmtypes.Head{ Hash: testutils.NewHash(), Number: 10, - Parent: &evmtypes.Head{ - Hash: testutils.NewHash(), - Number: 9, - Parent: &evmtypes.Head{ - Number: 8, - Hash: testutils.NewHash(), - Parent: nil, - }, - }, } + head.Parent.Store(h9) t.Run("does nothing if there aren't any transactions", func(t *testing.T) { - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) }) t.Run("does nothing to unconfirmed transactions", func(t *testing.T) { etx := cltest.MustInsertUnconfirmedEthTxWithBroadcastLegacyAttempt(t, txStore, 0, fromAddress) // Do the thing - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -2781,7 +2781,7 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { mustInsertEthReceipt(t, txStore, head.Number, head.Hash, etx.TxAttempts[0].Hash) // Do the thing - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -2791,10 +2791,10 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { t.Run("does nothing to confirmed transactions that only have receipts older than the start of the chain", func(t *testing.T) { etx := cltest.MustInsertConfirmedEthTxWithLegacyAttempt(t, txStore, 3, 1, fromAddress) // Add receipt that is older than the lowest block of the chain - mustInsertEthReceipt(t, txStore, head.Parent.Parent.Number-1, testutils.NewHash(), etx.TxAttempts[0].Hash) + mustInsertEthReceipt(t, txStore, h8.Number-1, testutils.NewHash(), etx.TxAttempts[0].Hash) // Do the thing - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -2805,7 +2805,7 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { etx := cltest.MustInsertConfirmedEthTxWithLegacyAttempt(t, txStore, 4, 1, fromAddress) attempt := etx.TxAttempts[0] // Include one within head height but a different block hash - mustInsertEthReceipt(t, txStore, head.Parent.Number, testutils.NewHash(), attempt.Hash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, testutils.NewHash(), attempt.Hash) ethClient.On("SendTransactionReturnCode", mock.Anything, mock.MatchedBy(func(tx *types.Transaction) bool { atx, err := txmgr.GetGethSignedTx(attempt.SignedRawTx) @@ -2815,7 +2815,7 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { }), 
fromAddress).Return(commonclient.Successful, nil).Once() // Do the thing - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -2830,15 +2830,15 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { attempt := etx.TxAttempts[0] attemptHash := attempt.Hash // Add receipt that is older than the lowest block of the chain - mustInsertEthReceipt(t, txStore, head.Parent.Parent.Number-1, testutils.NewHash(), attemptHash) + mustInsertEthReceipt(t, txStore, h8.Number-1, testutils.NewHash(), attemptHash) // Include one within head height but a different block hash - mustInsertEthReceipt(t, txStore, head.Parent.Number, testutils.NewHash(), attemptHash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, testutils.NewHash(), attemptHash) ethClient.On("SendTransactionReturnCode", mock.Anything, mock.Anything, fromAddress).Return( commonclient.Successful, nil).Once() // Do the thing - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -2862,9 +2862,9 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { require.NoError(t, txStore.InsertTxAttempt(ctx, &attempt3)) // Receipt is within head height but a different block hash - mustInsertEthReceipt(t, txStore, head.Parent.Number, testutils.NewHash(), attempt2.Hash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, testutils.NewHash(), attempt2.Hash) // Receipt is within head height but a different block hash - mustInsertEthReceipt(t, txStore, head.Parent.Number, testutils.NewHash(), attempt3.Hash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, testutils.NewHash(), attempt3.Hash) ethClient.On("SendTransactionReturnCode", mock.Anything, mock.MatchedBy(func(tx *types.Transaction) bool { s, err := txmgr.GetGethSignedTx(attempt3.SignedRawTx) @@ -2873,7 +2873,7 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { }), fromAddress).Return(commonclient.Successful, nil).Once() // Do the thing - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -2893,7 +2893,7 @@ func TestEthConfirmer_EnsureConfirmedTransactionsInLongestChain(t *testing.T) { // Add receipt that is higher than head mustInsertEthReceipt(t, txStore, head.Number+1, testutils.NewHash(), attempt.Hash) - require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), &head, latestFinalizedHead.BlockNumber())) + require.NoError(t, ec.EnsureConfirmedTransactionsInLongestChain(tests.Context(t), head, latestFinalizedHead.BlockNumber())) etx, err := txStore.FindTxWithAttempts(ctx, etx.ID) require.NoError(t, err) @@ -3020,19 +3020,20 @@ func TestEthConfirmer_ResumePendingRuns(t *testing.T) { evmcfg := evmtest.NewChainScopedConfig(t, config) + h8 := 
&evmtypes.Head{ + Number: 8, + Hash: testutils.NewHash(), + } + h9 := &evmtypes.Head{ + Hash: testutils.NewHash(), + Number: 9, + } + h9.Parent.Store(h8) head := evmtypes.Head{ Hash: testutils.NewHash(), Number: 10, - Parent: &evmtypes.Head{ - Hash: testutils.NewHash(), - Number: 9, - Parent: &evmtypes.Head{ - Number: 8, - Hash: testutils.NewHash(), - Parent: nil, - }, - }, } + head.Parent.Store(h9) minConfirmations := int64(2) @@ -3254,10 +3255,10 @@ func TestEthConfirmer_ProcessStuckTransactions(t *testing.T) { // Update tx to signal callback once it is identified as terminally stuck pgtest.MustExec(t, db, `UPDATE evm.txes SET pipeline_task_run_id = $1, signal_callback = TRUE WHERE id = $2`, uuid.New(), tx.ID) head := evmtypes.Head{ - Hash: testutils.NewHash(), - Number: blockNum, - IsFinalized: true, + Hash: testutils.NewHash(), + Number: blockNum, } + head.IsFinalized.Store(true) ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(&head, nil).Once() ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&head, nil).Once() @@ -3282,10 +3283,10 @@ func TestEthConfirmer_ProcessStuckTransactions(t *testing.T) { require.Equal(t, bumpedFee.Legacy, latestAttempt.TxFee.Legacy) head = evmtypes.Head{ - Hash: testutils.NewHash(), - Number: blockNum + 1, - IsFinalized: true, + Hash: testutils.NewHash(), + Number: blockNum + 1, } + head.IsFinalized.Store(true) ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(&head, nil).Once() ethClient.On("LatestFinalizedBlock", mock.Anything).Return(&head, nil).Once() ethClient.On("SequenceAt", mock.Anything, mock.Anything, mock.Anything).Return(evmtypes.Nonce(1), nil) diff --git a/core/chains/evm/txmgr/evm_tx_store_test.go b/core/chains/evm/txmgr/evm_tx_store_test.go index c711c2788e8..e47387fb8d3 100644 --- a/core/chains/evm/txmgr/evm_tx_store_test.go +++ b/core/chains/evm/txmgr/evm_tx_store_test.go @@ -628,19 +628,20 @@ func TestORM_FindTxesPendingCallback(t *testing.T) { pgtest.MustExec(t, db, `SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`) pgtest.MustExec(t, db, `SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) + h8 := &evmtypes.Head{ + Number: 8, + Hash: testutils.NewHash(), + } + h9 := &evmtypes.Head{ + Hash: testutils.NewHash(), + Number: 9, + } + h9.Parent.Store(h8) head := evmtypes.Head{ - Hash: utils.NewHash(), + Hash: testutils.NewHash(), Number: 10, - Parent: &evmtypes.Head{ - Hash: utils.NewHash(), - Number: 9, - Parent: &evmtypes.Head{ - Number: 8, - Hash: utils.NewHash(), - Parent: nil, - }, - }, } + head.Parent.Store(h9) minConfirmations := int64(2) @@ -792,19 +793,20 @@ func TestORM_FindTransactionsConfirmedInBlockRange(t *testing.T) { ethClient := evmtest.NewEthClientMockWithDefaultChain(t) _, fromAddress := cltest.MustInsertRandomKeyReturningState(t, ethKeyStore) + h8 := &evmtypes.Head{ + Number: 8, + Hash: testutils.NewHash(), + } + h9 := &evmtypes.Head{ + Hash: testutils.NewHash(), + Number: 9, + } + h9.Parent.Store(h8) head := evmtypes.Head{ - Hash: utils.NewHash(), + Hash: testutils.NewHash(), Number: 10, - Parent: &evmtypes.Head{ - Hash: utils.NewHash(), - Number: 9, - Parent: &evmtypes.Head{ - Number: 8, - Hash: utils.NewHash(), - Parent: nil, - }, - }, } + head.Parent.Store(h9) t.Run("find all transactions confirmed in range", func(t *testing.T) { etx_8 := mustInsertConfirmedEthTxWithReceipt(t, txStore, fromAddress, 700, 8) diff --git a/core/chains/evm/txmgr/finalizer_test.go b/core/chains/evm/txmgr/finalizer_test.go index f83a53bf499..b91121d773f 100644 --- 
a/core/chains/evm/txmgr/finalizer_test.go +++ b/core/chains/evm/txmgr/finalizer_test.go @@ -39,15 +39,16 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { rpcBatchSize := uint32(1) ht := headtracker.NewSimulatedHeadTracker(ethClient, true, 0) + h99 := &evmtypes.Head{ + Hash: utils.NewHash(), + Number: 99, + } + h99.IsFinalized.Store(true) head := &evmtypes.Head{ Hash: utils.NewHash(), Number: 100, - Parent: &evmtypes.Head{ - Hash: utils.NewHash(), - Number: 99, - IsFinalized: true, - }, } + head.Parent.Store(h99) t.Run("returns not finalized for tx with receipt newer than finalized block", func(t *testing.T) { finalizer := txmgr.NewEvmFinalizer(logger.Test(t), testutils.FixtureChainID, rpcBatchSize, txStore, ethClient, ht) @@ -71,7 +72,7 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { // Insert receipt for unfinalized block num mustInsertEthReceipt(t, txStore, head.Number, head.Hash, attemptHash) ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head, nil).Once() - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent, nil).Once() + ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent.Load(), nil).Once() err := finalizer.ProcessHead(ctx, head) require.NoError(t, err) tx, err = txStore.FindTxWithIdempotencyKey(ctx, idempotencyKey, testutils.FixtureChainID) @@ -99,9 +100,9 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { } attemptHash := insertTxAndAttemptWithIdempotencyKey(t, txStore, tx, idempotencyKey) // Insert receipt for finalized block num - mustInsertEthReceipt(t, txStore, head.Parent.Number, utils.NewHash(), attemptHash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, utils.NewHash(), attemptHash) ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head, nil).Once() - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent, nil).Once() + ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent.Load(), nil).Once() err := finalizer.ProcessHead(ctx, head) require.NoError(t, err) tx, err = txStore.FindTxWithIdempotencyKey(ctx, idempotencyKey, testutils.FixtureChainID) @@ -129,9 +130,9 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { } attemptHash := insertTxAndAttemptWithIdempotencyKey(t, txStore, tx, idempotencyKey) // Insert receipt for finalized block num - mustInsertEthReceipt(t, txStore, head.Parent.Number, head.Parent.Hash, attemptHash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, head.Parent.Load().Hash, attemptHash) ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head, nil).Once() - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent, nil).Once() + ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent.Load(), nil).Once() err := finalizer.ProcessHead(ctx, head) require.NoError(t, err) tx, err = txStore.FindTxWithIdempotencyKey(ctx, idempotencyKey, testutils.FixtureChainID) @@ -160,7 +161,7 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { attemptHash := insertTxAndAttemptWithIdempotencyKey(t, txStore, tx, idempotencyKey) // Insert receipt for finalized block num receiptBlockHash1 := utils.NewHash() - mustInsertEthReceipt(t, txStore, head.Parent.Number-2, receiptBlockHash1, attemptHash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number-2, receiptBlockHash1, attemptHash) idempotencyKey = uuid.New().String() nonce = evmtypes.Nonce(1) tx = &txmgr.Tx{ @@ -176,7 +177,7 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { attemptHash = insertTxAndAttemptWithIdempotencyKey(t, 
txStore, tx, idempotencyKey) // Insert receipt for finalized block num receiptBlockHash2 := utils.NewHash() - mustInsertEthReceipt(t, txStore, head.Parent.Number-1, receiptBlockHash2, attemptHash) + mustInsertEthReceipt(t, txStore, head.Parent.Load().Number-1, receiptBlockHash2, attemptHash) // Separate batch calls will be made for each tx due to RPC batch size set to 1 when finalizer initialized above ethClient.On("BatchCallContext", mock.Anything, mock.IsType([]rpc.BatchElem{})).Run(func(args mock.Arguments) { rpcElements := args.Get(1).([]rpc.BatchElem) @@ -186,20 +187,20 @@ func TestFinalizer_MarkTxFinalized(t *testing.T) { require.Equal(t, false, rpcElements[0].Args[1]) reqBlockNum := rpcElements[0].Args[0].(string) - req1BlockNum := hexutil.EncodeBig(big.NewInt(head.Parent.Number - 2)) - req2BlockNum := hexutil.EncodeBig(big.NewInt(head.Parent.Number - 1)) + req1BlockNum := hexutil.EncodeBig(big.NewInt(head.Parent.Load().Number - 2)) + req2BlockNum := hexutil.EncodeBig(big.NewInt(head.Parent.Load().Number - 1)) var headResult evmtypes.Head if req1BlockNum == reqBlockNum { - headResult = evmtypes.Head{Number: head.Parent.Number - 2, Hash: receiptBlockHash1} + headResult = evmtypes.Head{Number: head.Parent.Load().Number - 2, Hash: receiptBlockHash1} } else if req2BlockNum == reqBlockNum { - headResult = evmtypes.Head{Number: head.Parent.Number - 1, Hash: receiptBlockHash2} + headResult = evmtypes.Head{Number: head.Parent.Load().Number - 1, Hash: receiptBlockHash2} } else { require.Fail(t, "unrecognized block hash") } rpcElements[0].Result = &headResult }).Return(nil).Twice() ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head, nil).Once() - ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent, nil).Once() + ethClient.On("LatestFinalizedBlock", mock.Anything).Return(head.Parent.Load(), nil).Once() err := finalizer.ProcessHead(ctx, head) require.NoError(t, err) tx, err = txStore.FindTxWithIdempotencyKey(ctx, idempotencyKey, testutils.FixtureChainID) diff --git a/core/chains/evm/txmgr/txmgr_test.go b/core/chains/evm/txmgr/txmgr_test.go index d4bfbffd12f..e9437960312 100644 --- a/core/chains/evm/txmgr/txmgr_test.go +++ b/core/chains/evm/txmgr/txmgr_test.go @@ -613,20 +613,21 @@ func TestTxm_GetTransactionStatus(t *testing.T) { gcfg := configtest.NewTestGeneralConfig(t) cfg := evmtest.NewChainScopedConfig(t, gcfg) + h99 := &evmtypes.Head{ + Hash: utils.NewHash(), + Number: 99, + } + h99.IsFinalized.Store(true) head := &evmtypes.Head{ Hash: utils.NewHash(), Number: 100, - Parent: &evmtypes.Head{ - Hash: utils.NewHash(), - Number: 99, - IsFinalized: true, - }, } + head.Parent.Store(h99) ethClient := evmtest.NewEthClientMockWithDefaultChain(t) ethClient.On("PendingNonceAt", mock.Anything, mock.Anything).Return(uint64(0), nil).Maybe() ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head, nil).Once() - ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head.Parent, nil).Once() + ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head.Parent.Load(), nil).Once() ethClient.On("HeadByNumber", mock.Anything, mock.Anything).Return(head, nil) feeEstimator := gasmocks.NewEvmFeeEstimator(t) feeEstimator.On("Start", mock.Anything).Return(nil).Once() @@ -755,7 +756,7 @@ func TestTxm_GetTransactionStatus(t *testing.T) { err = txStore.InsertTxAttempt(ctx, &attempt) require.NoError(t, err) // Insert receipt for finalized block num - mustInsertEthReceipt(t, txStore, head.Parent.Number, head.Parent.Hash, attempt.Hash) + 
mustInsertEthReceipt(t, txStore, head.Parent.Load().Number, head.Parent.Load().Hash, attempt.Hash) state, err := txm.GetTransactionStatus(ctx, idempotencyKey) require.NoError(t, err) require.Equal(t, commontypes.Finalized, state) diff --git a/core/chains/evm/types/head_test.go b/core/chains/evm/types/head_test.go index 97c536a3444..5d887c43c82 100644 --- a/core/chains/evm/types/head_test.go +++ b/core/chains/evm/types/head_test.go @@ -9,6 +9,11 @@ import ( func TestHead_LatestFinalizedHead(t *testing.T) { t.Parallel() + newFinalizedHead := func(num int64) *Head { + result := &Head{Number: num} + result.IsFinalized.Store(true) + return result + } cases := []struct { Name string Head *Head @@ -21,17 +26,17 @@ func TestHead_LatestFinalizedHead(t *testing.T) { }, { Name: "Chain without finalized returns nil", - Head: &Head{Parent: &Head{Parent: &Head{}}}, + Head: sliceToChain(&Head{}, &Head{}, &Head{}), Finalized: nil, }, { Name: "Returns head if it's finalized", - Head: &Head{Number: 2, IsFinalized: true, Parent: &Head{Number: 1, IsFinalized: true}}, + Head: sliceToChain(newFinalizedHead(2), newFinalizedHead(1)), Finalized: &Head{Number: 2}, }, { Name: "Returns first block in chain if it's finalized", - Head: &Head{Number: 3, IsFinalized: false, Parent: &Head{Number: 2, IsFinalized: true, Parent: &Head{Number: 1, IsFinalized: true}}}, + Head: sliceToChain(&Head{Number: 3}, newFinalizedHead(2), newFinalizedHead(1)), Finalized: &Head{Number: 2}, }, } @@ -48,3 +53,43 @@ func TestHead_LatestFinalizedHead(t *testing.T) { }) } } + +func TestHead_ChainString(t *testing.T) { + cases := []struct { + Name string + Chain *Head + ExpectedResult string + }{ + { + Name: "Empty chain", + ExpectedResult: "->nil", + }, + { + Name: "Single head", + Chain: &Head{Number: 1}, + ExpectedResult: "Head{Number: 1, Hash: 0x0000000000000000000000000000000000000000000000000000000000000000, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000000}->nil", + }, + { + Name: "Multiple heads", + Chain: sliceToChain(&Head{Number: 1}, &Head{Number: 2}, &Head{Number: 3}), + ExpectedResult: "Head{Number: 1, Hash: 0x0000000000000000000000000000000000000000000000000000000000000000, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000000}->Head{Number: 2, Hash: 0x0000000000000000000000000000000000000000000000000000000000000000, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000000}->Head{Number: 3, Hash: 0x0000000000000000000000000000000000000000000000000000000000000000, ParentHash: 0x0000000000000000000000000000000000000000000000000000000000000000}->nil", + }, + } + for _, testCase := range cases { + t.Run(testCase.Name, func(t *testing.T) { + assert.Equal(t, testCase.ExpectedResult, testCase.Chain.ChainString()) + }) + } +} + +func sliceToChain(heads ...*Head) *Head { + if len(heads) == 0 { + return nil + } + + for i := 1; i < len(heads); i++ { + heads[i-1].Parent.Store(heads[i]) + } + + return heads[0] +} diff --git a/core/chains/evm/types/models.go b/core/chains/evm/types/models.go index a9e5cd5841b..1da8754cec4 100644 --- a/core/chains/evm/types/models.go +++ b/core/chains/evm/types/models.go @@ -9,6 +9,7 @@ import ( "math/big" "regexp" "strings" + "sync/atomic" "time" "github.com/ethereum/go-ethereum/common" @@ -34,7 +35,7 @@ type Head struct { Number int64 L1BlockNumber sql.NullInt64 ParentHash common.Hash - Parent *Head + Parent atomic.Pointer[Head] EVMChainID *ubig.Big Timestamp time.Time CreatedAt time.Time @@ -44,7 +45,7 @@ type Head struct { StateRoot 
common.Hash Difficulty *big.Int TotalDifficulty *big.Int - IsFinalized bool + IsFinalized atomic.Bool } var _ commontypes.Head[common.Hash] = &Head{} @@ -74,10 +75,11 @@ func (h *Head) GetParentHash() common.Hash { } func (h *Head) GetParent() commontypes.Head[common.Hash] { - if h.Parent == nil { - return nil + if parent := h.Parent.Load(); parent != nil { + return parent } - return h.Parent + // explicitly return nil to avoid *Head(nil) + return nil } func (h *Head) GetTimestamp() time.Time { @@ -90,10 +92,11 @@ func (h *Head) BlockDifficulty() *big.Int { // EarliestInChain recurses through parents until it finds the earliest one func (h *Head) EarliestInChain() *Head { - for h.Parent != nil { - h = h.Parent + var earliestInChain *Head + for cur := h; cur != nil; cur = cur.Parent.Load() { + earliestInChain = cur } - return h + return earliestInChain } // EarliestHeadInChain recurses through parents until it finds the earliest one @@ -103,14 +106,10 @@ func (h *Head) EarliestHeadInChain() commontypes.Head[common.Hash] { // IsInChain returns true if the given hash matches the hash of a head in the chain func (h *Head) IsInChain(blockHash common.Hash) bool { - for { - if h.Hash == blockHash { + for cur := h; cur != nil; cur = cur.Parent.Load() { + if cur.Hash == blockHash { return true } - if h.Parent == nil { - break - } - h = h.Parent } return false } @@ -127,32 +126,19 @@ func (h *Head) HashAtHeight(blockNum int64) common.Hash { } func (h *Head) HeadAtHeight(blockNum int64) (commontypes.Head[common.Hash], error) { - for h != nil { - if h.Number == blockNum { - return h, nil + for cur := h; cur != nil; cur = cur.Parent.Load() { + if cur.Number == blockNum { + return cur, nil } - - h = h.Parent } return nil, fmt.Errorf("failed to find head at height %d", blockNum) } // ChainLength returns the length of the chain followed by recursively looking up parents func (h *Head) ChainLength() uint32 { - if h == nil { - return 0 - } - l := uint32(1) - - for { - if h.Parent == nil { - break - } + l := uint32(0) + for cur := h; cur != nil; cur = cur.Parent.Load() { l++ - if h == h.Parent { - panic("circular reference detected") - } - h = h.Parent } return l } @@ -160,29 +146,19 @@ func (h *Head) ChainLength() uint32 { // ChainHashes returns an array of block hashes by recursively looking up parents func (h *Head) ChainHashes() []common.Hash { var hashes []common.Hash - - for { - hashes = append(hashes, h.Hash) - if h.Parent == nil { - break - } - if h == h.Parent { - panic("circular reference detected") - } - h = h.Parent + for cur := h; cur != nil; cur = cur.Parent.Load() { + hashes = append(hashes, cur.Hash) } + return hashes } func (h *Head) LatestFinalizedHead() commontypes.Head[common.Hash] { - for h != nil { - if h.IsFinalized { - return h + for cur := h; cur != nil; cur = cur.Parent.Load() { + if cur.IsFinalized.Load() { + return cur } - - h = h.Parent } - return nil } @@ -200,18 +176,13 @@ func (h *Head) IsValid() bool { func (h *Head) ChainString() string { var sb strings.Builder - - for { - sb.WriteString(h.String()) - if h.Parent == nil { - break - } - if h == h.Parent { - panic("circular reference detected") + for cur := h; cur != nil; cur = cur.Parent.Load() { + if sb.Len() > 0 { + sb.WriteString("->") } - sb.WriteString("->") - h = h.Parent + sb.WriteString(cur.String()) } + sb.WriteString("->nil") return sb.String() } @@ -255,11 +226,11 @@ func (h *Head) AsSlice(k int) (heads []*Head) { if k < 1 || h == nil { return } - heads = make([]*Head, 1) - heads[0] = h - for len(heads) < k && 
h.Parent != nil { - h = h.Parent - heads = append(heads, h) + heads = make([]*Head, 0, k) + for cur := h; cur != nil; cur = cur.Parent.Load() { + if len(heads) < k { + heads = append(heads, cur) + } } return } diff --git a/core/chains/evm/types/models_test.go b/core/chains/evm/types/models_test.go index 6018d68f962..a54f1f58f5b 100644 --- a/core/chains/evm/types/models_test.go +++ b/core/chains/evm/types/models_test.go @@ -116,11 +116,9 @@ func TestEthTxAttempt_GetSignedTx(t *testing.T) { } func TestHead_ChainLength(t *testing.T) { - head := evmtypes.Head{ - Parent: &evmtypes.Head{ - Parent: &evmtypes.Head{}, - }, - } + head := evmtypes.Head{} + head.Parent.Store(&evmtypes.Head{}) + head.Parent.Load().Parent.Store(&evmtypes.Head{}) assert.Equal(t, uint32(3), head.ChainLength()) @@ -134,12 +132,12 @@ func TestHead_AsSlice(t *testing.T) { } h2 := &evmtypes.Head{ Number: 2, - Parent: h1, } + h2.Parent.Store(h1) h3 := &evmtypes.Head{ Number: 3, - Parent: h2, } + h3.Parent.Store(h2) assert.Len(t, (*evmtypes.Head)(nil).AsSlice(0), 0) assert.Len(t, (*evmtypes.Head)(nil).AsSlice(1), 0) @@ -234,36 +232,35 @@ func TestSafeByteSlice_Error(t *testing.T) { } func TestHead_EarliestInChain(t *testing.T) { - head := evmtypes.Head{ + h3 := evmtypes.Head{ Number: 3, - Parent: &evmtypes.Head{ - Number: 2, - Parent: &evmtypes.Head{ - Number: 1, - }, - }, } + h2 := &evmtypes.Head{Number: 2} + h3.Parent.Store(h2) + h1 := &evmtypes.Head{Number: 1} + h2.Parent.Store(h1) - assert.Equal(t, int64(1), head.EarliestInChain().BlockNumber()) + assert.Equal(t, int64(1), h3.EarliestInChain().BlockNumber()) } func TestHead_HeadAtHeight(t *testing.T) { - expectedResult := &evmtypes.Head{ + h1 := &evmtypes.Head{ + Number: 1, + } + h2 := &evmtypes.Head{ Hash: common.BigToHash(big.NewInt(10)), Number: 2, - Parent: &evmtypes.Head{ - Number: 1, - }, } - head := evmtypes.Head{ + h2.Parent.Store(h1) + h3 := evmtypes.Head{ Number: 3, - Parent: expectedResult, } + h3.Parent.Store(h2) - headAtHeight, err := head.HeadAtHeight(2) + headAtHeight, err := h3.HeadAtHeight(2) require.NoError(t, err) - assert.Equal(t, expectedResult, headAtHeight) - _, err = head.HeadAtHeight(0) + assert.Equal(t, h2, headAtHeight) + _, err = h3.HeadAtHeight(0) assert.Error(t, err, "expected to get an error if head is not in the chain") } @@ -271,25 +268,27 @@ func TestHead_IsInChain(t *testing.T) { hash1 := utils.NewHash() hash2 := utils.NewHash() hash3 := utils.NewHash() - - head := evmtypes.Head{ - Number: 3, + h1 := &evmtypes.Head{ + Number: 1, + Hash: hash1, + } + h2 := &evmtypes.Head{ + Hash: hash2, + ParentHash: hash1, + Number: 2, + } + h2.Parent.Store(h1) + h3 := evmtypes.Head{ Hash: hash3, - Parent: &evmtypes.Head{ - Hash: hash2, - Number: 2, - Parent: &evmtypes.Head{ - Hash: hash1, - Number: 1, - }, - }, + Number: 3, } + h3.Parent.Store(h2) - assert.True(t, head.IsInChain(hash1)) - assert.True(t, head.IsInChain(hash2)) - assert.True(t, head.IsInChain(hash3)) - assert.False(t, head.IsInChain(utils.NewHash())) - assert.False(t, head.IsInChain(common.Hash{})) + assert.True(t, h3.IsInChain(hash1)) + assert.True(t, h3.IsInChain(hash2)) + assert.True(t, h3.IsInChain(hash3)) + assert.False(t, h3.IsInChain(utils.NewHash())) + assert.False(t, h3.IsInChain(common.Hash{})) } func TestTxReceipt_ReceiptIndicatesRunLogFulfillment(t *testing.T) { @@ -316,11 +315,11 @@ func TestHead_UnmarshalJSON(t *testing.T) { tests := []struct { name string json string - expected evmtypes.Head + expected *evmtypes.Head }{ {"geth", 
`{"difficulty":"0xf3a00","extraData":"0xd883010503846765746887676f312e372e318664617277696e","gasLimit":"0xffc001","gasUsed":"0x0","hash":"0x41800b5c3f1717687d85fc9018faac0a6e90b39deaa0b99e7fe4fe796ddeb26a","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","miner":"0xd1aeb42885a43b72b518182ef893125814811048","mixHash":"0x0f98b15f1a4901a7e9204f3c500a7bd527b3fb2c3340e12176a44b83e414a69e","nonce":"0x0ece08ea8c49dfd9","number":"0x100","parentHash":"0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","size":"0x218","stateRoot":"0xc7b01007a10da045eacb90385887dd0c38fcb5db7393006bdde24b93873c334b","timestamp":"0x58318da2","totalDifficulty":"0x1f3a00","transactions":[],"transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","uncles":[]}`, - evmtypes.Head{ + &evmtypes.Head{ Hash: common.HexToHash("0x41800b5c3f1717687d85fc9018faac0a6e90b39deaa0b99e7fe4fe796ddeb26a"), Number: 0x100, ParentHash: common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d"), @@ -332,7 +331,7 @@ func TestHead_UnmarshalJSON(t *testing.T) { }, {"parity", `{"author":"0xd1aeb42885a43b72b518182ef893125814811048","difficulty":"0xf3a00","extraData":"0xd883010503846765746887676f312e372e318664617277696e","gasLimit":"0xffc001","gasUsed":"0x0","hash":"0x41800b5c3f1717687d85fc9018faac0a6e90b39deaa0b99e7fe4fe796ddeb26a","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","miner":"0xd1aeb42885a43b72b518182ef893125814811048","mixHash":"0x0f98b15f1a4901a7e9204f3c500a7bd527b3fb2c3340e12176a44b83e414a69e","nonce":"0x0ece08ea8c49dfd9","number":"0x100","parentHash":"0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","sealFields":["0xa00f98b15f1a4901a7e9204f3c500a7bd527b3fb2c3340e12176a44b83e414a69e","0x880ece08ea8c49dfd9"],"sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","size":"0x218","stateRoot":"0xc7b01007a10da045eacb90385887dd0c38fcb5db7393006bdde24b93873c334b","timestamp":"0x58318da2","totalDifficulty":"0x1f3a00","transactions":[],"transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","uncles":[]}`, - evmtypes.Head{ + &evmtypes.Head{ Hash: common.HexToHash("0x41800b5c3f1717687d85fc9018faac0a6e90b39deaa0b99e7fe4fe796ddeb26a"), Number: 0x100, ParentHash: 
common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d"), @@ -344,7 +343,7 @@ func TestHead_UnmarshalJSON(t *testing.T) { }, {"arbitrum", `{"number":"0x15156","hash":"0x752dab43f7a2482db39227d46cd307623b26167841e2207e93e7566ab7ab7871","parentHash":"0x923ad1e27c1d43cb2d2fb09e26d2502ca4b4914a2e0599161d279c6c06117d34","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","transactionsRoot":"0x71448077f5ce420a8e24db62d4d58e8d8e6ad2c7e76318868e089d41f7e0faf3","stateRoot":"0x0000000000000000000000000000000000000000000000000000000000000000","receiptsRoot":"0x2c292672b8fc9d223647a2569e19721f0757c96a1421753a93e141f8e56cf504","miner":"0x0000000000000000000000000000000000000000","difficulty":"0x0","totalDifficulty":"0x0","extraData":"0x","size":"0x0","gasLimit":"0x11278208","gasUsed":"0x3d1fe9","timestamp":"0x60d0952d","transactions":["0xa1ea93556b93ed3b45cb24f21c8deb584e6a9049c35209242651bf3533c23b98","0xfc6593c45ba92351d17173aa1381e84734d252ab0169887783039212c4a41024","0x85ee9d04fd0ebb5f62191eeb53cb45d9c0945d43eba444c3548de2ac8421682f","0x50d120936473e5b75f6e04829ad4eeca7a1df7d3c5026ebb5d34af936a39b29c"],"uncles":[],"l1BlockNumber":"0x8652f9"}`, - evmtypes.Head{ + &evmtypes.Head{ Hash: common.HexToHash("0x752dab43f7a2482db39227d46cd307623b26167841e2207e93e7566ab7ab7871"), Number: 0x15156, ParentHash: common.HexToHash("0x923ad1e27c1d43cb2d2fb09e26d2502ca4b4914a2e0599161d279c6c06117d34"), @@ -357,7 +356,7 @@ func TestHead_UnmarshalJSON(t *testing.T) { }, {"arbitrum_empty_l1BlockNumber", 
`{"number":"0x15156","hash":"0x752dab43f7a2482db39227d46cd307623b26167841e2207e93e7566ab7ab7871","parentHash":"0x923ad1e27c1d43cb2d2fb09e26d2502ca4b4914a2e0599161d279c6c06117d34","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","transactionsRoot":"0x71448077f5ce420a8e24db62d4d58e8d8e6ad2c7e76318868e089d41f7e0faf3","stateRoot":"0x0000000000000000000000000000000000000000000000000000000000000000","receiptsRoot":"0x2c292672b8fc9d223647a2569e19721f0757c96a1421753a93e141f8e56cf504","miner":"0x0000000000000000000000000000000000000000","difficulty":"0x0","totalDifficulty":"0x0","extraData":"0x","size":"0x0","gasLimit":"0x11278208","gasUsed":"0x3d1fe9","timestamp":"0x60d0952d","transactions":["0xa1ea93556b93ed3b45cb24f21c8deb584e6a9049c35209242651bf3533c23b98","0xfc6593c45ba92351d17173aa1381e84734d252ab0169887783039212c4a41024","0x85ee9d04fd0ebb5f62191eeb53cb45d9c0945d43eba444c3548de2ac8421682f","0x50d120936473e5b75f6e04829ad4eeca7a1df7d3c5026ebb5d34af936a39b29c"],"uncles":[]}`, - evmtypes.Head{ + &evmtypes.Head{ Hash: common.HexToHash("0x752dab43f7a2482db39227d46cd307623b26167841e2207e93e7566ab7ab7871"), Number: 0x15156, ParentHash: common.HexToHash("0x923ad1e27c1d43cb2d2fb09e26d2502ca4b4914a2e0599161d279c6c06117d34"), @@ -370,7 +369,7 @@ func TestHead_UnmarshalJSON(t *testing.T) { }, {"not found", `null`, - evmtypes.Head{}, + &evmtypes.Head{}, }, } @@ -395,11 +394,11 @@ func TestHead_UnmarshalJSON(t *testing.T) { func TestHead_MarshalJSON(t *testing.T) { tests := []struct { name string - head evmtypes.Head + head *evmtypes.Head expected string }{ {"happy", - evmtypes.Head{ + &evmtypes.Head{ Hash: common.HexToHash("0x41800b5c3f1717687d85fc9018faac0a6e90b39deaa0b99e7fe4fe796ddeb26a"), Number: 0x100, ParentHash: common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d"), @@ -411,7 +410,7 @@ func TestHead_MarshalJSON(t *testing.T) { `{"hash":"0x41800b5c3f1717687d85fc9018faac0a6e90b39deaa0b99e7fe4fe796ddeb26a","number":"0x100","parentHash":"0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d","timestamp":"0x58318da2","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","stateRoot":"0xc7b01007a10da045eacb90385887dd0c38fcb5db7393006bdde24b93873c334b"}`, }, {"empty", - evmtypes.Head{}, + &evmtypes.Head{}, `{"number":"0x0"}`, }, } diff --git a/core/internal/cltest/cltest.go b/core/internal/cltest/cltest.go index 7d333d94018..b60dd8d73ce 100644 --- a/core/internal/cltest/cltest.go +++ b/core/internal/cltest/cltest.go @@ -1389,7 +1389,7 @@ func (b *Blocks) ForkAt(t *testing.T, blockNum int64, numHashes int) *Blocks { } forked.Heads[blockNum].ParentHash = b.Heads[blockNum].ParentHash - forked.Heads[blockNum].Parent = b.Heads[blockNum].Parent + 
forked.Heads[blockNum].Parent.Store(b.Heads[blockNum].Parent.Load()) return forked } @@ -1403,10 +1403,10 @@ func (b *Blocks) NewHead(number uint64) *evmtypes.Head { Number: parent.Number + 1, Hash: evmutils.NewHash(), ParentHash: parent.Hash, - Parent: parent, Timestamp: time.Unix(parent.Number+1, 0), EVMChainID: ubig.New(&FixtureChainID), } + head.Parent.Store(parent) return head } @@ -1447,7 +1447,7 @@ func NewBlocks(t *testing.T, numHashes int) *Blocks { heads[i] = &evmtypes.Head{Hash: hash, Number: i, Timestamp: time.Unix(i, 0), EVMChainID: ubig.New(&FixtureChainID)} if i > 0 { parent := heads[i-1] - heads[i].Parent = parent + heads[i].Parent.Store(parent) heads[i].ParentHash = parent.Hash } } diff --git a/core/internal/cltest/factories.go b/core/internal/cltest/factories.go index c488dca94a9..8f4b2260a02 100644 --- a/core/internal/cltest/factories.go +++ b/core/internal/cltest/factories.go @@ -25,6 +25,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/sqlutil" "github.com/smartcontractkit/chainlink-common/pkg/utils/jsonserializable" + txmgrcommon "github.com/smartcontractkit/chainlink/v2/common/txmgr" txmgrtypes "github.com/smartcontractkit/chainlink/v2/common/txmgr/types" "github.com/smartcontractkit/chainlink/v2/core/auth" @@ -318,13 +319,13 @@ func MustGenerateRandomKeyState(_ testing.TB) ethkey.State { return ethkey.State{Address: NewEIP55Address()} } -func MustInsertHead(t *testing.T, ds sqlutil.DataSource, number int64) evmtypes.Head { +func MustInsertHead(t *testing.T, ds sqlutil.DataSource, number int64) *evmtypes.Head { h := evmtypes.NewHead(big.NewInt(number), evmutils.NewHash(), evmutils.NewHash(), 0, ubig.New(&FixtureChainID)) horm := headtracker.NewORM(FixtureChainID, ds) err := horm.IdempotentInsertHead(testutils.Context(t), &h) require.NoError(t, err) - return h + return &h } func MustInsertV2JobSpec(t *testing.T, db *sqlx.DB, transmitterAddress common.Address) job.Job { diff --git a/core/services/headreporter/telemetry_reporter_test.go b/core/services/headreporter/telemetry_reporter_test.go index 85bfea5866a..6d7f4e3ddef 100644 --- a/core/services/headreporter/telemetry_reporter_test.go +++ b/core/services/headreporter/telemetry_reporter_test.go @@ -22,18 +22,18 @@ import ( func Test_TelemetryReporter_NewHead(t *testing.T) { head := evmtypes.Head{ - Number: 42, - EVMChainID: ubig.NewI(100), - Hash: common.HexToHash("0x1010"), - Timestamp: time.UnixMilli(1000), - IsFinalized: false, - Parent: &evmtypes.Head{ - Number: 41, - Hash: common.HexToHash("0x1009"), - Timestamp: time.UnixMilli(999), - IsFinalized: true, - }, + Number: 42, + EVMChainID: ubig.NewI(100), + Hash: common.HexToHash("0x1010"), + Timestamp: time.UnixMilli(1000), + } + h41 := &evmtypes.Head{ + Number: 41, + Hash: common.HexToHash("0x1009"), + Timestamp: time.UnixMilli(999), } + h41.IsFinalized.Store(true) + head.Parent.Store(h41) requestBytes, err := proto.Marshal(&telem.HeadReportRequest{ ChainID: "100", Latest: &telem.Block{ @@ -42,9 +42,9 @@ func Test_TelemetryReporter_NewHead(t *testing.T) { Hash: head.Hash.Hex(), }, Finalized: &telem.Block{ - Timestamp: uint64(head.Parent.Timestamp.UTC().Unix()), + Timestamp: uint64(head.Parent.Load().Timestamp.UTC().Unix()), Number: 41, - Hash: head.Parent.Hash.Hex(), + Hash: head.Parent.Load().Hash.Hex(), }, }) assert.NoError(t, err) @@ -64,11 +64,10 @@ func Test_TelemetryReporter_NewHead(t *testing.T) { func Test_TelemetryReporter_NewHeadMissingFinalized(t *testing.T) { head := evmtypes.Head{ - Number: 42, - EVMChainID: ubig.NewI(100), - Hash: 
common.HexToHash("0x1010"), - Timestamp: time.UnixMilli(1000), - IsFinalized: false, + Number: 42, + EVMChainID: ubig.NewI(100), + Hash: common.HexToHash("0x1010"), + Timestamp: time.UnixMilli(1000), } requestBytes, err := proto.Marshal(&telem.HeadReportRequest{ ChainID: "100", diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber.go index a5a00542179..21adc12d30e 100644 --- a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber.go +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber.go @@ -234,7 +234,7 @@ func (bs *BlockSubscriber) processHead(h *evmtypes.Head) { // head parent is a linked list with EVM finality depth // when re-org happens, new heads will have pointers to the new blocks i := int64(0) - for cp := h; cp != nil; cp = cp.Parent { + for cp := h; cp != nil; cp = cp.Parent.Load() { // we don't stop when a matching (block number/hash) entry is seen in the map because parent linked list may be // cut short during a re-org if head broadcaster backfill is not complete. This can cause some re-orged blocks // left in the map. for example, re-org happens for block 98, 99, 100. next head 101 from broadcaster has parent list diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber_test.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber_test.go index fefbda77cd7..bdcc37dc6bb 100644 --- a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber_test.go +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/block_subscriber_test.go @@ -310,23 +310,22 @@ func TestBlockSubscriber_Start(t *testing.T) { h97 := evmtypes.Head{ Number: 97, Hash: common.HexToHash("0xda2f9d1359eadd7b93338703adc07d942021a78195564038321ef53f23f87333"), - Parent: nil, } h98 := evmtypes.Head{ Number: 98, Hash: common.HexToHash("0xc20c7b47466c081a44a3b168994e89affe85cb894547845d938f923b67c633c0"), - Parent: &h97, } + h98.Parent.Store(&h97) h99 := evmtypes.Head{ Number: 99, Hash: common.HexToHash("0x9bc2b51e147f9cad05f1614b7f1d8181cb24c544cbcf841f3155e54e752a3b44"), - Parent: &h98, } + h99.Parent.Store(&h98) h100 := evmtypes.Head{ Number: 100, Hash: common.HexToHash("0x5e7fadfc14e1cfa9c05a91128c16a20c6cbc3be38b4723c3d482d44bf9c0e07b"), - Parent: &h99, } + h100.Parent.Store(&h99) // no subscribers yet bs.headC <- &h100 @@ -353,8 +352,8 @@ func TestBlockSubscriber_Start(t *testing.T) { h101 := &evmtypes.Head{ Number: 101, Hash: common.HexToHash("0xc20c7b47466c081a44a3b168994e89affe85cb894547845d938f923b67c633c0"), - Parent: &h100, } + h101.Parent.Store(&h100) bs.headC <- h101 time.Sleep(100 * time.Millisecond) @@ -387,24 +386,24 @@ func TestBlockSubscriber_Start(t *testing.T) { new99 := &evmtypes.Head{ Number: 99, Hash: common.HexToHash("0x70c03acc4ddbfb253ba41a25dc13fb21b25da8b63bcd1aa7fb55713d33a36c71"), - Parent: &h98, } + new99.Parent.Store(&h98) new100 := &evmtypes.Head{ Number: 100, Hash: common.HexToHash("0x8a876b62d252e63e16cf3487db3486c0a7c0a8e06bc3792a3b116c5ca480503f"), - Parent: new99, } + new100.Parent.Store(new99) new101 := &evmtypes.Head{ Number: 101, Hash: common.HexToHash("0x41b5842b8847dcf834e39556d2ac51cc7d960a7de9471ec504673d0038fd6c8e"), - Parent: new100, } + new101.Parent.Store(new100) new102 := &evmtypes.Head{ Number: 102, Hash: common.HexToHash("0x9ac1ebc307554cf1bcfcc2a49462278e89d6878d613a33df38a64d0aeac971b5"), - Parent: new101, } + new102.Parent.Store(new101) bs.headC <- new102 
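The change running through all of these hunks is the same: Head.Parent is no longer a plain *Head field that can be set in a struct literal, so call sites now build chains with Parent.Store(...) after construction and walk them with Parent.Load() (and IsFinalized likewise becomes an atomic flag written via IsFinalized.Store(...)). Below is a minimal, self-contained sketch of the idiom, assuming Parent is an atomic.Pointer[Head] as the Load/Store calls imply; this Head is a simplified stand-in for illustration, not the real evmtypes.Head:

package main

import (
	"fmt"
	"sync/atomic"
)

// Head is a simplified stand-in: Parent is an atomic pointer, so a writer
// can re-link the chain during a re-org while concurrent readers walk it
// without a data race.
type Head struct {
	Number int64
	Parent atomic.Pointer[Head]
}

// ChainLength counts heads by following Parent.Load() rather than a plain
// pointer field.
func (h *Head) ChainLength() uint32 {
	var n uint32
	for cur := h; cur != nil; cur = cur.Parent.Load() {
		n++
	}
	return n
}

func main() {
	h1 := &Head{Number: 1}
	h2 := &Head{Number: 2}
	h2.Parent.Store(h1) // atomic.Pointer has unexported fields, so it cannot be initialized in the literal
	h3 := &Head{Number: 3}
	h3.Parent.Store(h2)
	fmt.Println(h3.ChainLength()) // prints 3
}

This is also why the struct-literal fixtures in the tests above had to be rewritten: a nested literal such as Parent: &evmtypes.Head{...} no longer compiles once the field is an atomic pointer, and each link has to be stored explicitly after both heads exist.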
diff --git a/core/services/ocrcommon/arbitrum_block_translator_test.go b/core/services/ocrcommon/arbitrum_block_translator_test.go index fa6875fb798..6b9abc93bf7 100644 --- a/core/services/ocrcommon/arbitrum_block_translator_test.go +++ b/core/services/ocrcommon/arbitrum_block_translator_test.go @@ -1,6 +1,7 @@ package ocrcommon_test import ( + "context" "database/sql" "math/big" mrand "math/rand" @@ -34,7 +35,7 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { var changedInL1Block int64 = 5541 latestBlock := blocks[1000] - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() from, to, err := abt.BinarySearch(ctx, changedInL1Block) require.NoError(t, err) @@ -51,11 +52,10 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { var changedInL1Block int64 = 42 latestBlock := blocks[1000] - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() - tmp := new(evmtypes.Head) - client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(tmp, nil).Run(func(args mock.Arguments) { - *tmp = blocks[args[1].(*big.Int).Int64()] + client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(func(_ context.Context, num *big.Int) (*evmtypes.Head, error) { + return blocks[num.Int64()], nil }) _, _, err := abt.BinarySearch(ctx, changedInL1Block) @@ -71,11 +71,10 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { var changedInL1Block int64 = 5043 latestBlock := blocks[1000] - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() - tmp := new(evmtypes.Head) - client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(tmp, nil).Run(func(args mock.Arguments) { - *tmp = blocks[args[1].(*big.Int).Int64()] + client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(func(_ context.Context, num *big.Int) (*evmtypes.Head, error) { + return blocks[num.Int64()], nil }) _, _, err := abt.BinarySearch(ctx, changedInL1Block) @@ -91,12 +90,10 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { var changedInL1Block int64 = 5042 latestBlock := blocks[1000] - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() - tmp := new(evmtypes.Head) - client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(tmp, nil).Run(func(args mock.Arguments) { - h := blocks[args[1].(*big.Int).Int64()] - *tmp = h + client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(func(_ context.Context, num *big.Int) (*evmtypes.Head, error) { + return blocks[num.Int64()], nil }) from, to, err := abt.BinarySearch(ctx, changedInL1Block) @@ -114,12 +111,10 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { var changedInL1Block int64 = 5000 latestBlock := blocks[1000] - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() - tmp := new(evmtypes.Head) - client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(tmp, nil).Run(func(args mock.Arguments) { - h := blocks[args[1].(*big.Int).Int64()] - *tmp = h + client.On("HeadByNumber", ctx, 
mock.AnythingOfType("*big.Int")).Return(func(_ context.Context, num *big.Int) (*evmtypes.Head, error) { + return blocks[num.Int64()], nil }) from, to, err := abt.BinarySearch(ctx, changedInL1Block) @@ -137,12 +132,10 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { var changedInL1Block int64 = 5540 latestBlock := blocks[1000] - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() - tmp := new(evmtypes.Head) - client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(tmp, nil).Run(func(args mock.Arguments) { - h := blocks[args[1].(*big.Int).Int64()] - *tmp = h + client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(func(_ context.Context, num *big.Int) (*evmtypes.Head, error) { + return blocks[num.Int64()], nil }) from, to, err := abt.BinarySearch(ctx, changedInL1Block) @@ -161,12 +154,10 @@ func TestArbitrumBlockTranslator_BinarySearch(t *testing.T) { latestBlock := blocks[1000] // Latest is never cached - client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(&latestBlock, nil).Once() + client.On("HeadByNumber", ctx, (*big.Int)(nil)).Return(latestBlock, nil).Once() - tmp := new(evmtypes.Head) - client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Times(20+18+14).Return(tmp, nil).Run(func(args mock.Arguments) { - h := blocks[args[1].(*big.Int).Int64()] - *tmp = h + client.On("HeadByNumber", ctx, mock.AnythingOfType("*big.Int")).Return(func(_ context.Context, num *big.Int) (*evmtypes.Head, error) { + return blocks[num.Int64()], nil }) // First search, nothing cached (total 21 - bsearch 20) @@ -230,14 +221,14 @@ func TestArbitrumBlockTranslator_NumberToQueryRange(t *testing.T) { }) } -func generateDeterministicL2Blocks() (heads []evmtypes.Head) { +func generateDeterministicL2Blocks() (heads []*evmtypes.Head) { source := mrand.NewSource(0) deterministicRand := mrand.New(source) l2max := 1000 var l1BlockNumber int64 = 5000 var parentHash common.Hash for i := 0; i <= l2max; i++ { - head := evmtypes.Head{ + head := &evmtypes.Head{ Number: int64(i), L1BlockNumber: sql.NullInt64{Int64: l1BlockNumber, Valid: true}, Hash: utils.NewHash(), diff --git a/core/services/relay/evm/mercury/v1/data_source_test.go b/core/services/relay/evm/mercury/v1/data_source_test.go index 197d802a3b3..7f5117a0aa8 100644 --- a/core/services/relay/evm/mercury/v1/data_source_test.go +++ b/core/services/relay/evm/mercury/v1/data_source_test.go @@ -332,16 +332,15 @@ func TestMercury_Observe(t *testing.T) { t.Run("when chain is too short", func(t *testing.T) { h4 := &evmtypes.Head{ Number: 4, - Parent: nil, } h5 := &evmtypes.Head{ Number: 5, - Parent: h4, } + h5.Parent.Store(h4) h6 := &evmtypes.Head{ Number: 6, - Parent: h5, } + h6.Parent.Store(h5) ht2 := htmocks.NewHeadTracker[*evmtypes.Head, common.Hash](t) ht2.On("LatestChain").Return(h6) @@ -362,7 +361,7 @@ func TestMercury_Observe(t *testing.T) { for i := range heads { heads[i] = &evmtypes.Head{Number: int64(i)} if i > 0 { - heads[i].Parent = heads[i-1] + heads[i].Parent.Store(heads[i-1]) } } From ef77fac68b4f6f7b55a869af9b1484ea8a98c845 Mon Sep 17 00:00:00 2001 From: Lukasz <120112546+lukaszcl@users.noreply.github.com> Date: Mon, 23 Sep 2024 13:28:21 +0200 Subject: [PATCH 08/14] Remove unused workflow for e2e tests (#14520) * Remove unused workflow for e2e tests * Fix --- .github/workflows/integration-tests.yml | 2 +- .../on-demand-keeper-smoke-tests.yml | 290 ------------------ 2 files changed, 1 
insertion(+), 291 deletions(-) delete mode 100644 .github/workflows/on-demand-keeper-smoke-tests.yml diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 032ec40b4ea..63893867993 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -280,7 +280,7 @@ jobs: id-token: write contents: read needs: [build-chainlink, changes] - if: github.event_name == 'merge_group' + if: github.event_name == 'merge_group' && ( needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml with: workflow_name: Run Core E2E Tests For Merge Queue diff --git a/.github/workflows/on-demand-keeper-smoke-tests.yml b/.github/workflows/on-demand-keeper-smoke-tests.yml deleted file mode 100644 index a0ed71c0a06..00000000000 --- a/.github/workflows/on-demand-keeper-smoke-tests.yml +++ /dev/null @@ -1,290 +0,0 @@ -name: On Demand Keeper Smoke Tests -run-name: On Demand Keeper Smoke Tests ${{ inputs.distinct_run_name && inputs.distinct_run_name || '' }} -on: - workflow_dispatch: - inputs: - distinct_run_name: - description: 'A unique identifier for this run, only use from other repos' - required: false - type: string - -# Only run 1 of this workflow at a time per PR -concurrency: - group: on-demand-keeper-smoke-tests-${{ github.ref }}-${{ inputs.distinct_run_name }} - cancel-in-progress: true - -env: - # for run-test variables and environment - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ inputs.evm-ref || github.sha }} - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - TEST_SUITE: smoke - TEST_ARGS: -test.timeout 12m - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - COLLECTION_ID: chainlink-e2e-tests - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu22.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: ${{ matrix.image.tag-suffix }} - dockerfile: ${{ matrix.image.dockerfile }} - git_commit_sha: ${{ inputs.evm-ref || github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - dep_evm_sha: ${{ inputs.evm-ref }} - - compare-tests: - runs-on: ubuntu-latest - name: Build Automation Test List - outputs: - automation-matrix: ${{ env.AUTOMATION_JOB_MATRIX_JSON }} 
- steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref }} - - name: Compare Test Lists - run: | - cd ./integration-tests - ./scripts/compareTestList.sh ./smoke/keeper_test.go - - name: Build Test Matrix Lists - id: build-test-matrix-list - run: | - cd ./integration-tests - KEEPER_JOB_MATRIX_JSON=$(./scripts/buildTestMatrixList.sh ./smoke/keeper_test.go keeper ubuntu-latest 1) - echo "AUTOMATION_JOB_MATRIX_JSON=${KEEPER_JOB_MATRIX_JSON}" >> $GITHUB_ENV - - eth-smoke-tests-matrix-automation: - if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, compare-tests] - env: - SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 - CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref || github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - product: ${{fromJson(needs.compare-tests.outputs.automation-matrix)}} - runs-on: ${{ matrix.product.os }} - name: ETH Smoke Tests ${{ matrix.product.name }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-matrix-${{ matrix.product.name }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - this-job-name: ETH Smoke Tests ${{ matrix.product.name }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Go Test Command - id: build-go-test-command - run: | - # if the matrix.product.run is set, use it for a different command - if [ "${{ matrix.product.run }}" != "" ]; then - echo "run_command=${{ matrix.product.run }} ./smoke/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" - else - echo "run_command=./smoke/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" - fi - - ## Run this step when changes that require tests to be run are made - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@d2f9642bcc24a73400568756f24b72c188ac7a9a # v2.3.31 - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=${{ matrix.product.nodes }} ${{ steps.build-go-test-command.outputs.run_command }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_config_chainlink_version: ${{ inputs.evm-ref || github.sha }} - test_config_selected_networks: ${{ env.SELECTED_NETWORKS }} - test_config_logging_run_id: ${{ github.run_id }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_config_test_log_collect: ${{ vars.TEST_LOG_COLLECT }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ inputs.evm-ref || github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ 
matrix.product.name }}-test-logs - artifacts_location: | - ./integration-tests/smoke/logs/ - ./integration-tests/smoke/db_dumps/ - /tmp/gotest.log - publish_check_name: ${{ matrix.product.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - go_coverage_src_dir: /var/tmp/go-coverage - go_coverage_dest_dir: ${{ github.workspace }}/.covdata - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ${{ matrix.product.pyroscope_env }} - DEFAULT_PYROSCOPE_ENABLED: ${{ matrix.product.pyroscope_env == '' || !startsWith(github.ref, 'refs/tags/') && 'false' || 'true' }} - - - name: Upload Coverage Data - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - with: - name: cl-node-coverage-data-${{ matrix.product.name }} - path: .covdata - retention-days: 1 - - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - - ### Used to check the required checks box when the matrix completes - eth-smoke-tests: - if: always() - runs-on: ubuntu-latest - name: ETH Smoke Tests - needs: [eth-smoke-tests-matrix-automation] - steps: - - name: Check smoke test matrix status - if: needs.eth-smoke-tests-matrix-automation.result != 'success' - run: | - echo "Automation: ${{ needs.eth-smoke-tests-matrix-automation.result }}" - exit 1 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-matrix-results - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ETH Smoke Tests - matrix-aggregator-status: ${{ needs.eth-smoke-tests-matrix.result }} - continue-on-error: true - - cleanup: - name: Clean up integration environment deployments - if: always() - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - steps: - - name: Checkout repo - if: ${{ github.event_name == 'pull_request' }} - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref }} - - - name: 🧼 Clean up Environment - if: ${{ github.event_name == 'pull_request' }} - uses: ./.github/actions/delete-deployments - with: - environment: integration - ref: ${{ github.head_ref }} # See 
https://github.com/github/docs/issues/15319#issuecomment-1476705663 - - - name: Collect Metrics - if: ${{ github.event_name == 'pull_request' }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-env-cleanup - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean up integration environment deployments - continue-on-error: true - - show-coverage: - name: Show Chainlink Node Go Coverage - if: always() - needs: [cleanup] - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Download All Artifacts - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 - with: - path: cl-node-coverage-data - pattern: cl-node-coverage-data-* - merge-multiple: true - - name: Show Coverage - run: go run ./integration-tests/scripts/show_coverage.go "${{ github.workspace }}/cl-node-coverage-data/*/merged" - - # Run the setup if the matrix finishes but this time save the cache if we have a cache hit miss - # this will also only run if both of the matrix jobs pass - eth-smoke-go-mod-cache: - - environment: integration - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - name: ETH Smoke Tests Go Mod Cache - continue-on-error: true - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Run Setup - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - with: - test_download_vendor_packages_command: | - cd ./integration-tests - go mod download - # force download of test dependencies - go test -run=NonExistentTest ./smoke/... 
|| echo "ignore expected test failure" - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "false" From f74ac81d5db7a89b04252938f4b5ff34e3f7bbbe Mon Sep 17 00:00:00 2001 From: Bartek Tofel Date: Mon, 23 Sep 2024 14:44:36 +0200 Subject: [PATCH 09/14] [TT-1693] try more universal Solidity scripts (#14436) * try more universal Solidity scripts * fix error printing in uml generation; test foundry pipeline * Update gethwrappers * a bit more testing * fix Sol * remove test contracts * Update gethwrappers * try reusable artifact workflow * fix syntax * reuse workflow for .github/workflows * use newer reusable Solidity Review Artifacts pipeline * remove actions that were moved to chainlink-github-actions * add test Solidity file * add missing changeset * use scripts from shared repository * modify one contract * set git top level dir manually * [Bot] Update changeset file with jira issue * fix script path * use newer version of the reusable pipeline * update pipeline version, use custom pruning script * use newer action version, remove JIRA scripts * use reusable Solidity Review Artifacts workflow from .github * remove left over changes that are no longer needed * remove left over changes that are no longer needed * fix foundry pipeline * remove test Solidity files * use tagged reusable pipeline version * update all references to .github to use tagged commit --------- Co-authored-by: app-token-issuer-infra-releng[bot] <120227048+app-token-issuer-infra-releng[bot]@users.noreply.github.com> --- .github/actions/setup-slither/action.yaml | 10 - .github/actions/setup-solc-select/action.yaml | 30 - .../validate-solidity-artifacts/action.yaml | 115 -- .github/scripts/functions.sh | 17 - .github/scripts/jira/axios.ts | 97 -- .../scripts/jira/create-jira-traceability.ts | 215 ---- .github/scripts/jira/enforce-jira-issue.ts | 119 -- .github/scripts/jira/lib.test.ts | 149 --- .github/scripts/jira/lib.ts | 147 --- .github/scripts/jira/package.json | 34 - .github/scripts/jira/pnpm-lock.yaml | 1114 ----------------- .github/scripts/jira/tsconfig.json | 108 -- .github/scripts/jira/update-jira-issue.ts | 77 -- .github/workflows/changeset.yml | 19 +- .../workflows/solidity-foundry-artifacts.yml | 341 +---- .github/workflows/solidity-foundry.yml | 81 +- .github/workflows/solidity-tracability.yml | 27 +- .github/workflows/solidity-wrappers.yml | 8 +- contracts/.changeset/quiet-moles-retire.md | 8 + contracts/.tool-versions | 1 + .../scripts/ci/generate_slither_report.sh | 85 -- contracts/scripts/ci/generate_uml.sh | 121 -- contracts/scripts/ci/modify_remappings.sh | 30 - contracts/scripts/ci/select_solc_version.sh | 118 -- 24 files changed, 136 insertions(+), 2935 deletions(-) delete mode 100644 .github/actions/setup-slither/action.yaml delete mode 100644 .github/actions/setup-solc-select/action.yaml delete mode 100644 .github/actions/validate-solidity-artifacts/action.yaml delete mode 100644 .github/scripts/functions.sh delete mode 100644 .github/scripts/jira/axios.ts delete mode 100644 .github/scripts/jira/create-jira-traceability.ts delete mode 100644 .github/scripts/jira/enforce-jira-issue.ts delete mode 100644 .github/scripts/jira/lib.test.ts delete mode 100644 .github/scripts/jira/lib.ts delete mode 100644 .github/scripts/jira/package.json delete mode 100644 .github/scripts/jira/pnpm-lock.yaml delete mode 100644 .github/scripts/jira/tsconfig.json delete mode 100644 .github/scripts/jira/update-jira-issue.ts create mode 100644 
contracts/.changeset/quiet-moles-retire.md create mode 100644 contracts/.tool-versions delete mode 100755 contracts/scripts/ci/generate_slither_report.sh delete mode 100755 contracts/scripts/ci/generate_uml.sh delete mode 100755 contracts/scripts/ci/modify_remappings.sh delete mode 100755 contracts/scripts/ci/select_solc_version.sh diff --git a/.github/actions/setup-slither/action.yaml b/.github/actions/setup-slither/action.yaml deleted file mode 100644 index b8bef38575d..00000000000 --- a/.github/actions/setup-slither/action.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: Setup Slither -description: Installs Slither 0.10.3 for contract analysis. Requires Python 3.6 or higher. -runs: - using: composite - steps: - - name: Install Slither - shell: bash - run: | - python -m pip install --upgrade pip - pip install slither-analyzer==0.10.3 diff --git a/.github/actions/setup-solc-select/action.yaml b/.github/actions/setup-solc-select/action.yaml deleted file mode 100644 index b74ffae018d..00000000000 --- a/.github/actions/setup-solc-select/action.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: Setup Solc Select -description: Installs Solc Select, required versions and selects the version to use. Requires Python 3.6 or higher. -inputs: - to_install: - description: Comma-separated list of solc versions to install - required: true - to_use: - description: Solc version to use - required: true - -runs: - using: composite - steps: - - name: Install solc-select and solc - shell: bash - run: | - pip3 install solc-select - sudo ln -s /usr/local/bin/solc-select /usr/bin/solc-select - - IFS=',' read -ra versions <<< "${{ inputs.to_install }}" - for version in "${versions[@]}"; do - solc-select install $version - if [ $? -ne 0 ]; then - echo "Failed to install Solc $version" - exit 1 - fi - done - - solc-select install ${{ inputs.to_use }} - solc-select use ${{ inputs.to_use }} diff --git a/.github/actions/validate-solidity-artifacts/action.yaml b/.github/actions/validate-solidity-artifacts/action.yaml deleted file mode 100644 index 5357a87f96b..00000000000 --- a/.github/actions/validate-solidity-artifacts/action.yaml +++ /dev/null @@ -1,115 +0,0 @@ -name: Validate Solidity Artifacts -description: Checks whether Slither reports and UML diagrams were generated for all necessary files. If not, a warning is printed in job summary, but the job is not marked as failed. 
-inputs: - slither_reports_path: - description: Path to the Slither reports directory (without trailing slash) - required: true - uml_diagrams_path: - description: Path to the UML diagrams directory (without trailing slash) - required: true - validate_slither_reports: - description: Whether Slither reports should be validated - required: true - validate_uml_diagrams: - description: Whether UML diagrams should be validated - required: true - sol_files: - description: Comma-separated (CSV) or space-separated (shell) list of Solidity files to check - required: true - -runs: - using: composite - steps: - - name: Transform input array - id: transform_input_array - shell: bash - run: | - is_csv_format() { - local input="$1" - if [[ "$input" =~ "," ]]; then - return 0 - else - return 1 - fi - } - - is_space_separated_string() { - local input="$1" - if [[ "$input" =~ ^[^[:space:]]+([[:space:]][^[:space:]]+)*$ ]]; then - return 0 - else - return 1 - fi - } - - array="${{ inputs.sol_files }}" - - if is_csv_format "$array"; then - echo "::debug::CSV format detected, nothing to do" - echo "sol_files=$array" >> $GITHUB_OUTPUT - exit 0 - fi - - if is_space_separated_string "$array"; then - echo "::debug::Space-separated format detected, converting to CSV" - csv_array="${array// /,}" - echo "sol_files=$csv_array" >> $GITHUB_OUTPUT - exit 0 - fi - - echo "::error::Invalid input format for sol_files. Please provide a comma-separated (CSV) or space-separated (shell) list of Solidity files" - exit 1 - - - name: Validate UML diagrams - if: ${{ inputs.validate_uml_diagrams == 'true' }} - shell: bash - run: | - echo "Validating UML diagrams" - IFS=',' read -r -a modified_files <<< "${{ steps.transform_input_array.outputs.sol_files }}" - missing_svgs=() - for file in "${modified_files[@]}"; do - svg_file="$(basename "${file%.sol}").svg" - if [ ! -f "${{ inputs.uml_diagrams_path }}/$svg_file" ]; then - echo "Error: UML diagram for $file not found" - missing_svgs+=("$file") - fi - done - - if [ ${#missing_svgs[@]} -gt 0 ]; then - echo "Error: Missing UML diagrams for files: ${missing_svgs[@]}" - echo "# Warning!" >> $GITHUB_STEP_SUMMARY - echo "## Reason: Missing UML diagrams for files:" >> $GITHUB_STEP_SUMMARY - for file in "${missing_svgs[@]}"; do - echo " $file" >> $GITHUB_STEP_SUMMARY - done - echo "## Action required: Please try to generate artifacts for them locally or using a different tool" >> $GITHUB_STEP_SUMMARY - else - echo "All UML diagrams generated successfully" - fi - - - name: Validate Slither reports - if: ${{ inputs.validate_slither_reports == 'true' }} - shell: bash - run: | - echo "Validating Slither reports" - IFS=',' read -r -a modified_files <<< "${{ steps.transform_input_array.outputs.sol_files }}" - missing_reports=() - for file in "${modified_files[@]}"; do - report_file="$(basename "${file%.sol}")-slither-report.md" - if [ ! -f "${{ inputs.slither_reports_path }}/$report_file" ]; then - echo "Error: Slither report for $file not found" - missing_reports+=("$file") - fi - done - - if [ ${#missing_reports[@]} -gt 0 ]; then - echo "Error: Missing Slither reports for files: ${missing_reports[@]}" - echo "# Warning!" 
>> $GITHUB_STEP_SUMMARY - echo "## Reason: Missing Slither reports for files:" >> $GITHUB_STEP_SUMMARY - for file in "${missing_reports[@]}"; do - echo " $file" >> $GITHUB_STEP_SUMMARY - done - echo "## Action required: Please try to generate artifacts for them locally" >> $GITHUB_STEP_SUMMARY - else - echo "All Slither reports generated successfully" - fi diff --git a/.github/scripts/functions.sh b/.github/scripts/functions.sh deleted file mode 100644 index 53b53392269..00000000000 --- a/.github/scripts/functions.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# Function to convert a comma-separated list into a TOML array format. -# Usage: convert_to_toml_array "elem1,elem2,elem3" -# Effect: "a,b,c" -> ["a","b","c"] -function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" -} \ No newline at end of file diff --git a/.github/scripts/jira/axios.ts b/.github/scripts/jira/axios.ts deleted file mode 100644 index 3a912797713..00000000000 --- a/.github/scripts/jira/axios.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { - AxiosRequestConfig, - AxiosResponse, - AxiosError, - InternalAxiosRequestConfig, -} from "axios"; -import { Readable } from "stream"; - -interface AxiosErrorFormat { - config: Pick; - code?: string; - response: Partial, (typeof RESPONSE_KEYS)[number]>>; - isAxiosError: boolean; -} - -interface AxiosErrorFormatError - extends Error, - AxiosErrorFormat {} - -export function formatAxiosError( - origErr: AxiosError -): AxiosErrorFormatError { - const { message, name, stack, code, config, response, isAxiosError } = - origErr; - - const err: AxiosErrorFormatError = { - ...new Error(message), - name, - stack, - code, - isAxiosError, - config: {}, - response: {}, - }; - - for (const k of CONFIG_KEYS) { - if (config?.[k] === undefined) { - continue; - } - - err.config[k] = formatValue(config[k], k); - } - - for (const k of RESPONSE_KEYS) { - if (response?.[k] === undefined) { - continue; - } - - err.response[k] = formatValue(response[k], k); - } - - return err as any; -} - -const CONFIG_KEYS: (keyof InternalAxiosRequestConfig)[] = [ - "url", - "method", - "baseURL", - "params", - "data", - "timeout", - "timeoutErrorMessage", - "withCredentials", - "auth", - "responseType", - "xsrfCookieName", - "xsrfHeaderName", - "maxContentLength", - "maxBodyLength", - "maxRedirects", - "socketPath", - "proxy", - "decompress", -] as const; - -const RESPONSE_KEYS: (keyof AxiosResponse)[] = [ - "data", - "status", - "statusText", -] as const; - -function formatValue( - value: any, - key: (typeof CONFIG_KEYS)[number] | (typeof RESPONSE_KEYS)[number] -): any { - if (key !== "data") { - return value; - } - - if (process.env.BROWSER !== "true") { - if (value instanceof Readable) { - return "[Readable]"; - } - } - - return value; -} diff --git a/.github/scripts/jira/create-jira-traceability.ts b/.github/scripts/jira/create-jira-traceability.ts deleted file mode 100644 index cda038a7cc9..00000000000 --- a/.github/scripts/jira/create-jira-traceability.ts +++ /dev/null @@ -1,215 +0,0 @@ -import * as jira from "jira.js"; -import { - createJiraClient, - extractJiraIssueNumbersFrom, - generateIssueLabel, - generateJiraIssuesLink, - getJiraEnvVars, - handleError, -} from "./lib"; -import * as core from "@actions/core"; - -/** - * Extracts the list of changeset files. 
Intended to be used with https://github.com/dorny/paths-filter with - * the 'csv' output format. - * - * @returns An array of strings representing the changeset files. - * @throws {Error} If the required environment variable CHANGESET_FILES is missing. - * @throws {Error} If no changeset file exists. - */ -function extractChangesetFiles(): string[] { - const changesetFiles = process.env.CHANGESET_FILES; - if (!changesetFiles) { - throw Error("Missing required environment variable CHANGESET_FILES"); - } - const parsedChangesetFiles = changesetFiles.split(","); - if (parsedChangesetFiles.length === 0) { - throw Error("At least one changeset file must exist"); - } - - core.info( - `Changeset to extract issues from: ${parsedChangesetFiles.join(", ")}` - ); - return parsedChangesetFiles; -} - -/** - * Adds traceability to JIRA issues by commenting on each issue with a link to the artifact payload - * along with a label to connect all issues to the same chainlink product review. - * - * @param client The jira client - * @param issues The list of JIRA issue numbers to add traceability to - * @param label The label to add to each issue - * @param artifactUrl The url to the artifact payload that we'll comment on each issue with - */ -async function addTraceabillityToJiraIssues( - client: jira.Version3Client, - issues: string[], - label: string, - artifactUrl: string -) { - for (const issue of issues) { - await checkAndAddArtifactPayloadComment(client, issue, artifactUrl); - - // CHECK: We don't need to see if the label exists, should no-op - core.info(`Adding label ${label} to issue ${issue}`); - await client.issues.editIssue({ - issueIdOrKey: issue, - update: { - labels: [{ add: label }], - }, - }); - } -} - -/** - * Checks if the artifact payload already exists as a comment on the issue, if not, adds it. - */ -async function checkAndAddArtifactPayloadComment( - client: jira.Version3.Version3Client, - issue: string, - artifactUrl: string -) { - const maxResults = 5000; - const getCommentsResponse = await client.issueComments.getComments({ - issueIdOrKey: issue, - maxResults, // this is the default maxResults, see https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-comments/#api-rest-api-3-issue-issueidorkey-comment-get - }); - core.debug(JSON.stringify(getCommentsResponse.comments)); - if ((getCommentsResponse.total ?? 
0) > maxResults) { - throw Error( - `Too many (${getCommentsResponse.total}) comments on issue ${issue}, please increase maxResults (${maxResults})` - ); - } - - // Search path is getCommentsResponse.comments[].body.content[].content[].marks[].attrs.href - // - // Example: - // [ // getCommentsResponse.comments - // { - // body: { - // type: "doc", - // version: 1, - // content: [ - // { - // type: "paragraph", - // content: [ - // { - // type: "text", - // text: "Artifact URL", - // marks: [ - // { - // type: "link", - // attrs: { - // href: "https://github.com/smartcontractkit/chainlink/actions/runs/10517121836/artifacts/1844867108", - // }, - // }, - // ], - // }, - // ], - // }, - // ], - // }, - // }, - // ]; - const commentExists = getCommentsResponse.comments?.some((c) => - c?.body?.content?.some((innerContent) => - innerContent?.content?.some((c) => - c.marks?.some((m) => m.attrs?.href === artifactUrl) - ) - ) - ); - - if (commentExists) { - core.info(`Artifact payload already exists as comment on issue, skipping`); - } else { - core.info(`Adding artifact payload as comment on issue ${issue}`); - await client.issueComments.addComment({ - issueIdOrKey: issue, - comment: { - type: "doc", - version: 1, - content: [ - { - type: "paragraph", - content: [ - { - type: "text", - text: "Artifact Download URL", - marks: [ - { - type: "link", - attrs: { - href: artifactUrl, - }, - }, - ], - }, - ], - }, - ], - }, - }); - } -} - -function fetchEnvironmentVariables() { - const product = process.env.CHAINLINK_PRODUCT; - if (!product) { - throw Error("CHAINLINK_PRODUCT environment variable is missing"); - } - const baseRef = process.env.BASE_REF; - if (!baseRef) { - throw Error("BASE_REF environment variable is missing"); - } - const headRef = process.env.HEAD_REF; - if (!headRef) { - throw Error("HEAD_REF environment variable is missing"); - } - - const artifactUrl = process.env.ARTIFACT_URL; - if (!artifactUrl) { - throw Error("ARTIFACT_URL environment variable is missing"); - } - return { product, baseRef, headRef, artifactUrl }; -} - -/** - * For all affected jira issues listed within the changeset files supplied, - * we update each jira issue so that they are all labelled and have a comment linking them - * to the relevant artifact URL. 
- */ -async function main() { - const { product, baseRef, headRef, artifactUrl } = - fetchEnvironmentVariables(); - const changesetFiles = extractChangesetFiles(); - core.info( - `Extracting Jira issue numbers from changeset files: ${changesetFiles.join( - ", " - )}` - ); - const jiraIssueNumbers = await extractJiraIssueNumbersFrom(changesetFiles); - - const client = createJiraClient(); - const label = generateIssueLabel(product, baseRef, headRef); - try { - await addTraceabillityToJiraIssues( - client, - jiraIssueNumbers, - label, - artifactUrl - ); - } catch (e) { - handleError(e); - - process.exit(1); - } - - const { jiraHost } = getJiraEnvVars(); - core.summary.addLink( - "Jira Issues", - generateJiraIssuesLink(`${jiraHost}/issues/`, label) - ); - core.summary.write(); -} -main(); diff --git a/.github/scripts/jira/enforce-jira-issue.ts b/.github/scripts/jira/enforce-jira-issue.ts deleted file mode 100644 index 153f397e021..00000000000 --- a/.github/scripts/jira/enforce-jira-issue.ts +++ /dev/null @@ -1,119 +0,0 @@ -import * as core from "@actions/core"; -import jira from "jira.js"; -import { createJiraClient, getGitTopLevel, handleError, parseIssueNumberFrom } from "./lib"; -import { promises as fs } from "fs"; -import { join } from "path"; - -async function doesIssueExist( - client: jira.Version3Client, - issueNumber: string, - dryRun: boolean -) { - const payload = { - issueIdOrKey: issueNumber, - }; - - if (dryRun) { - core.info("Dry run enabled, skipping JIRA issue enforcement"); - return true; - } - - try { - /** - * The issue is identified by its ID or key, however, if the identifier doesn't match an issue, a case-insensitive search and check for moved issues is performed. - * If a matching issue is found its details are returned, a 302 or other redirect is not returned. The issue key returned in the response is the key of the issue found. - */ - const issue = await client.issues.getIssue(payload); - core.debug( - `JIRA issue id:${issue.id} key: ${issue.key} found while querying for ${issueNumber}` - ); - if (issue.key !== issueNumber) { - core.error( - `JIRA issue key ${issueNumber} not found, but found issue key ${issue.key} instead. This can happen if the identifier doesn't match an issue, in which case a case-insensitive search and check for moved issues is performed. Make sure the issue key is correct.` - ); - return false; - } - - return true; - } catch (e) { - handleError(e) - return false; - } -} - -async function main() { - const prTitle = process.env.PR_TITLE; - const commitMessage = process.env.COMMIT_MESSAGE; - const branchName = process.env.BRANCH_NAME; - const dryRun = !!process.env.DRY_RUN; - const { changesetFile } = extractChangesetFile(); - - const client = createJiraClient(); - - // Checks for the Jira issue number and exit if it can't find it - const issueNumber = parseIssueNumberFrom(prTitle, commitMessage, branchName); - if (!issueNumber) { - const msg = - "No JIRA issue number found in PR title, commit message, or branch name. 
This pull request must be associated with a JIRA issue."; - - core.setFailed(msg); - return; - } - - const exists = await doesIssueExist(client, issueNumber, dryRun); - if (!exists) { - core.setFailed( - `JIRA issue ${issueNumber} not found, this pull request must be associated with a JIRA issue.` - ); - return; - } - - core.info(`Appending JIRA issue ${issueNumber} to changeset file`); - await appendIssueNumberToChangesetFile(changesetFile, issueNumber); -} - -async function appendIssueNumberToChangesetFile( - changesetFile: string, - issueNumber: string -) { - const gitTopLevel = await getGitTopLevel(); - const fullChangesetPath = join(gitTopLevel, changesetFile); - const changesetContents = await fs.readFile(fullChangesetPath, "utf-8"); - // Check if the issue number is already in the changeset file - if (changesetContents.includes(issueNumber)) { - core.info("Issue number already exists in changeset file, skipping..."); - return; - } - - const updatedChangesetContents = `${changesetContents}\n\n${issueNumber}`; - await fs.writeFile(fullChangesetPath, updatedChangesetContents); -} - -function extractChangesetFile() { - const changesetFiles = process.env.CHANGESET_FILES; - if (!changesetFiles) { - throw Error("Missing required environment variable CHANGESET_FILES"); - } - const parsedChangesetFiles = JSON.parse(changesetFiles); - if (parsedChangesetFiles.length !== 1) { - throw Error( - "This action only supports one changeset file per pull request." - ); - } - const [changesetFile] = parsedChangesetFiles; - - return { changesetFile }; -} - -async function run() { - try { - await main(); - } catch (error) { - if (error instanceof Error) { - return core.setFailed(error.message); - } - core.setFailed(error as any); - } -} - -run(); diff --git a/.github/scripts/jira/lib.test.ts b/.github/scripts/jira/lib.test.ts deleted file mode 100644 index 6ef629a53ed..00000000000 --- a/.github/scripts/jira/lib.test.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { expect, describe, it, vi } from "vitest"; -import { - generateIssueLabel, - generateJiraIssuesLink, - getGitTopLevel, - parseIssueNumberFrom, - tagsToLabels, -} from "./lib"; -import * as core from "@actions/core"; - -describe("parseIssueNumberFrom", () => { - it("should return the first JIRA issue number found", () => { - let r = parseIssueNumberFrom("CORE-123", "CORE-456", "CORE-789"); - expect(r).to.equal("CORE-123"); - - r = parseIssueNumberFrom( - "2f3df5gf", - "chore/test-RE-78-branch", - "RE-78 Create new test branches" - ); - expect(r).to.equal("RE-78"); - - // handle lower case - r = parseIssueNumberFrom("core-123", "CORE-456", "CORE-789"); - expect(r).to.equal("CORE-123"); - }); - - it("works with multiline commit bodies", () => { - const r = parseIssueNumberFrom( - `This is a multiline commit body - -CORE-1011`, - "CORE-456", - "CORE-789" - ); - expect(r).to.equal("CORE-1011"); - }); - - it("should return undefined if no JIRA issue number is found", () => { - const result = parseIssueNumberFrom("No issue number"); - expect(result).to.be.undefined; - }); - - it("works when the label is in the middle of the commit message", () => { - let r = parseIssueNumberFrom( - "This is a commit message with CORE-123 in the middle", - "CORE-456", - "CORE-789" - ); - expect(r).to.equal("CORE-123"); - - r = parseIssueNumberFrom( - "#internal address security vulnerabilities RE-2917 around updating nodes and node operators on capabilities registry" - ); - expect(r).to.equal("RE-2917"); - }); -}); - -describe("tagsToLabels", () => { - it("should convert an 
array of tags to an array of labels", () => { - const tags = ["v1.0.0", "v1.1.0"]; - const result = tagsToLabels(tags); - expect(result).to.deep.equal([ - { add: "core-release/1.0.0" }, - { add: "core-release/1.1.0" }, - ]); - }); -}); - -const mockExecPromise = vi.fn(); -vi.mock("util", () => ({ - promisify: () => mockExecPromise, -})); - -describe("getGitTopLevel", () => { - it("should log the top-level directory when git command succeeds", async () => { - mockExecPromise.mockResolvedValueOnce({ - stdout: "/path/to/top-level-dir", - stderr: "", - }); - - const mockConsoleLog = vi.spyOn(core, "info"); - await getGitTopLevel(); - - expect(mockExecPromise).toHaveBeenCalledWith( - "git rev-parse --show-toplevel" - ); - expect(mockConsoleLog).toHaveBeenCalledWith( - "Top-level directory: /path/to/top-level-dir" - ); - }); - - it("should log an error message when git command fails", async () => { - mockExecPromise.mockRejectedValueOnce({ - message: "Command failed", - }); - - const mockConsoleError = vi.spyOn(core, "error"); - await getGitTopLevel().catch(() => {}); - - expect(mockExecPromise).toHaveBeenCalledWith( - "git rev-parse --show-toplevel" - ); - expect(mockConsoleError).toHaveBeenCalledWith( - "Error executing command: Command failed" - ); - }); - - it("should log an error message when git command output contains an error", async () => { - mockExecPromise.mockResolvedValueOnce({ - stdout: "", - stderr: "Error: Command failed", - }); - - const mockConsoleError = vi.spyOn(core, "error"); - await getGitTopLevel().catch(() => {}); - - expect(mockExecPromise).toHaveBeenCalledWith( - "git rev-parse --show-toplevel" - ); - expect(mockConsoleError).toHaveBeenCalledWith( - "Error in command output: Error: Command failed" - ); - }); -}); - -describe("generateJiraIssuesLink", () => { - it("should generate a Jira issues link", () => { - expect( - generateJiraIssuesLink( - "https://smartcontract-it.atlassian.net/issues/", - "review-artifacts-automation-base:0de9b3b-head:e5b3b9d" - ) - ).toMatchInlineSnapshot( - `"https://smartcontract-it.atlassian.net/issues/?jql=labels+%3D+%22review-artifacts-automation-base%3A0de9b3b-head%3Ae5b3b9d%22"` - ); - }); -}); - -describe("generateIssueLabel", () => { - it("should generate an issue label", () => { - const product = "automation"; - const baseRef = "0de9b3b"; - const headRef = "e5b3b9d"; - expect(generateIssueLabel(product, baseRef, headRef)).toMatchInlineSnapshot( - `"review-artifacts-automation-base:0de9b3b-head:e5b3b9d"` - ); - }); -}); diff --git a/.github/scripts/jira/lib.ts b/.github/scripts/jira/lib.ts deleted file mode 100644 index 8be295ab14f..00000000000 --- a/.github/scripts/jira/lib.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { readFile } from "fs/promises"; -import * as core from "@actions/core"; -import * as jira from "jira.js"; -import { exec } from "child_process"; -import { promisify } from "util"; -import { join } from "path"; -import { isAxiosError } from "axios"; -import { formatAxiosError } from "./axios"; -export function generateJiraIssuesLink(baseUrl: string, label: string) { - // https://smartcontract-it.atlassian.net/issues/?jql=labels%20%3D%20%22review-artifacts-automation-base%3A8d818ea265ff08887e61ace4f83364a3ee149ef0-head%3A3c45b71f3610de28f429cef0163936eaa448e63c%22 - const jqlQuery = `labels = "${label}"`; - const fullUrl = new URL(baseUrl); - fullUrl.searchParams.set("jql", jqlQuery); - - const urlStr = fullUrl.toString(); - core.info(`Jira issues link: ${urlStr}`); - return urlStr; -} - -export function 
generateIssueLabel(
-  product: string,
-  baseRef: string,
-  headRef: string
-) {
-  return `review-artifacts-${product}-base:${baseRef}-head:${headRef}`;
-}
-
-export async function getGitTopLevel(): Promise<string> {
-  const execPromise = promisify(exec);
-  try {
-    const { stdout, stderr } = await execPromise(
-      "git rev-parse --show-toplevel"
-    );
-
-    if (stderr) {
-      const msg = `Error in command output: ${stderr}`;
-      core.error(msg);
-      throw Error(msg);
-    }
-
-    const topLevelDir = stdout.trim();
-    core.info(`Top-level directory: ${topLevelDir}`);
-    return topLevelDir;
-  } catch (error) {
-    const msg = `Error executing command: ${(error as any).message}`;
-    core.error(msg);
-    throw Error(msg);
-  }
-}
-
-/**
- * Given a list of strings, this function will return the first JIRA issue number it finds.
- *
- * @example parseIssueNumberFrom("CORE-123", "CORE-456", "CORE-789") => "CORE-123"
- * @example parseIssueNumberFrom("2f3df5gf", "chore/test-RE-78-branch", "RE-78 Create new test branches") => "RE-78"
- */
-export function parseIssueNumberFrom(
-  ...inputs: (string | undefined)[]
-): string | undefined {
-  function parse(str?: string) {
-    const jiraIssueRegex = /[A-Z]{2,}-\d+/;
-
-    return str?.toUpperCase().match(jiraIssueRegex)?.[0];
-  }
-
-  core.debug(`Parsing issue number from: ${inputs.join(", ")}`);
-  const parsed: string[] = inputs.map(parse).filter((x) => x !== undefined);
-  core.debug(`Found issue number: ${parsed[0]}`);
-
-  return parsed[0];
-}
-
-export async function extractJiraIssueNumbersFrom(filePaths: string[]) {
-  const issueNumbers: string[] = [];
-  const gitTopLevel = await getGitTopLevel();
-
-  for (const path of filePaths) {
-    const fullPath = join(gitTopLevel, path);
-    core.info(`Reading file: ${fullPath}`);
-    const content = await readFile(fullPath, "utf-8");
-    const issueNumber = parseIssueNumberFrom(content);
-    core.info(`Extracted issue number: ${issueNumber}`);
-    if (issueNumber) {
-      issueNumbers.push(issueNumber);
-    }
-  }
-
-  return issueNumbers;
-}
-
-/**
- * Converts an array of tags to an array of labels.
- *
- * A label is a string that is formatted as `core-release/{tag}`, with the leading `v` removed from the tag.
- *
- * @example tagsToLabels(["v1.0.0", "v1.1.0"]) => [{ add: "core-release/1.0.0" }, { add: "core-release/1.1.0" }]
- */
-export function tagsToLabels(tags: string[]) {
-  const labelPrefix = "core-release";
-
-  return tags.map((t) => ({
-    add: `${labelPrefix}/${t.substring(1)}`,
-  }));
-}
-
-export function getJiraEnvVars() {
-  const jiraHost = process.env.JIRA_HOST;
-  const jiraUserName = process.env.JIRA_USERNAME;
-  const jiraApiToken = process.env.JIRA_API_TOKEN;
-
-  if (!jiraHost || !jiraUserName || !jiraApiToken) {
-    core.setFailed(
-      "Error: Missing required environment variables: JIRA_HOST and JIRA_USERNAME and JIRA_API_TOKEN."
- ); - process.exit(1); - } - - return { jiraHost, jiraUserName, jiraApiToken }; -} - -export function createJiraClient() { - const { jiraHost, jiraUserName, jiraApiToken } = getJiraEnvVars(); - return new jira.Version3Client({ - host: jiraHost, - authentication: { - basic: { - email: jiraUserName, - apiToken: jiraApiToken, - }, - }, - }); -} - -export function handleError(e: unknown) { - if (e instanceof Error) { - if (isAxiosError(e)) { - core.error(formatAxiosError(e)); - } else if (isAxiosError(e.cause)) { - core.error(formatAxiosError(e.cause)); - } else { - core.error(e); - } - } else { - core.error(JSON.stringify(e)); - } - core.setFailed("Error adding traceability to Jira issues"); -} diff --git a/.github/scripts/jira/package.json b/.github/scripts/jira/package.json deleted file mode 100644 index 6081e489818..00000000000 --- a/.github/scripts/jira/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "jira", - "version": "0.1.0", - "description": "Updates Jira issue with release information like the version and tags for a PR.", - "main": "update-jira-issue.js", - "type": "module", - "private": true, - "keywords": [], - "author": "", - "license": "MIT", - "engines": { - "node": ">=18", - "pnpm": ">=9" - }, - "scripts": { - "issue:update": "tsx update-jira-issue.ts", - "issue:enforce": "tsx enforce-jira-issue.ts", - "issue:traceability": "tsx create-jira-traceability.ts", - "test": "vitest" - }, - "dependencies": { - "@actions/core": "^1.10.1", - "jira.js": "^4.0.1", - "tsx": "^4.16.2" - }, - "devDependencies": { - "@types/node": "^20.14.10", - "typescript": "^5.5.3", - "vitest": "^2.0.3" - }, - "peerDependencies": { - "axios": "^1.7.7" - } -} diff --git a/.github/scripts/jira/pnpm-lock.yaml b/.github/scripts/jira/pnpm-lock.yaml deleted file mode 100644 index a52fa9dd0c8..00000000000 --- a/.github/scripts/jira/pnpm-lock.yaml +++ /dev/null @@ -1,1114 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - dependencies: - '@actions/core': - specifier: ^1.10.1 - version: 1.10.1 - axios: - specifier: ^1.7.7 - version: 1.7.7 - jira.js: - specifier: ^4.0.1 - version: 4.0.1 - tsx: - specifier: ^4.16.2 - version: 4.16.2 - devDependencies: - '@types/node': - specifier: ^20.14.10 - version: 20.14.10 - typescript: - specifier: ^5.5.3 - version: 5.5.3 - vitest: - specifier: ^2.0.3 - version: 2.0.3(@types/node@20.14.10) - -packages: - - '@actions/core@1.10.1': - resolution: {integrity: sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==} - - '@actions/http-client@2.2.1': - resolution: {integrity: sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==} - - '@ampproject/remapping@2.3.0': - resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} - engines: {node: '>=6.0.0'} - - '@esbuild/aix-ppc64@0.21.5': - resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [aix] - - '@esbuild/android-arm64@0.21.5': - resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} - engines: {node: '>=12'} - cpu: [arm64] - os: [android] - - '@esbuild/android-arm@0.21.5': - resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} - engines: {node: '>=12'} - cpu: 
[arm] - os: [android] - - '@esbuild/android-x64@0.21.5': - resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - - '@esbuild/darwin-arm64@0.21.5': - resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - - '@esbuild/darwin-x64@0.21.5': - resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - - '@esbuild/freebsd-arm64@0.21.5': - resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - - '@esbuild/freebsd-x64@0.21.5': - resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - - '@esbuild/linux-arm64@0.21.5': - resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - - '@esbuild/linux-arm@0.21.5': - resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - - '@esbuild/linux-ia32@0.21.5': - resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - - '@esbuild/linux-loong64@0.21.5': - resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - - '@esbuild/linux-mips64el@0.21.5': - resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - - '@esbuild/linux-ppc64@0.21.5': - resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - - '@esbuild/linux-riscv64@0.21.5': - resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - - '@esbuild/linux-s390x@0.21.5': - resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - - '@esbuild/linux-x64@0.21.5': - resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [linux] - - '@esbuild/netbsd-x64@0.21.5': - resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - - '@esbuild/openbsd-x64@0.21.5': - resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - - '@esbuild/sunos-x64@0.21.5': - resolution: {integrity: 
sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - - '@esbuild/win32-arm64@0.21.5': - resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - - '@esbuild/win32-ia32@0.21.5': - resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - - '@esbuild/win32-x64@0.21.5': - resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - - '@fastify/busboy@2.1.1': - resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} - engines: {node: '>=14'} - - '@jridgewell/gen-mapping@0.3.5': - resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==} - engines: {node: '>=6.0.0'} - - '@jridgewell/resolve-uri@3.1.2': - resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} - engines: {node: '>=6.0.0'} - - '@jridgewell/set-array@1.2.1': - resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} - engines: {node: '>=6.0.0'} - - '@jridgewell/sourcemap-codec@1.5.0': - resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} - - '@jridgewell/trace-mapping@0.3.25': - resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} - - '@rollup/rollup-android-arm-eabi@4.18.1': - resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==} - cpu: [arm] - os: [android] - - '@rollup/rollup-android-arm64@4.18.1': - resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==} - cpu: [arm64] - os: [android] - - '@rollup/rollup-darwin-arm64@4.18.1': - resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==} - cpu: [arm64] - os: [darwin] - - '@rollup/rollup-darwin-x64@4.18.1': - resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==} - cpu: [x64] - os: [darwin] - - '@rollup/rollup-linux-arm-gnueabihf@4.18.1': - resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==} - cpu: [arm] - os: [linux] - - '@rollup/rollup-linux-arm-musleabihf@4.18.1': - resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==} - cpu: [arm] - os: [linux] - - '@rollup/rollup-linux-arm64-gnu@4.18.1': - resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==} - cpu: [arm64] - os: [linux] - - '@rollup/rollup-linux-arm64-musl@4.18.1': - resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==} - cpu: [arm64] - os: [linux] - - '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': - resolution: {integrity: 
sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==} - cpu: [ppc64] - os: [linux] - - '@rollup/rollup-linux-riscv64-gnu@4.18.1': - resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==} - cpu: [riscv64] - os: [linux] - - '@rollup/rollup-linux-s390x-gnu@4.18.1': - resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==} - cpu: [s390x] - os: [linux] - - '@rollup/rollup-linux-x64-gnu@4.18.1': - resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==} - cpu: [x64] - os: [linux] - - '@rollup/rollup-linux-x64-musl@4.18.1': - resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==} - cpu: [x64] - os: [linux] - - '@rollup/rollup-win32-arm64-msvc@4.18.1': - resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==} - cpu: [arm64] - os: [win32] - - '@rollup/rollup-win32-ia32-msvc@4.18.1': - resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==} - cpu: [ia32] - os: [win32] - - '@rollup/rollup-win32-x64-msvc@4.18.1': - resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==} - cpu: [x64] - os: [win32] - - '@types/estree@1.0.5': - resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} - - '@types/node@20.14.10': - resolution: {integrity: sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==} - - '@vitest/expect@2.0.3': - resolution: {integrity: sha512-X6AepoOYePM0lDNUPsGXTxgXZAl3EXd0GYe/MZyVE4HzkUqyUVC6S3PrY5mClDJ6/7/7vALLMV3+xD/Ko60Hqg==} - - '@vitest/pretty-format@2.0.3': - resolution: {integrity: sha512-URM4GLsB2xD37nnTyvf6kfObFafxmycCL8un3OC9gaCs5cti2u+5rJdIflZ2fUJUen4NbvF6jCufwViAFLvz1g==} - - '@vitest/runner@2.0.3': - resolution: {integrity: sha512-EmSP4mcjYhAcuBWwqgpjR3FYVeiA4ROzRunqKltWjBfLNs1tnMLtF+qtgd5ClTwkDP6/DGlKJTNa6WxNK0bNYQ==} - - '@vitest/snapshot@2.0.3': - resolution: {integrity: sha512-6OyA6v65Oe3tTzoSuRPcU6kh9m+mPL1vQ2jDlPdn9IQoUxl8rXhBnfICNOC+vwxWY684Vt5UPgtcA2aPFBb6wg==} - - '@vitest/spy@2.0.3': - resolution: {integrity: sha512-sfqyAw/ypOXlaj4S+w8689qKM1OyPOqnonqOc9T91DsoHbfN5mU7FdifWWv3MtQFf0lEUstEwR9L/q/M390C+A==} - - '@vitest/utils@2.0.3': - resolution: {integrity: sha512-c/UdELMuHitQbbc/EVctlBaxoYAwQPQdSNwv7z/vHyBKy2edYZaFgptE27BRueZB7eW8po+cllotMNTDpL3HWg==} - - assertion-error@2.0.1: - resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} - engines: {node: '>=12'} - - asynckit@0.4.0: - resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - - axios@1.7.7: - resolution: {integrity: sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==} - - cac@6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} - - chai@5.1.1: - resolution: {integrity: sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==} - engines: {node: '>=12'} - - check-error@2.1.1: - resolution: {integrity: 
sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} - engines: {node: '>= 16'} - - combined-stream@1.0.8: - resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} - engines: {node: '>= 0.8'} - - cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} - - debug@4.3.5: - resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - - deep-eql@5.0.2: - resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} - engines: {node: '>=6'} - - delayed-stream@1.0.0: - resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} - engines: {node: '>=0.4.0'} - - esbuild@0.21.5: - resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} - engines: {node: '>=12'} - hasBin: true - - estree-walker@3.0.3: - resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} - - execa@8.0.1: - resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} - engines: {node: '>=16.17'} - - follow-redirects@1.15.6: - resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} - engines: {node: '>=4.0'} - peerDependencies: - debug: '*' - peerDependenciesMeta: - debug: - optional: true - - form-data@4.0.0: - resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} - engines: {node: '>= 6'} - - fsevents@2.3.3: - resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - - get-func-name@2.0.2: - resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} - - get-stream@8.0.1: - resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} - engines: {node: '>=16'} - - get-tsconfig@4.7.5: - resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} - - human-signals@5.0.0: - resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} - engines: {node: '>=16.17.0'} - - is-stream@3.0.0: - resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - - jira.js@4.0.1: - resolution: {integrity: sha512-2zf8LozW9rgx5wgTdGSJMhUXDK1g8a/ngm1xDWnREX/h8kuBhNkMro4XELA2XRVvaNTbRMIK3PBgOvWFDddhIw==} - - loupe@3.1.1: - resolution: {integrity: sha512-edNu/8D5MKVfGVFRhFf8aAxiTM6Wumfz5XsaatSxlD3w4R1d/WEKUTydCdPGbl9K7QG/Ca3GnDV2sIKIpXRQcw==} - - magic-string@0.30.10: - resolution: {integrity: 
sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} - - merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - - mime-db@1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - - mime-types@2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - - mimic-fn@4.0.0: - resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} - engines: {node: '>=12'} - - ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - - nanoid@3.3.7: - resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - - npm-run-path@5.3.0: - resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - onetime@6.0.0: - resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} - engines: {node: '>=12'} - - path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - - path-key@4.0.0: - resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} - engines: {node: '>=12'} - - pathe@1.1.2: - resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} - - pathval@2.0.0: - resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==} - engines: {node: '>= 14.16'} - - picocolors@1.0.1: - resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} - - postcss@8.4.39: - resolution: {integrity: sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==} - engines: {node: ^10 || ^12 || >=14} - - proxy-from-env@1.1.0: - resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} - - resolve-pkg-maps@1.0.0: - resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} - - rollup@4.18.1: - resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} - hasBin: true - - shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} - - shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - - siginfo@2.0.0: - resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} - - signal-exit@4.1.0: - resolution: {integrity: 
sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} - engines: {node: '>=14'} - - source-map-js@1.2.0: - resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} - engines: {node: '>=0.10.0'} - - stackback@0.0.2: - resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} - - std-env@3.7.0: - resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} - - strip-final-newline@3.0.0: - resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} - engines: {node: '>=12'} - - tinybench@2.8.0: - resolution: {integrity: sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==} - - tinypool@1.0.0: - resolution: {integrity: sha512-KIKExllK7jp3uvrNtvRBYBWBOAXSX8ZvoaD8T+7KB/QHIuoJW3Pmr60zucywjAlMb5TeXUkcs/MWeWLu0qvuAQ==} - engines: {node: ^18.0.0 || >=20.0.0} - - tinyrainbow@1.2.0: - resolution: {integrity: sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==} - engines: {node: '>=14.0.0'} - - tinyspy@3.0.0: - resolution: {integrity: sha512-q5nmENpTHgiPVd1cJDDc9cVoYN5x4vCvwT3FMilvKPKneCBZAxn2YWQjDF0UMcE9k0Cay1gBiDfTMU0g+mPMQA==} - engines: {node: '>=14.0.0'} - - tslib@2.6.3: - resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} - - tsx@4.16.2: - resolution: {integrity: sha512-C1uWweJDgdtX2x600HjaFaucXTilT7tgUZHbOE4+ypskZ1OP8CRCSDkCxG6Vya9EwaFIVagWwpaVAn5wzypaqQ==} - engines: {node: '>=18.0.0'} - hasBin: true - - tunnel@0.0.6: - resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} - engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} - - typescript@5.5.3: - resolution: {integrity: sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==} - engines: {node: '>=14.17'} - hasBin: true - - undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - - undici@5.28.4: - resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} - engines: {node: '>=14.0'} - - uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - - vite-node@2.0.3: - resolution: {integrity: sha512-14jzwMx7XTcMB+9BhGQyoEAmSl0eOr3nrnn+Z12WNERtOvLN+d2scbRUvyni05rT3997Bg+rZb47NyP4IQPKXg==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - - vite@5.3.3: - resolution: {integrity: sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 - less: '*' - lightningcss: ^1.21.0 - sass: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - lightningcss: - optional: true - sass: - optional: true - stylus: - optional: true - sugarss: - optional: true - terser: - optional: true - - vitest@2.0.3: - resolution: {integrity: sha512-o3HRvU93q6qZK4rI2JrhKyZMMuxg/JRt30E6qeQs6ueaiz5hr1cPj+Sk2kATgQzMMqsa2DiNI0TIK++1ULx8Jw==} - engines: {node: ^18.0.0 || 
>=20.0.0} - hasBin: true - peerDependencies: - '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': 2.0.3 - '@vitest/ui': 2.0.3 - happy-dom: '*' - jsdom: '*' - peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@types/node': - optional: true - '@vitest/browser': - optional: true - '@vitest/ui': - optional: true - happy-dom: - optional: true - jsdom: - optional: true - - which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - - why-is-node-running@2.3.0: - resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} - engines: {node: '>=8'} - hasBin: true - -snapshots: - - '@actions/core@1.10.1': - dependencies: - '@actions/http-client': 2.2.1 - uuid: 8.3.2 - - '@actions/http-client@2.2.1': - dependencies: - tunnel: 0.0.6 - undici: 5.28.4 - - '@ampproject/remapping@2.3.0': - dependencies: - '@jridgewell/gen-mapping': 0.3.5 - '@jridgewell/trace-mapping': 0.3.25 - - '@esbuild/aix-ppc64@0.21.5': - optional: true - - '@esbuild/android-arm64@0.21.5': - optional: true - - '@esbuild/android-arm@0.21.5': - optional: true - - '@esbuild/android-x64@0.21.5': - optional: true - - '@esbuild/darwin-arm64@0.21.5': - optional: true - - '@esbuild/darwin-x64@0.21.5': - optional: true - - '@esbuild/freebsd-arm64@0.21.5': - optional: true - - '@esbuild/freebsd-x64@0.21.5': - optional: true - - '@esbuild/linux-arm64@0.21.5': - optional: true - - '@esbuild/linux-arm@0.21.5': - optional: true - - '@esbuild/linux-ia32@0.21.5': - optional: true - - '@esbuild/linux-loong64@0.21.5': - optional: true - - '@esbuild/linux-mips64el@0.21.5': - optional: true - - '@esbuild/linux-ppc64@0.21.5': - optional: true - - '@esbuild/linux-riscv64@0.21.5': - optional: true - - '@esbuild/linux-s390x@0.21.5': - optional: true - - '@esbuild/linux-x64@0.21.5': - optional: true - - '@esbuild/netbsd-x64@0.21.5': - optional: true - - '@esbuild/openbsd-x64@0.21.5': - optional: true - - '@esbuild/sunos-x64@0.21.5': - optional: true - - '@esbuild/win32-arm64@0.21.5': - optional: true - - '@esbuild/win32-ia32@0.21.5': - optional: true - - '@esbuild/win32-x64@0.21.5': - optional: true - - '@fastify/busboy@2.1.1': {} - - '@jridgewell/gen-mapping@0.3.5': - dependencies: - '@jridgewell/set-array': 1.2.1 - '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 - - '@jridgewell/resolve-uri@3.1.2': {} - - '@jridgewell/set-array@1.2.1': {} - - '@jridgewell/sourcemap-codec@1.5.0': {} - - '@jridgewell/trace-mapping@0.3.25': - dependencies: - '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 - - '@rollup/rollup-android-arm-eabi@4.18.1': - optional: true - - '@rollup/rollup-android-arm64@4.18.1': - optional: true - - '@rollup/rollup-darwin-arm64@4.18.1': - optional: true - - '@rollup/rollup-darwin-x64@4.18.1': - optional: true - - '@rollup/rollup-linux-arm-gnueabihf@4.18.1': - optional: true - - '@rollup/rollup-linux-arm-musleabihf@4.18.1': - optional: true - - '@rollup/rollup-linux-arm64-gnu@4.18.1': - optional: true - - '@rollup/rollup-linux-arm64-musl@4.18.1': - optional: true - - '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': - optional: true - - '@rollup/rollup-linux-riscv64-gnu@4.18.1': - optional: true - - '@rollup/rollup-linux-s390x-gnu@4.18.1': - optional: true - - '@rollup/rollup-linux-x64-gnu@4.18.1': - optional: true - - '@rollup/rollup-linux-x64-musl@4.18.1': - optional: true - - 
'@rollup/rollup-win32-arm64-msvc@4.18.1': - optional: true - - '@rollup/rollup-win32-ia32-msvc@4.18.1': - optional: true - - '@rollup/rollup-win32-x64-msvc@4.18.1': - optional: true - - '@types/estree@1.0.5': {} - - '@types/node@20.14.10': - dependencies: - undici-types: 5.26.5 - - '@vitest/expect@2.0.3': - dependencies: - '@vitest/spy': 2.0.3 - '@vitest/utils': 2.0.3 - chai: 5.1.1 - tinyrainbow: 1.2.0 - - '@vitest/pretty-format@2.0.3': - dependencies: - tinyrainbow: 1.2.0 - - '@vitest/runner@2.0.3': - dependencies: - '@vitest/utils': 2.0.3 - pathe: 1.1.2 - - '@vitest/snapshot@2.0.3': - dependencies: - '@vitest/pretty-format': 2.0.3 - magic-string: 0.30.10 - pathe: 1.1.2 - - '@vitest/spy@2.0.3': - dependencies: - tinyspy: 3.0.0 - - '@vitest/utils@2.0.3': - dependencies: - '@vitest/pretty-format': 2.0.3 - estree-walker: 3.0.3 - loupe: 3.1.1 - tinyrainbow: 1.2.0 - - assertion-error@2.0.1: {} - - asynckit@0.4.0: {} - - axios@1.7.7: - dependencies: - follow-redirects: 1.15.6 - form-data: 4.0.0 - proxy-from-env: 1.1.0 - transitivePeerDependencies: - - debug - - cac@6.7.14: {} - - chai@5.1.1: - dependencies: - assertion-error: 2.0.1 - check-error: 2.1.1 - deep-eql: 5.0.2 - loupe: 3.1.1 - pathval: 2.0.0 - - check-error@2.1.1: {} - - combined-stream@1.0.8: - dependencies: - delayed-stream: 1.0.0 - - cross-spawn@7.0.3: - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - - debug@4.3.5: - dependencies: - ms: 2.1.2 - - deep-eql@5.0.2: {} - - delayed-stream@1.0.0: {} - - esbuild@0.21.5: - optionalDependencies: - '@esbuild/aix-ppc64': 0.21.5 - '@esbuild/android-arm': 0.21.5 - '@esbuild/android-arm64': 0.21.5 - '@esbuild/android-x64': 0.21.5 - '@esbuild/darwin-arm64': 0.21.5 - '@esbuild/darwin-x64': 0.21.5 - '@esbuild/freebsd-arm64': 0.21.5 - '@esbuild/freebsd-x64': 0.21.5 - '@esbuild/linux-arm': 0.21.5 - '@esbuild/linux-arm64': 0.21.5 - '@esbuild/linux-ia32': 0.21.5 - '@esbuild/linux-loong64': 0.21.5 - '@esbuild/linux-mips64el': 0.21.5 - '@esbuild/linux-ppc64': 0.21.5 - '@esbuild/linux-riscv64': 0.21.5 - '@esbuild/linux-s390x': 0.21.5 - '@esbuild/linux-x64': 0.21.5 - '@esbuild/netbsd-x64': 0.21.5 - '@esbuild/openbsd-x64': 0.21.5 - '@esbuild/sunos-x64': 0.21.5 - '@esbuild/win32-arm64': 0.21.5 - '@esbuild/win32-ia32': 0.21.5 - '@esbuild/win32-x64': 0.21.5 - - estree-walker@3.0.3: - dependencies: - '@types/estree': 1.0.5 - - execa@8.0.1: - dependencies: - cross-spawn: 7.0.3 - get-stream: 8.0.1 - human-signals: 5.0.0 - is-stream: 3.0.0 - merge-stream: 2.0.0 - npm-run-path: 5.3.0 - onetime: 6.0.0 - signal-exit: 4.1.0 - strip-final-newline: 3.0.0 - - follow-redirects@1.15.6: {} - - form-data@4.0.0: - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - - fsevents@2.3.3: - optional: true - - get-func-name@2.0.2: {} - - get-stream@8.0.1: {} - - get-tsconfig@4.7.5: - dependencies: - resolve-pkg-maps: 1.0.0 - - human-signals@5.0.0: {} - - is-stream@3.0.0: {} - - isexe@2.0.0: {} - - jira.js@4.0.1: - dependencies: - axios: 1.7.7 - form-data: 4.0.0 - tslib: 2.6.3 - transitivePeerDependencies: - - debug - - loupe@3.1.1: - dependencies: - get-func-name: 2.0.2 - - magic-string@0.30.10: - dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 - - merge-stream@2.0.0: {} - - mime-db@1.52.0: {} - - mime-types@2.1.35: - dependencies: - mime-db: 1.52.0 - - mimic-fn@4.0.0: {} - - ms@2.1.2: {} - - nanoid@3.3.7: {} - - npm-run-path@5.3.0: - dependencies: - path-key: 4.0.0 - - onetime@6.0.0: - dependencies: - mimic-fn: 4.0.0 - - path-key@3.1.1: {} - - path-key@4.0.0: {} - - 
pathe@1.1.2: {} - - pathval@2.0.0: {} - - picocolors@1.0.1: {} - - postcss@8.4.39: - dependencies: - nanoid: 3.3.7 - picocolors: 1.0.1 - source-map-js: 1.2.0 - - proxy-from-env@1.1.0: {} - - resolve-pkg-maps@1.0.0: {} - - rollup@4.18.1: - dependencies: - '@types/estree': 1.0.5 - optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.18.1 - '@rollup/rollup-android-arm64': 4.18.1 - '@rollup/rollup-darwin-arm64': 4.18.1 - '@rollup/rollup-darwin-x64': 4.18.1 - '@rollup/rollup-linux-arm-gnueabihf': 4.18.1 - '@rollup/rollup-linux-arm-musleabihf': 4.18.1 - '@rollup/rollup-linux-arm64-gnu': 4.18.1 - '@rollup/rollup-linux-arm64-musl': 4.18.1 - '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1 - '@rollup/rollup-linux-riscv64-gnu': 4.18.1 - '@rollup/rollup-linux-s390x-gnu': 4.18.1 - '@rollup/rollup-linux-x64-gnu': 4.18.1 - '@rollup/rollup-linux-x64-musl': 4.18.1 - '@rollup/rollup-win32-arm64-msvc': 4.18.1 - '@rollup/rollup-win32-ia32-msvc': 4.18.1 - '@rollup/rollup-win32-x64-msvc': 4.18.1 - fsevents: 2.3.3 - - shebang-command@2.0.0: - dependencies: - shebang-regex: 3.0.0 - - shebang-regex@3.0.0: {} - - siginfo@2.0.0: {} - - signal-exit@4.1.0: {} - - source-map-js@1.2.0: {} - - stackback@0.0.2: {} - - std-env@3.7.0: {} - - strip-final-newline@3.0.0: {} - - tinybench@2.8.0: {} - - tinypool@1.0.0: {} - - tinyrainbow@1.2.0: {} - - tinyspy@3.0.0: {} - - tslib@2.6.3: {} - - tsx@4.16.2: - dependencies: - esbuild: 0.21.5 - get-tsconfig: 4.7.5 - optionalDependencies: - fsevents: 2.3.3 - - tunnel@0.0.6: {} - - typescript@5.5.3: {} - - undici-types@5.26.5: {} - - undici@5.28.4: - dependencies: - '@fastify/busboy': 2.1.1 - - uuid@8.3.2: {} - - vite-node@2.0.3(@types/node@20.14.10): - dependencies: - cac: 6.7.14 - debug: 4.3.5 - pathe: 1.1.2 - tinyrainbow: 1.2.0 - vite: 5.3.3(@types/node@20.14.10) - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - - vite@5.3.3(@types/node@20.14.10): - dependencies: - esbuild: 0.21.5 - postcss: 8.4.39 - rollup: 4.18.1 - optionalDependencies: - '@types/node': 20.14.10 - fsevents: 2.3.3 - - vitest@2.0.3(@types/node@20.14.10): - dependencies: - '@ampproject/remapping': 2.3.0 - '@vitest/expect': 2.0.3 - '@vitest/pretty-format': 2.0.3 - '@vitest/runner': 2.0.3 - '@vitest/snapshot': 2.0.3 - '@vitest/spy': 2.0.3 - '@vitest/utils': 2.0.3 - chai: 5.1.1 - debug: 4.3.5 - execa: 8.0.1 - magic-string: 0.30.10 - pathe: 1.1.2 - std-env: 3.7.0 - tinybench: 2.8.0 - tinypool: 1.0.0 - tinyrainbow: 1.2.0 - vite: 5.3.3(@types/node@20.14.10) - vite-node: 2.0.3(@types/node@20.14.10) - why-is-node-running: 2.3.0 - optionalDependencies: - '@types/node': 20.14.10 - transitivePeerDependencies: - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - - which@2.0.2: - dependencies: - isexe: 2.0.0 - - why-is-node-running@2.3.0: - dependencies: - siginfo: 2.0.0 - stackback: 0.0.2 diff --git a/.github/scripts/jira/tsconfig.json b/.github/scripts/jira/tsconfig.json deleted file mode 100644 index 3e3216e6e05..00000000000 --- a/.github/scripts/jira/tsconfig.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. 
*/ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "ES2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "commonjs", /* Specify what module code is generated. */ - // "rootDir": "./", /* Specify the root folder within your source files. */ - // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ - // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ - // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ - // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ - // "resolveJsonModule": true, /* Enable importing .json files. 
*/
-    // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
-    // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
-
-    /* JavaScript Support */
-    // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
-    // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
-    // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
-
-    /* Emit */
-    // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
-    // "declarationMap": true, /* Create sourcemaps for d.ts files. */
-    // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
-    // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
-    // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
-    // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
-    // "outDir": "./", /* Specify an output folder for all emitted files. */
-    // "removeComments": true, /* Disable emitting comments. */
-    // "noEmit": true, /* Disable emitting files from a compilation. */
-    // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
-    // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
-    // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
-    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
-    // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
-    // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
-    // "newLine": "crlf", /* Set the newline character for emitting files. */
-    // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
-    // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
-    // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
-    // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
-    // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
-
-    /* Interop Constraints */
-    // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
-    // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
-    // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */
-    // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
-    "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. 
This enables 'allowSyntheticDefaultImports' for type compatibility. */ - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ - - /* Type Checking */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ - } -} diff --git a/.github/scripts/jira/update-jira-issue.ts b/.github/scripts/jira/update-jira-issue.ts deleted file mode 100644 index 6e539c7ffa8..00000000000 --- a/.github/scripts/jira/update-jira-issue.ts +++ /dev/null @@ -1,77 +0,0 @@ -import * as core from "@actions/core"; -import jira from "jira.js"; -import { tagsToLabels, createJiraClient, parseIssueNumberFrom } from "./lib"; - -function updateJiraIssue( - client: jira.Version3Client, - issueNumber: string, - tags: string[], - fixVersionName: string, - dryRun: boolean -) { - const payload = { - issueIdOrKey: issueNumber, - update: { - labels: tagsToLabels(tags), - fixVersions: [{ set: [{ name: fixVersionName }] }], - }, - }; - - core.info( - `Updating JIRA issue ${issueNumber} with fix version ${fixVersionName} and labels [${payload.update.labels.join( - ", " - )}]` - ); - if (dryRun) { - core.info("Dry run enabled, skipping JIRA issue update"); - return; - } - - return client.issues.editIssue(payload); -} - -async function main() { - const prTitle = process.env.PR_TITLE; - const commitMessage = process.env.COMMIT_MESSAGE; - const branchName = process.env.BRANCH_NAME; - - const chainlinkVersion = process.env.CHAINLINK_VERSION; - const dryRun = !!process.env.DRY_RUN; - // tags are not getting used at the current moment so will always default to [] - const tags = process.env.FOUND_TAGS ? process.env.FOUND_TAGS.split(",") : []; - - const client = createJiraClient(); - - // Checks for the Jira issue number and exit if it can't find it - const issueNumber = parseIssueNumberFrom(prTitle, commitMessage, branchName); - if (!issueNumber) { - const msg = - "No JIRA issue number found in: PR title, commit message, or branch name. Please include the issue ID in one of these."; - - core.info(msg); - core.notice(msg); - core.setOutput("jiraComment", `> :medal_military: ${msg}`); - - return; - } - - const fixVersionName = `chainlink-v${chainlinkVersion}`; - await updateJiraIssue(client, issueNumber, tags, fixVersionName, dryRun); - - core.setOutput("jiraComment", ""); -} - -async function run() { - try { - await main(); - } catch (error) { - if (error instanceof Error) { - core.setFailed(error.message); - } - core.setFailed( - "Error: Failed to update JIRA issue with fix version and labels." 
-    );
-  }
-}
-
-run();
diff --git a/.github/workflows/changeset.yml b/.github/workflows/changeset.yml
index 0d8c3a828b6..9d953bc73a0 100644
--- a/.github/workflows/changeset.yml
+++ b/.github/workflows/changeset.yml
@@ -82,19 +82,34 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
+      # we need to set the top level directory for the jira-tracing action manually
+      # because now we are working with two repositories and automatic detection would
+      # select the repository with jira-tracing and not the chainlink repository
+      - name: Setup git top level directory
+        id: find-git-top-level-dir
+        run: echo "top_level_dir=$(pwd)" >> $GITHUB_OUTPUT
+
+      - name: Checkout .github repository
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
+        with:
+          repository: smartcontractkit/.github
+          ref: 228acc0a7f9f0092450a7673786462832bf3d19c
+          path: ./dot_github
+
       - name: Update Jira ticket for core
         id: jira
         if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }}
         shell: bash
-        working-directory: ./.github/scripts/jira
+        working-directory: ./dot_github/libs/jira-tracing
        run: |
          echo "COMMIT_MESSAGE=$(git log -1 --pretty=format:'%s')" >> $GITHUB_ENV
-          pnpm install && pnpm issue:update
+          pnpm install && pnpm issue:update
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          JIRA_HOST: ${{ vars.JIRA_HOST }}
          JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }}
          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
+          GIT_TOP_LEVEL_DIR: ${{ steps.find-git-top-level-dir.outputs.top_level_dir }}
          CHAINLINK_VERSION: ${{ steps.chainlink-version.outputs.chainlink_version }}
          PR_TITLE: ${{ github.event.pull_request.title }}
          BRANCH_NAME: ${{ github.event.pull_request.head.ref }}
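The workflow comment in the hunk above is the crux of this relocation: once the jira-tracing scripts run from the checked-out smartcontractkit/.github repository, auto-detection via `git rev-parse --show-toplevel` would resolve to that checkout rather than to the chainlink repository, so the workflow captures `$(pwd)` before the second checkout and hands it down as GIT_TOP_LEVEL_DIR. The following is a minimal TypeScript sketch of how a consuming helper might honor that override; it is an assumption about the jira-tracing library, whose actual implementation is not part of this diff, and resolveTopLevelDir is a hypothetical name.

// Hypothetical sketch, not the actual jira-tracing code: prefer the explicit
// GIT_TOP_LEVEL_DIR override from the workflow env block, and only fall back
// to git auto-detection when it is absent.
import { exec } from "child_process";
import { promisify } from "util";

export async function resolveTopLevelDir(): Promise<string> {
  const override = process.env.GIT_TOP_LEVEL_DIR;
  if (override) {
    // Set by the "Setup git top level directory" step before the second checkout.
    return override;
  }
  // Fallback: detect from the current working directory, as the deleted
  // getGitTopLevel() helper in lib.ts did.
  const { stdout } = await promisify(exec)("git rev-parse --show-toplevel");
  return stdout.trim();
}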
diff --git a/.github/workflows/solidity-foundry-artifacts.yml b/.github/workflows/solidity-foundry-artifacts.yml
index b8f56d977ee..2a3f6677b04 100644
--- a/.github/workflows/solidity-foundry-artifacts.yml
+++ b/.github/workflows/solidity-foundry-artifacts.yml
@@ -25,6 +25,11 @@ on:
        base_ref:
          description: 'commit or tag to use as base reference, when looking for modified Solidity files'
          required: true
+        link_with_jira:
+          description: 'link generated artifacts with Jira issues?'
+          type: boolean
+          default: true
+          required: false
 
 env:
   FOUNDRY_PROFILE: ci
@@ -70,12 +75,12 @@ jobs:
              - *ignored
            sol:
              - modified|added: 'contracts/src/v0.8/**/*.sol'
-              - *ignored
+              - *ignored
            product: &product
-              - modified|added: 'contracts/src/v0.8/${{ inputs.product }}/**/*.sol'
+              - modified|added: 'contracts/src/v0.8/${{ inputs.product }}/**/*.sol'
              - *ignored
            changeset:
-              - modified|added: 'contracts/.changeset/!(README)*.md'
+              - modified|added: 'contracts/.changeset/!(README)*.md'
 
       # Manual transformation needed, because shared contracts have a different folder structure
       - name: Transform modified files
@@ -113,18 +118,16 @@ jobs:
           sol_files: ${{ steps.changes-dorny.outputs.sol_files }}
           product: ${{ inputs.product }}
 
-  gather-basic-info:
-    name: Gather basic info
-    if: ${{ needs.changes.outputs.product_changes == 'true' }}
+  prepare-workflow-inputs:
+    name: Prepare workflow inputs
     runs-on: ubuntu-22.04
     needs: [ changes ]
     outputs:
       foundry_version: ${{ steps.extract-foundry-version.outputs.foundry-version }}
+      generate_code_coverage: ${{ steps.skip-code-coverage.outputs.generate_code_coverage }}
     steps:
       - name: Checkout the repo
         uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          fetch-depth: 0
 
       - name: Extract Foundry version
         id: extract-foundry-version
@@ -132,297 +135,35 @@ jobs:
         with:
           working-directory: contracts
 
-      - name: Copy modified changesets
-        if: ${{ needs.changes.outputs.changeset_changes == 'true' }}
-        run: |
-          mkdir -p contracts/changesets
-          files="${{ needs.changes.outputs.changeset_files }}"
-          IFS=","
-          for changeset in $files; do
-            echo "::debug:: Copying $changeset"
-            cp $changeset contracts/changesets
-          done
-
-      - name: Generate basic info and modified contracts list
-        shell: bash
-        run: |
-          echo "Product: ${{ inputs.product }}" > contracts/commit_sha_base_ref.txt
-          echo "Commit SHA used to generate artifacts: ${{ env.head_ref }}" >> contracts/commit_sha_base_ref.txt
-          echo "Base reference SHA used to find modified contracts: ${{ inputs.base_ref }}" >> contracts/commit_sha_base_ref.txt
-
-          IFS=',' read -r -a modified_files <<< "${{ needs.changes.outputs.product_files }}"
-          echo "# Modified contracts:" > contracts/modified_contracts.md
-          for file in "${modified_files[@]}"; do
-            echo " - [$file](${{ github.server_url }}/${{ github.repository }}/blob/${{ env.head_ref }}/$file)" >> contracts/modified_contracts.md
-            echo "$file" >> contracts/modified_contracts.txt
-          done
-
-      - name: Upload basic info and modified contracts list
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
-        timeout-minutes: 2
-        continue-on-error: true
-        with:
-          name: tmp-basic-info
-          path: |
-            contracts/modified_contracts.md
-            contracts/modified_contracts.txt
-            contracts/commit_sha_base_ref.txt
-            contracts/changesets
-          retention-days: 7
-
-  # some of the artifacts can only be generated on product level, and we cannot scope them to single contracts
-  # some product-level modifications might also require shared contracts changes, so if these happened we need to generate artifacts for shared contracts as well
-  coverage-and-book:
-    if: ${{ needs.changes.outputs.product_changes == 'true' }}
-    name: Generate Docs and Code Coverage reports
-    runs-on: ubuntu-22.04
-    needs: [changes, gather-basic-info]
-    steps:
-      - name: Prepare exclusion list
-        id: prepare-exclusion-list
+      - name: Should skip code coverage report
+        id: skip-code-coverage
         run: |
-          cat <<EOF > coverage_exclusions.json
-          ["automation", "functions", "vrf"]
-          EOF
-          coverage_exclusions=$(cat 
coverage_exclusions.json | jq -c .) - echo "coverage_exclusions=$coverage_exclusions" >> $GITHUB_OUTPUT - - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.head_ref }} - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Create directories - shell: bash - run: | - mkdir -p contracts/code-coverage - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.gather-basic-info.outputs.foundry_version }} - - # required for code coverage report generation - - name: Setup LCOV - uses: hrishikesh-kadam/setup-lcov@f5da1b26b0dcf5d893077a3c4f29cf78079c841d # v1.0.0 - - - name: Run Forge build for product contracts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - run: | - forge --version - forge build - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ inputs.product }} - - - name: Run coverage for product contracts - if: ${{ !contains(fromJson(steps.prepare-exclusion-list.outputs.coverage_exclusions), inputs.product) && needs.changes.outputs.product_changes == 'true' }} - working-directory: contracts - run: forge coverage --report lcov --report-file code-coverage/lcov.info - env: - FOUNDRY_PROFILE: ${{ inputs.product }} - - - name: Prune lcov report - if: ${{ !contains(fromJson(steps.prepare-exclusion-list.outputs.coverage_exclusions), inputs.product) && needs.changes.outputs.product_changes == 'true' }} - shell: bash - working-directory: contracts - run: | - ./scripts/lcov_prune ${{ inputs.product }} ./code-coverage/lcov.info ./code-coverage/lcov.info.pruned - - - name: Generate Code Coverage HTML report for product contracts - if: ${{ !contains(fromJson(steps.prepare-exclusion-list.outputs.coverage_exclusions), inputs.product) && needs.changes.outputs.product_changes == 'true' }} - shell: bash - working-directory: contracts - run: genhtml code-coverage/lcov.info.pruned --branch-coverage --output-directory code-coverage - - - name: Run Forge doc for product contracts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - run: forge doc --build -o docs - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ inputs.product }} - - - name: Upload Artifacts for product contracts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 2 - continue-on-error: true - with: - name: tmp-${{ inputs.product }}-artifacts - path: | - contracts/docs - contracts/code-coverage/lcov-.info - contracts/code-coverage - retention-days: 7 - - # Generates UML diagrams and Slither reports for modified contracts - uml-static-analysis: - if: ${{ needs.changes.outputs.product_changes == 'true' }} - name: Generate UML and Slither reports for modified contracts - runs-on: ubuntu-22.04 - needs: [changes, gather-basic-info] - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - ref: ${{ env.head_ref }} - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.gather-basic-info.outputs.foundry_version }} - - - name: Install Sol2uml - run: | - npm link sol2uml --only=production - - - name: Set up Python - uses: 
actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f #v5.1.1 - with: - python-version: '3.8' - - - name: Install solc-select and solc - uses: ./.github/actions/setup-solc-select - with: - to_install: '0.8.19' - to_use: '0.8.19' - - - name: Install Slither - uses: ./.github/actions/setup-slither - - - name: Generate UML - shell: bash - run: | - contract_list="${{ needs.changes.outputs.product_files }}" - - # modify remappings so that solc can find dependencies - ./contracts/scripts/ci/modify_remappings.sh contracts contracts/remappings.txt - mv remappings_modified.txt remappings.txt - - ./contracts/scripts/ci/generate_uml.sh "./" "contracts/uml-diagrams" "$contract_list" - - - name: Generate Slither Markdown reports - run: | - contract_list="${{ needs.changes.outputs.product_files }}" - - # without it Slither sometimes fails to use remappings correctly - cp contracts/foundry.toml foundry.toml - - echo "::debug::Processing contracts: $contract_list" - ./contracts/scripts/ci/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ env.head_ref }}/" contracts/configs/slither/.slither.config-artifacts.json "." "$contract_list" "contracts/slither-reports" "--solc-remaps @=contracts/node_modules/@" - - - name: Upload UMLs and Slither reports - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 10 - continue-on-error: true - with: - name: tmp-contracts-artifacts - path: | - contracts/uml-diagrams - contracts/slither-reports - retention-days: 7 - - - name: Validate if all Slither run for all contracts - uses: ./.github/actions/validate-solidity-artifacts - with: - validate_slither_reports: 'true' - validate_uml_diagrams: 'true' - slither_reports_path: 'contracts/slither-reports' - uml_diagrams_path: 'contracts/uml-diagrams' - sol_files: ${{ needs.changes.outputs.product_files }} - - gather-all-artifacts: - name: Gather all artifacts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - runs-on: ubuntu-latest - needs: [coverage-and-book, uml-static-analysis, gather-basic-info, changes] - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.head_ref }} - - - name: Download all artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - path: review_artifacts - merge-multiple: true - - - name: Upload all artifacts as single package - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - with: - name: review-artifacts-${{ inputs.product }}-${{ inputs.base_ref }}-${{ env.head_ref }} - path: review_artifacts - - - name: Remove temporary artifacts - uses: geekyeggo/delete-artifact@24928e75e6e6590170563b8ddae9fac674508aa1 # v5.0 - with: - name: tmp-* - - - name: Print Artifact URL in job summary - id: gather-all-artifacts - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - ARTIFACTS=$(gh api -X GET repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts) - ARTIFACT_ID=$(echo "$ARTIFACTS" | jq '.artifacts[] | select(.name=="review-artifacts-${{ inputs.product }}-${{ inputs.base_ref }}-${{ env.head_ref }}") | .id') - echo "Artifact ID: $ARTIFACT_ID" - - echo "# Solidity Review Artifact Generated" >> $GITHUB_STEP_SUMMARY - echo "Product: **${{ inputs.product }}**" >> $GITHUB_STEP_SUMMARY - echo "Base Ref used: **${{ inputs.base_ref }}**" >> $GITHUB_STEP_SUMMARY - echo "Commit SHA used: **${{ env.head_ref }}**" >> $GITHUB_STEP_SUMMARY - - 
artifact_url="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts/$ARTIFACT_ID" - echo "[Artifact URL]($artifact_url)" >> $GITHUB_STEP_SUMMARY - echo "artifact-url=$artifact_url" >> $GITHUB_OUTPUT - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Setup Jira - working-directory: ./.github/scripts/jira - run: pnpm i - - - name: Create Traceability - working-directory: ./.github/scripts/jira - run: | - pnpm issue:traceability - env: - CHANGESET_FILES: ${{ needs.changes.outputs.changeset_files }} - CHAINLINK_PRODUCT: ${{ inputs.product }} - BASE_REF: ${{ inputs.base_ref }} - HEAD_REF: ${{ env.head_ref }} - ARTIFACT_URL: ${{ steps.gather-all-artifacts.outputs.artifact-url }} - - JIRA_HOST: ${{ vars.JIRA_HOST }} - JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} - - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if [[ "${{ inputs.product }}" = "automation" || "${{ inputs.product }}" = "vrf" || "${{ inputs.product }}" = "functions" ]]; then + echo "generate_code_coverage=false" >> $GITHUB_OUTPUT + else + echo "generate_code_coverage=true" >> $GITHUB_OUTPUT + fi - notify-no-changes: - if: ${{ needs.changes.outputs.product_changes == 'false' }} - needs: [changes] - runs-on: ubuntu-latest - steps: - - name: Print warning in job summary - shell: bash - run: | - echo "# Solidity Review Artifact NOT Generated" >> $GITHUB_STEP_SUMMARY - echo "Base Ref used: **${{ inputs.base_ref }}**" >> $GITHUB_STEP_SUMMARY - echo "Commit SHA used: **${{ env.head_ref }}**" >> $GITHUB_STEP_SUMMARY - echo "## Reason: No modified Solidity files found for ${{ inputs.product }}" >> $GITHUB_STEP_SUMMARY - echo "* no modified Solidity files found between ${{ inputs.base_ref }} and ${{ env.head_ref }} commits" >> $GITHUB_STEP_SUMMARY - echo "* or they are located outside of ./contracts/src/v0.8 folder" >> $GITHUB_STEP_SUMMARY - echo "* or they were limited to test files" >> $GITHUB_STEP_SUMMARY - exit 1 + generate-artifacts: + name: Generate Solidity Review Artifacts + needs: [changes, prepare-workflow-inputs] + uses: smartcontractkit/.github/.github/workflows/solidity-review-artifacts.yml@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 + with: + product: ${{ inputs.product }} + commit_to_use: ${{ inputs.commit_to_use }} + base_ref: ${{ inputs.base_ref }} + product_changes: ${{ needs.changes.outputs.product_changes }} + product_files: ${{ needs.changes.outputs.product_files }} + changeset_changes: ${{ needs.changes.outputs.changeset_changes }} + changeset_files: ${{ needs.changes.outputs.changeset_files }} + foundry_version: ${{ needs.prepare-workflow-inputs.outputs.foundry_version }} + contracts_directory: './contracts' + generate_code_coverage: ${{ needs.prepare-workflow-inputs.outputs.generate_code_coverage == 'true' }} + link_with_jira: ${{ inputs.link_with_jira }} + jira_host: ${{ vars.JIRA_HOST }} + install_semver: false + slither_config_file_path: 'contracts/configs/slither/.slither.config-artifacts.json' + lcov_prune_script_path: 'scripts/lcov_prune' + secrets: + jira_username: ${{ secrets.JIRA_USERNAME }} + jira_api_token: ${{ secrets.JIRA_API_TOKEN }} diff --git a/.github/workflows/solidity-foundry.yml b/.github/workflows/solidity-foundry.yml index 4b2d7b9b96f..e7e80893107 100644 --- a/.github/workflows/solidity-foundry.yml +++ b/.github/workflows/solidity-foundry.yml @@ -40,7 +40,7 @@ jobs: { "name": "vrf", "setup": { "run-coverage": false, "min-coverage": 98.5, "run-gas-snapshot": 
false, "run-forge-fmt": false }} ] EOF - + matrix=$(cat matrix.json | jq -c .) echo "matrix=$matrix" >> $GITHUB_OUTPUT @@ -74,7 +74,7 @@ jobs: list-files: 'shell' filters: | non_src: - - '.github/workflows/solidity-foundry.yml' + - '.github/workflows/solidity-foundry.yml' - 'contracts/foundry.toml' - 'contracts/gas-snapshots/*.gas-snapshot' - 'contracts/package.json' @@ -130,7 +130,7 @@ jobs: - '!contracts/src/v0.8/*.t.sol' - '!contracts/src/v0.8/**/testhelpers/**' - '!contracts/src/v0.8/testhelpers/**' - - '!contracts/src/v0.8/vendor/**' + - '!contracts/src/v0.8/vendor/**' tests: if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} @@ -161,6 +161,8 @@ jobs: || contains(fromJson(needs.changes.outputs.all_changes), 'shared') || needs.changes.outputs.non_src_changes == 'true' }} uses: ./.github/actions/setup-nodejs + with: + prod: "true" - name: Install Foundry if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) @@ -271,10 +273,15 @@ jobs: if: needs.changes.outputs.not_test_sol_modified == 'true' runs-on: ubuntu-22.04 steps: - - name: Checkout the repo + - name: Checkout this repository + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + + - name: Checkout .github repository uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 with: - submodules: recursive + repository: smartcontractkit/.github + ref: b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 + path: ./dot_github - name: Setup NodeJS uses: ./.github/actions/setup-nodejs @@ -290,26 +297,26 @@ jobs: python-version: '3.8' - name: Install solc-select and solc - uses: ./.github/actions/setup-solc-select + uses: smartcontractkit/.github/actions/setup-solc-select@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 with: - to_install: '0.8.19' - to_use: '0.8.19' + to_install: '0.8.24' + to_use: '0.8.24' - name: Install Slither - uses: ./.github/actions/setup-slither + uses: smartcontractkit/.github/actions/setup-slither@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 - name: Run Slither shell: bash - run: | + run: | # modify remappings so that solc can find dependencies - ./contracts/scripts/ci/modify_remappings.sh contracts contracts/remappings.txt + ./dot_github/tools/scripts/solidity/modify_remappings.sh contracts contracts/remappings.txt mv remappings_modified.txt remappings.txt - + # without it Slither sometimes fails to use remappings correctly - cp contracts/foundry.toml foundry.toml + cp contracts/foundry.toml foundry.toml + + FILES="${{ needs.changes.outputs.not_test_sol_modified_files }}" - FILES="${{ needs.changes.outputs.not_test_sol_modified_files }}" - for FILE in $FILES; do PRODUCT=$(echo "$FILE" | awk -F'src/[^/]*/' '{print $2}' | cut -d'/' -f1) echo "::debug::Running Slither for $FILE in $PRODUCT" @@ -318,7 +325,7 @@ jobs: echo "::debug::No Slither config found for $PRODUCT, using default" SLITHER_CONFIG="contracts/configs/slither/.slither.config-default-pr.json" fi - ./contracts/scripts/ci/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" "$SLITHER_CONFIG" "." 
"$FILE" "contracts/slither-reports-current" "--solc-remaps @=contracts/node_modules/@" + ./dot_github/tools/scripts/solidity/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" "$SLITHER_CONFIG" "./contracts" "$FILE" "contracts/slither-reports-current" "--solc-remaps @=contracts/node_modules/@" done # all the actions below, up to printing results, run only if any existing contracts were modified @@ -341,7 +348,7 @@ jobs: continue-on-error: true with: name: tmp-slither-scripts-${{ github.sha }} - path: contracts/scripts/ci + path: ./dot_github/tools/scripts/solidity retention-days: 7 - name: Upload configs @@ -354,7 +361,7 @@ jobs: path: contracts/configs retention-days: 7 - - name: Checkout the repo + - name: Checkout earlier version of this repository if: needs.changes.outputs.sol_mod_only == 'true' uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 with: @@ -365,7 +372,7 @@ jobs: uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: name: tmp-slither-scripts-${{ github.sha }} - path: contracts/scripts/ci + path: ./dot_github/tools/scripts/solidity - name: Download configs if: needs.changes.outputs.sol_mod_only == 'true' @@ -384,19 +391,19 @@ jobs: shell: bash run: | # we need to set file permission again since they are lost during download - for file in contracts/scripts/ci/*.sh; do + for file in ./dot_github/tools/scripts/solidity/*.sh; do chmod +x "$file" done - + # modify remappings so that solc can find dependencies - ./contracts/scripts/ci/modify_remappings.sh contracts contracts/remappings.txt + ./dot_github/tools/scripts/solidity/modify_remappings.sh contracts contracts/remappings.txt mv remappings_modified.txt remappings.txt - + # without it Slither sometimes fails to use remappings correctly cp contracts/foundry.toml foundry.toml - + FILES="${{ needs.changes.outputs.sol_mod_only_files }}" - + for FILE in $FILES; do PRODUCT=$(echo "$FILE" | awk -F'src/[^/]*/' '{print $2}' | cut -d'/' -f1) echo "::debug::Running Slither for $FILE in $PRODUCT" @@ -405,8 +412,8 @@ jobs: echo "::debug::No Slither config found for $PRODUCT, using default" SLITHER_CONFIG="contracts/configs/slither/.slither.config-default-pr.json" fi - ./contracts/scripts/ci/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" "$SLITHER_CONFIG" "." "$FILE" "contracts/slither-reports-base-ref" "--solc-remaps @=contracts/node_modules/@" - done + ./dot_github/tools/scripts/solidity/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" "$SLITHER_CONFIG" "./contracts" "$FILE" "contracts/slither-reports-base-ref" "--solc-remaps @=contracts/node_modules/@" + done - name: Upload Slither report if: needs.changes.outputs.sol_mod_only == 'true' @@ -438,19 +445,19 @@ jobs: current_report="contracts/slither-reports-current/$filename" new_issues_report="contracts/slither-reports-current/${filename%.md}_new_issues.md" if [ -f "$current_report" ]; then - if ./contracts/scripts/ci/find_slither_report_diff.sh "$base_report" "$current_report" "$new_issues_report" "contracts/scripts/ci/prompt-difference.md" "contracts/scripts/ci/prompt-validation.md"; then - if [[ -s $new_issues_report ]]; then - awk 'NR==2{print "*This new issues report has been automatically generated by LLM model using two Slither reports. 
One based on `${{ github.base_ref}}` and another on `${{ github.sha }}` commits.*"}1' $new_issues_report > tmp.md && mv tmp.md $new_issues_report - echo "Replacing full Slither report with diff for $current_report" + if ./contracts/scripts/ci/find_slither_report_diff.sh "$base_report" "$current_report" "$new_issues_report" "contracts/scripts/ci/prompt-difference.md" "contracts/scripts/ci/prompt-validation.md"; then + if [[ -s $new_issues_report ]]; then + awk 'NR==2{print "*This new issues report has been automatically generated by LLM model using two Slither reports. One based on `${{ github.base_ref}}` and another on `${{ github.sha }}` commits.*"}1' $new_issues_report > tmp.md && mv tmp.md $new_issues_report + echo "Replacing full Slither report with diff for $current_report" rm $current_report && mv $new_issues_report $current_report - else + else echo "No difference detected between $base_report and $current_report reports. Won't include any of them." rm $current_report fi else echo "::warning::Failed to generate a diff report with new issues for $base_report using an LLM model, will use full report." fi - + else echo "::warning::Failed to find current commit's equivalent of $base_report (file $current_report doesn't exist, but should have been generated). Please check Slither logs." fi @@ -468,7 +475,7 @@ jobs: done - name: Validate if all Slither run for all contracts - uses: ./.github/actions/validate-solidity-artifacts + uses: smartcontractkit/.github/actions/validate-solidity-artifacts@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 with: validate_slither_reports: 'true' slither_reports_path: 'contracts/slither-reports-current' @@ -506,9 +513,9 @@ jobs: ARTIFACTS=$(gh api -X GET repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts) ARTIFACT_ID=$(echo "$ARTIFACTS" | jq '.artifacts[] | select(.name=="slither-reports-${{ github.sha }}") | .id') echo "Artifact ID: $ARTIFACT_ID" - + slither_artifact_url="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts/$ARTIFACT_ID" - echo "slither_artifact_url=$slither_artifact_url" >> $GITHUB_OUTPUT + echo "slither_artifact_url=$slither_artifact_url" >> $GITHUB_OUTPUT - name: Create or update Slither comment in the PR uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 @@ -518,7 +525,7 @@ jobs: body: | ## Static analysis results are available Hey @${{ github.event.push && github.event.push.pusher && github.event.push.pusher.username || github.actor }}, you can view Slither reports in the job summary [here](${{ steps.job-summary-url.outputs.job_summary_url }}) or download them as artifact [here](${{ steps.build-slither-artifact-url.outputs.slither_artifact_url }}). - + Please check them before merging and make sure you have addressed all issues. 
edit-mode: replace diff --git a/.github/workflows/solidity-tracability.yml b/.github/workflows/solidity-tracability.yml index 9c61d4adbc2..2110dd7dd64 100644 --- a/.github/workflows/solidity-tracability.yml +++ b/.github/workflows/solidity-tracability.yml @@ -91,23 +91,38 @@ jobs: - name: Setup NodeJS uses: ./.github/actions/setup-nodejs + - name: Checkout .Github repository + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + with: + repository: smartcontractkit/.github + ref: b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 + path: ./dot_github + + # we need to set the top level directory for the jira-tracing action manually + # because now we are working with two repositories and automatic detection would + # select the repository with jira-tracing and not the chainlink repository + - name: Setup git top level directory + id: find-git-top-level-dir + run: echo "top_level_dir=$(pwd)" >> $GITHUB_OUTPUT + - name: Setup Jira - working-directory: ./.github/scripts/jira - run: pnpm i + working-directory: ./dot_github + run: pnpm install --filter jira-tracing # Because of our earlier checks, we know that both the source and changeset files have changed - name: Enforce Traceability - working-directory: ./.github/scripts/jira + working-directory: ./dot_github run: | echo "COMMIT_MESSAGE=$(git log -1 --pretty=format:'%s')" >> $GITHUB_ENV - pnpm issue:enforce + pnpm --filter jira-tracing issue:enforce env: CHANGESET_FILES: ${{ needs.files-changed.outputs.changesets_files }} + GIT_TOP_LEVEL_DIR: ${{ steps.find-git-top-level-dir.outputs.top_level_dir }} PR_TITLE: ${{ github.event.pull_request.title }} BRANCH_NAME: ${{ github.event.pull_request.head.ref }} - JIRA_HOST: ${{ vars.JIRA_HOST }} + JIRA_HOST: ${{ vars.JIRA_HOST }} JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }} JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} @@ -119,7 +134,7 @@ jobs: commit_message: "[Bot] Update changeset file with jira issue" repo: ${{ github.repository }} branch: ${{ github.head_ref }} - file_pattern: "contracts/.changeset/*" + file_pattern: "contracts/.changeset/*" env: GITHUB_TOKEN: ${{ steps.get-gh-token.outputs.access-token }} diff --git a/.github/workflows/solidity-wrappers.yml b/.github/workflows/solidity-wrappers.yml index bbd7ac0c670..5f593f3a334 100644 --- a/.github/workflows/solidity-wrappers.yml +++ b/.github/workflows/solidity-wrappers.yml @@ -2,7 +2,7 @@ name: Solidity Wrappers # This is its own workflow file rather than being merged into "solidity.yml" to avoid over complicating the conditionals # used for job execution. The jobs in "solidity.yml" are configured around push events, whereas # we only want to generate gethwrappers during pull requests. -on: +on: pull_request: types: - opened @@ -31,7 +31,7 @@ jobs: # On a pull request event, make updates to gethwrappers if there are changes. 
update-wrappers: needs: [changes] - if: needs.changes.outputs.changes == 'true' + if: needs.changes.outputs.changes == 'true' name: Update Wrappers permissions: actions: read @@ -65,7 +65,7 @@ jobs: - name: Commit any wrapper changes uses: planetscale/ghcommit-action@21a8cda29f55e5cc2cdae0cdbdd08e38dd148c25 # v0.1.37 with: - commit_message: "Update gethwrappers" + commit_message: "Update gethwrappers" repo: ${{ github.repository }} branch: ${{ github.head_ref }} file_pattern: "core/gethwrappers/**/generated/*.go core/gethwrappers/**/generated-wrapper-dependency-versions-do-not-edit.txt" @@ -80,5 +80,5 @@ jobs: org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Update Wrappers + this-job-name: Update Wrappers continue-on-error: true diff --git a/contracts/.changeset/quiet-moles-retire.md b/contracts/.changeset/quiet-moles-retire.md new file mode 100644 index 00000000000..f40eda6a0e5 --- /dev/null +++ b/contracts/.changeset/quiet-moles-retire.md @@ -0,0 +1,8 @@ +--- +'@chainlink/contracts': patch +--- + +Use reusable workflow for Solidity Artifacts pipeline, move some actions to chainlink-github-actions repository + + +TT-1693 \ No newline at end of file diff --git a/contracts/.tool-versions b/contracts/.tool-versions new file mode 100644 index 00000000000..dfe63496b38 --- /dev/null +++ b/contracts/.tool-versions @@ -0,0 +1 @@ +nodejs 20.13.1 diff --git a/contracts/scripts/ci/generate_slither_report.sh b/contracts/scripts/ci/generate_slither_report.sh deleted file mode 100755 index bc876ae1182..00000000000 --- a/contracts/scripts/ci/generate_slither_report.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -function check_chainlink_dir() { - local param_dir="chainlink" - current_dir=$(pwd) - - current_base=$(basename "$current_dir") - - if [[ "$current_base" != "$param_dir" ]]; then - >&2 echo "The script must be run from the root of $param_dir directory" - exit 1 - fi -} - -check_chainlink_dir - -if [ "$#" -lt 5 ]; then - >&2 echo "Generates Markdown Slither reports and saves them to a target directory." - >&2 echo "Usage: $0 [slither extra params]" - exit 1 -fi - -REPO_URL=$1 -CONFIG_FILE=$2 -SOURCE_DIR=$3 -FILES=${4// /} # Remove any spaces from the list of files -TARGET_DIR=$5 -SLITHER_EXTRA_PARAMS=$6 - -run_slither() { - local FILE=$1 - local TARGET_DIR=$2 - - if [[ ! -f "$FILE" ]]; then - >&2 echo "::error:File not found: $FILE" - return 1 - fi - - set +e - source ./contracts/scripts/ci/select_solc_version.sh "$FILE" - if [[ $? -ne 0 ]]; then - >&2 echo "::error::Failed to select Solc version for $FILE" - return 1 - fi - - SLITHER_OUTPUT_FILE="$TARGET_DIR/$(basename "${FILE%.sol}")-slither-report.md" - if ! output=$(slither --config-file "$CONFIG_FILE" "$FILE" --checklist --markdown-root "$REPO_URL" --fail-none $SLITHER_EXTRA_PARAMS); then - >&2 echo "::warning::Slither failed for $FILE" - return 0 - fi - set -e - output=$(echo "$output" | sed '/\*\*THIS CHECKLIST IS NOT COMPLETE\*\*. Use `--show-ignored-findings` to show all the results./d' | sed '/Summary/d') - - echo "# Summary for $FILE" > "$SLITHER_OUTPUT_FILE" - echo "$output" >> "$SLITHER_OUTPUT_FILE" - - if [[ -z "$output" ]]; then - echo "No issues found." 
>> "$SLITHER_OUTPUT_FILE" - fi -} - -process_files() { - local SOURCE_DIR=$1 - local TARGET_DIR=$2 - local FILES=(${3//,/ }) # Split the comma-separated list into an array - - mkdir -p "$TARGET_DIR" - - for FILE in "${FILES[@]}"; do - FILE=${FILE//\"/} - run_slither "$SOURCE_DIR/$FILE" "$TARGET_DIR" - done -} - -set +e -process_files "$SOURCE_DIR" "$TARGET_DIR" "${FILES[@]}" - -if [[ $? -ne 0 ]]; then - >&2 echo "::warning::Failed to generate some Slither reports" - exit 0 -fi - -echo "Slither reports saved in $TARGET_DIR folder" diff --git a/contracts/scripts/ci/generate_uml.sh b/contracts/scripts/ci/generate_uml.sh deleted file mode 100755 index c71d0a1ac7d..00000000000 --- a/contracts/scripts/ci/generate_uml.sh +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -function check_chainlink_dir() { - local param_dir="chainlink" - current_dir=$(pwd) - - current_base=$(basename "$current_dir") - - if [[ "$current_base" != "$param_dir" ]]; then - >&2 echo "The script must be run from the root of $param_dir directory" - exit 1 - fi -} - -check_chainlink_dir - -if [ "$#" -lt 2 ]; then - >&2 echo "Generates UML diagrams for all contracts in a directory after flattening them to avoid call stack overflows." - >&2 echo "Usage: $0 [comma-separated list of files]" - exit 1 -fi - -SOURCE_DIR="$1" -TARGET_DIR="$2" -FILES=${3// /} # Remove any spaces from the list of files -FAILED_FILES=() - -flatten_and_generate_uml() { - local FILE=$1 - local TARGET_DIR=$2 - - set +e - FLATTENED_FILE="$TARGET_DIR/flattened_$(basename "$FILE")" - echo "::debug::Flattening $FILE to $FLATTENED_FILE" - forge flatten "$FILE" -o "$FLATTENED_FILE" --root contracts - if [[ $? -ne 0 ]]; then - >&2 echo "::error::Failed to flatten $FILE" - FAILED_FILES+=("$FILE") - return - fi - - OUTPUT_FILE=${FLATTENED_FILE//"flattened_"/""} - OUTPUT_FILE_SVG="${OUTPUT_FILE%.sol}.svg" - echo "::debug::Generating SVG UML for $FLATTENED_FILE to $OUTPUT_FILE_SVG" - sol2uml "$FLATTENED_FILE" -o "$OUTPUT_FILE_SVG" - if [[ $? -ne 0 ]]; then - >&2 echo "::error::Failed to generate UML diagram in SVG format for $FILE" - FAILED_FILES+=("$FILE") - rm "$FLATTENED_FILE" - return - fi - OUTPUT_FILE_DOT="${OUTPUT_FILE%.sol}.dot" - echo "::debug::Generating DOT UML for $FLATTENED_FILE to $OUTPUT_FILE_DOT" - sol2uml "$FLATTENED_FILE" -o "$OUTPUT_FILE_DOT" -f dot - if [[ $? -ne 0 ]]; then - >&2 echo "::error::Failed to generate UML diagram in DOT format for $FILE" - FAILED_FILES+=("$FILE") - rm "$FLATTENED_FILE" - return - fi - - rm "$FLATTENED_FILE" - set -e -} - -process_all_files_in_directory() { - local SOURCE_DIR=$1 - local TARGET_DIR=$2 - - mkdir -p "$TARGET_DIR" - - find "$SOURCE_DIR" -type f -name '*.sol' | while read -r ITEM; do - flatten_and_generate_uml "$ITEM" "$TARGET_DIR" - done -} - -process_selected_files() { - local SOURCE_DIR=$1 - local TARGET_DIR=$2 - local FILES=(${3//,/ }) # Split the comma-separated list into an array - - mkdir -p "$TARGET_DIR" - - for FILE in "${FILES[@]}"; do - FILE=${FILE//\"/} - MATCHES=($(find "$SOURCE_DIR" -type f -path "*/$FILE")) - - if [[ ${#MATCHES[@]} -gt 1 ]]; then - >&2 echo "::error:: Multiple matches found for $FILE:" - for MATCH in "${MATCHES[@]}"; do - >&2 echo " $MATCH" - done - exit 1 - elif [[ ${#MATCHES[@]} -eq 1 ]]; then - >&2 echo "::debug::File found: ${MATCHES[0]}" - flatten_and_generate_uml "${MATCHES[0]}" "$TARGET_DIR" - else - >&2 echo "::error::File $FILE does not exist within the source directory $SOURCE_DIR." 
- exit 1 - fi - done -} - -# if FILES is empty, process all files in the directory, otherwise process only the selected files -if [[ -z "$FILES" ]]; then - process_all_files_in_directory "$SOURCE_DIR" "$TARGET_DIR" -else - process_selected_files "$SOURCE_DIR" "$TARGET_DIR" "$FILES" -fi - -if [[ "${#FAILED_FILES[@]}" -gt 0 ]]; then - >&2 echo ":error::Failed to generate UML diagrams for ${#FAILED_FILES[@]} files:" - for FILE in "${FAILED_FILES[@]}"; do - >&2 echo " $FILE" - echo "$FILE" >> "$TARGET_DIR/uml_generation_failures.txt" - done -fi - -echo "UML diagrams saved in $TARGET_DIR folder" diff --git a/contracts/scripts/ci/modify_remappings.sh b/contracts/scripts/ci/modify_remappings.sh deleted file mode 100755 index e64ca369b0c..00000000000 --- a/contracts/scripts/ci/modify_remappings.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -if [ "$#" -ne 2 ]; then - >&2 echo "Usage: $0 " - exit 1 -fi - -DIR_PREFIX=$1 -REMAPPINGS_FILE=$2 - -if [ ! -f "$REMAPPINGS_FILE" ]; then - >&2 echo "::error:: Remappings file '$REMAPPINGS_FILE' not found." - exit 1 -fi - -OUTPUT_FILE="remappings_modified.txt" - -while IFS= read -r line; do - if [[ "$line" =~ ^[^=]+= ]]; then - REMAPPED_PATH="${line#*=}" - MODIFIED_LINE="${line%=*}=${DIR_PREFIX}/${REMAPPED_PATH}" - echo "$MODIFIED_LINE" >> "$OUTPUT_FILE" - else - echo "$line" >> "$OUTPUT_FILE" - fi -done < "$REMAPPINGS_FILE" - -echo "Modified remappings have been saved to: $OUTPUT_FILE" diff --git a/contracts/scripts/ci/select_solc_version.sh b/contracts/scripts/ci/select_solc_version.sh deleted file mode 100755 index cfa13de0f60..00000000000 --- a/contracts/scripts/ci/select_solc_version.sh +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -function check_chainlink_dir() { - local param_dir="chainlink" - current_dir=$(pwd) - - current_base=$(basename "$current_dir") - - if [[ "$current_base" != "$param_dir" ]]; then - >&2 echo "::error::The script must be run from the root of $param_dir directory" - exit 1 - fi -} - -check_chainlink_dir - -FILE="$1" - -if [[ "$#" -lt 1 ]]; then - echo "Detects the Solidity version of a file and selects the appropriate Solc version." - echo "If the version is not installed, it will be installed and selected." - echo "Will prefer to use the version from Foundry profile if it satisfies the version in the file." - echo "Usage: $0 " - exit 1 -fi - -if [[ -z "$FILE" ]]; then - >&2 echo "::error:: File not provided." - exit 1 -fi - -extract_product() { - local path=$1 - - echo "$path" | awk -F'src/[^/]*/' '{print $2}' | cut -d'/' -f1 -} - -extract_pragma() { - local FILE=$1 - - if [[ -f "$FILE" ]]; then - SOLCVER="$(grep --no-filename '^pragma solidity' "$FILE" | cut -d' ' -f3)" - else - >&2 echo ":error::$FILE is not a file or it could not be found. Exiting." - return 1 - fi - SOLCVER="$(echo "$SOLCVER" | sed 's/[^0-9\.^]//g')" - >&2 echo "::debug::Detected Solidity version in pragma: $SOLCVER" - echo "$SOLCVER" -} - -echo "Detecting Solc version for $FILE" - -# Set FOUNDRY_PROFILE to the product name only if it is set; otherwise either already set value will be used or it will be empty -PRODUCT=$(extract_product "$FILE") -if [ -n "$PRODUCT" ]; then - FOUNDRY_PROFILE="$PRODUCT" -fi -SOLC_IN_PROFILE=$(forge config --json --root contracts | jq ".solc") -SOLC_IN_PROFILE=$(echo "$SOLC_IN_PROFILE" | tr -d "'\"") -echo "::debug::Detected Solidity version in profile: $SOLC_IN_PROFILE" - -set +e -SOLCVER=$(extract_pragma "$FILE") - -if [[ $? 
-ne 0 ]]; then - >&2 echo "::error:: Failed to extract the Solidity version from $FILE." - return 1 -fi - -set -e - -SOLCVER=$(echo "$SOLCVER" | tr -d "'\"") - -if [[ "$SOLC_IN_PROFILE" != "null" && -n "$SOLCVER" ]]; then - set +e - COMPAT_SOLC_VERSION=$(npx semver "$SOLC_IN_PROFILE" -r "$SOLCVER") - exit_code=$? - set -e - if [[ $exit_code -eq 0 && -n "$COMPAT_SOLC_VERSION" ]]; then - echo "::debug::Version $SOLC_IN_PROFILE satisfies the constraint $SOLCVER" - SOLC_TO_USE="$SOLC_IN_PROFILE" - else - echo "::debug::Version $SOLC_IN_PROFILE does not satisfy the constraint $SOLCVER" - SOLC_TO_USE="$SOLCVER" - fi - elif [[ "$SOLC_IN_PROFILE" != "null" && -z "$SOLCVER" ]]; then - >&2 echo "::error::No version found in the Solidity file. Exiting" - return 1 - elif [[ "$SOLC_IN_PROFILE" == "null" && -n "$SOLCVER" ]]; then - echo "::debug::Using the version from the file: $SOLCVER" - SOLC_TO_USE="$SOLCVER" - else - >&2 echo "::error::No version found in the profile or the Solidity file." - return 1 -fi - -echo "Will use $SOLC_TO_USE" -SOLC_TO_USE=$(echo "$SOLC_TO_USE" | tr -d "'\"") -SOLC_TO_USE="$(echo "$SOLC_TO_USE" | sed 's/[^0-9\.]//g')" - -INSTALLED_VERSIONS=$(solc-select versions) - -if echo "$INSTALLED_VERSIONS" | grep -q "$SOLC_TO_USE"; then - echo "::debug::Version $SOLCVER is already installed." - if echo "$INSTALLED_VERSIONS" | grep "$SOLC_TO_USE" | grep -q "current"; then - echo "::debug::Version $SOLCVER is already selected." - else - echo "::debug::Selecting $SOLC_TO_USE" - solc-select use "$SOLC_TO_USE" - fi -else - echo "::debug::Version $SOLC_TO_USE is not installed." - solc-select install "$SOLC_TO_USE" - solc-select use "$SOLC_TO_USE" -fi From 818498a098c42e55f3703377f9dcfdb935b3ab38 Mon Sep 17 00:00:00 2001 From: Lukasz <120112546+lukaszcl@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:24:01 +0200 Subject: [PATCH 10/14] TT-1459 Use CTF actions from .github repo (#14522) * Update GHA in client-compatibility-tests workflow * bump ctf * Auto fill the version based on the docker image * bump ctf --- .../workflows/client-compatibility-tests.yml | 49 +++++++++---------- .../run-e2e-tests-reusable-workflow.yml | 6 +-- integration-tests/go.mod | 2 +- integration-tests/go.sum | 4 +- integration-tests/load/go.mod | 2 +- integration-tests/load/go.sum | 4 +- 6 files changed, 33 insertions(+), 34 deletions(-) diff --git a/.github/workflows/client-compatibility-tests.yml b/.github/workflows/client-compatibility-tests.yml index 761759193e6..c98cecf89d8 100644 --- a/.github/workflows/client-compatibility-tests.yml +++ b/.github/workflows/client-compatibility-tests.yml @@ -644,19 +644,10 @@ jobs: # comment_on_pr: false # theme: 'dark' - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@d2f9642bcc24a73400568756f24b72c188ac7a9a # v2.3.31 + uses: smartcontractkit/.github/actions/ctf-run-tests@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # ctf-run-tests@0.1.0 with: test_command_to_run: cd ./integration-tests && touch .root_dir && go test -timeout 30m -count=1 -json ${{ matrix.evm_node.run }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_config_chainlink_version: ${{ needs.select-versions.outputs.chainlink_image_version }} - test_config_selected_networks: ${{ env.SELECTED_NETWORKS}} - test_config_logging_run_id: ${{ github.run_id }} - test_config_test_log_collect: ${{ vars.TEST_LOG_COLLECT }} - 
test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_config_private_ethereum_network_execution_layer: ${{ matrix.evm_node.eth_implementation || 'geth' }} - test_config_private_ethereum_network_custom_docker_image: ${{ matrix.evm_node.docker_image }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ needs.select-versions.outputs.chainlink_version }} aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} artifacts_name: ${{ env.TEST_LOG_NAME }} artifacts_location: | @@ -674,22 +665,30 @@ jobs: should_tidy: "false" go_coverage_src_dir: /var/tmp/go-coverage go_coverage_dest_dir: ${{ github.workspace }}/.covdata - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ci-client-compatability-${{ matrix.eth_client }}-testnet - DEFAULT_PYROSCOPE_ENABLED: "true" - - - name: Print failed test summary + env: + E2E_TEST_SELECTED_NETWORK: ${{ env.SELECTED_NETWORKS}} + E2E_TEST_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} + E2E_TEST_CHAINLINK_VERSION: ${{ needs.select-versions.outputs.chainlink_image_version }} + E2E_TEST_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} + E2E_TEST_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push + E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} + E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" + E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} + E2E_TEST_PYROSCOPE_SERVER_URL: ${{ secrets.QA_PYROSCOPE_INSTANCE }} + E2E_TEST_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} + E2E_TEST_PYROSCOPE_ENVIRONMENT: ci-client-compatability-${{ matrix.eth_client }}-testnet + E2E_TEST_PYROSCOPE_ENABLED: "true" + E2E_TEST_LOGGING_RUN_ID: ${{ github.run_id }} + E2E_TEST_LOG_COLLECT: ${{ vars.TEST_LOG_COLLECT }} + E2E_TEST_LOG_STREAM_LOG_TARGETS: ${{ vars.LOGSTREAM_LOG_TARGETS }} + E2E_TEST_PRIVATE_ETHEREUM_EXECUTION_LAYER: ${{ matrix.evm_node.eth_implementation || 'geth' }} + E2E_TEST_PRIVATE_ETHEREUM_ETHEREUM_VERSION: auto_fill # Auto fill the version based on the docker image + E2E_TEST_PRIVATE_ETHEREUM_CUSTOM_DOCKER_IMAGE: ${{ matrix.evm_node.docker_image }} + + - name: Show Grafana url in test summary if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - + uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # ctf-show-grafana-in-test-summary@0.1.0 + start-slack-thread: name: Start Slack Thread if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' && (needs.select-versions.outputs.evm_implementations != '' || github.event.inputs.base64TestList != '') diff --git a/.github/workflows/run-e2e-tests-reusable-workflow.yml b/.github/workflows/run-e2e-tests-reusable-workflow.yml index 
8278828a38d..c8e3cde085e 100644 --- a/.github/workflows/run-e2e-tests-reusable-workflow.yml +++ b/.github/workflows/run-e2e-tests-reusable-workflow.yml @@ -661,7 +661,7 @@ jobs: - name: Show Grafana url in test summary if: always() - uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@70fcaef0bf3a5a7d8aa681861d2f76e4188863d9 # ctf-show-grafana-in-test-summary@0.0.0 + uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # ctf-show-grafana-in-test-summary@0.1.0 # Run K8s tests using old remote runner @@ -764,7 +764,7 @@ jobs: - name: Run tests id: run_tests - uses: smartcontractkit/.github/actions/ctf-run-tests@b8731364b119e88983e94b0c4da87fc27ddb41b8 # ctf-run-tests@0.0.0 + uses: smartcontractkit/.github/actions/ctf-run-tests@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # ctf-run-tests@0.1.0 env: DETACH_RUNNER: true RR_MEM: ${{ matrix.tests.remote_runner_memory }} @@ -827,7 +827,7 @@ jobs: - name: Show Grafana url in test summary if: always() - uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@70fcaef0bf3a5a7d8aa681861d2f76e4188863d9 # ctf-show-grafana-in-test-summary@0.0.0 + uses: smartcontractkit/.github/actions/ctf-show-grafana-in-test-summary@b6e37806737eef87e8c9137ceeb23ef0bff8b1db # ctf-show-grafana-in-test-summary@0.1.0 after_tests: needs: [load-test-configurations, run-docker-tests, run-k8s-runner-tests] diff --git a/integration-tests/go.mod b/integration-tests/go.mod index 8387c80782d..dbfda153c5a 100644 --- a/integration-tests/go.mod +++ b/integration-tests/go.mod @@ -42,7 +42,7 @@ require ( github.com/smartcontractkit/chainlink-ccip v0.0.0-20240920150748-cf2125c094fe github.com/smartcontractkit/chainlink-common v0.2.3-0.20240919092417-53e784c2e420 github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0 - github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 + github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.8 github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 github.com/smartcontractkit/chainlink-testing-framework/wasp v1.50.0 diff --git a/integration-tests/go.sum b/integration-tests/go.sum index effab5e3da3..dace088e6dc 100644 --- a/integration-tests/go.sum +++ b/integration-tests/go.sum @@ -1439,8 +1439,8 @@ github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.202409 github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240911194142-506bc469d8ae/go.mod h1:ec/a20UZ7YRK4oxJcnTBFzp1+DBcJcwqEaerUMsktMs= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0 h1:mgjBQIEy+3V3G6K8e+6by3xndgsXdYYsdy+7kzQZwSk= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0/go.mod h1:pdIxrooP5CFGmC0p5NTOBiZAFtMw+5pTT4de5GY3ywA= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 h1:IzDNN3YvQL0yAFLj7fDJqGUDR76ewGhVJx5RiovKDI4= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.8 h1:AdFXA2XxiU5uqLS4ZxA7v+RCz8mhqz2aCxgzbpnja98= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.8/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 h1:VIxK8u0Jd0Q/VuhmsNm6Bls6Tb31H/sA3A/rbc5hnhg= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana 
v1.50.0/go.mod h1:lyAu+oMXdNUzEDScj2DXB2IueY+SDXPPfyl/kb63tMM= github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 h1:2OxnPfvjC+zs0ZokSsRTRnJrEGJ4NVJwZgfroS1lPHs= diff --git a/integration-tests/load/go.mod b/integration-tests/load/go.mod index 46824e50fbb..9717c843fde 100644 --- a/integration-tests/load/go.mod +++ b/integration-tests/load/go.mod @@ -16,7 +16,7 @@ require ( github.com/rs/zerolog v1.33.0 github.com/slack-go/slack v0.12.2 github.com/smartcontractkit/chainlink-common v0.2.3-0.20240919092417-53e784c2e420 - github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 + github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.8 github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 github.com/smartcontractkit/chainlink-testing-framework/wasp v1.50.0 github.com/smartcontractkit/chainlink/integration-tests v0.0.0-20240214231432-4ad5eb95178c diff --git a/integration-tests/load/go.sum b/integration-tests/load/go.sum index eb4f92d14eb..8f671ac8a85 100644 --- a/integration-tests/load/go.sum +++ b/integration-tests/load/go.sum @@ -1413,8 +1413,8 @@ github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.202409 github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240911194142-506bc469d8ae/go.mod h1:ec/a20UZ7YRK4oxJcnTBFzp1+DBcJcwqEaerUMsktMs= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0 h1:mgjBQIEy+3V3G6K8e+6by3xndgsXdYYsdy+7kzQZwSk= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.0/go.mod h1:pdIxrooP5CFGmC0p5NTOBiZAFtMw+5pTT4de5GY3ywA= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7 h1:IzDNN3YvQL0yAFLj7fDJqGUDR76ewGhVJx5RiovKDI4= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.7/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.8 h1:AdFXA2XxiU5uqLS4ZxA7v+RCz8mhqz2aCxgzbpnja98= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.8/go.mod h1:7R5wGWWJi0dr5Y5cXbLQ4vSeIj0ElvhBaymcfvqqUmo= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 h1:VIxK8u0Jd0Q/VuhmsNm6Bls6Tb31H/sA3A/rbc5hnhg= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0/go.mod h1:lyAu+oMXdNUzEDScj2DXB2IueY+SDXPPfyl/kb63tMM= github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.1 h1:2OxnPfvjC+zs0ZokSsRTRnJrEGJ4NVJwZgfroS1lPHs= From 469e04f6938be27fae807830392dc21941034b7b Mon Sep 17 00:00:00 2001 From: Bartek Tofel Date: Mon, 23 Sep 2024 16:57:25 +0200 Subject: [PATCH 11/14] [TT-1747] fix core changeset (#14524) * test core changeset * fix fix version * remove test file --- .github/workflows/changeset.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/changeset.yml b/.github/workflows/changeset.yml index 9d953bc73a0..ba06eb07e9e 100644 --- a/.github/workflows/changeset.yml +++ b/.github/workflows/changeset.yml @@ -93,17 +93,17 @@ jobs: uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 with: repository: smartcontractkit/.github - ref: 228acc0a7f9f0092450a7673786462832bf3d19c + ref: b6e37806737eef87e8c9137ceeb23ef0bff8b1db # validate-solidity-artifacts@0.1.0 path: ./dot_github - name: Update Jira ticket for core id: jira if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }} shell: bash - working-directory: ./dot_github/libs/jira-tracing + working-directory: ./dot_github run: | echo 
"COMMIT_MESSAGE=$(git log -1 --pretty=format:'%s')" >> $GITHUB_ENV - pnpm install && pnpm issue:update + pnpm install --filter jira-tracing && pnpm --filter jira-tracing issue:update env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} JIRA_HOST: ${{ vars.JIRA_HOST }} From 2b1e8ad51b98aa41eca78758d2041ffcd7fba94a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Friedemann=20F=C3=BCrst?= <59653747+friedemannf@users.noreply.github.com> Date: Mon, 23 Sep 2024 19:39:16 +0200 Subject: [PATCH 12/14] Change Polygon zkEVM to use FeeHistory estimator (#14161) * Change Polygon zkEVM to use SuggestedPriceEstimator (SHIP-2885) * Set PriceMin to 1mwei for Polygon zkEVM * Remove Polygon zkEVM Goerli * Enable FeeHistory estimator for Polygon zkEVM * Update PriceMin * apply suggestions * Set CacheTimeout to 2 seconds * Revert back to 5s cachetimeout * Change timeout to 4 seconds --------- Co-authored-by: joaoluisam --- .changeset/tender-lemons-obey.md | 5 + .../toml/defaults/Polygon_Zkevm_Cardona.toml | 10 +- .../toml/defaults/Polygon_Zkevm_Goerli.toml | 24 ---- .../toml/defaults/Polygon_Zkevm_Mainnet.toml | 10 +- docs/CONFIG.md | 123 ++---------------- 5 files changed, 27 insertions(+), 145 deletions(-) create mode 100644 .changeset/tender-lemons-obey.md delete mode 100644 core/chains/evm/config/toml/defaults/Polygon_Zkevm_Goerli.toml diff --git a/.changeset/tender-lemons-obey.md b/.changeset/tender-lemons-obey.md new file mode 100644 index 00000000000..2d6cb774b0c --- /dev/null +++ b/.changeset/tender-lemons-obey.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +Enable FeeHistory estimator for Polygon zkEVM #nops diff --git a/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Cardona.toml b/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Cardona.toml index cd91465dae6..46ce80e29fc 100644 --- a/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Cardona.toml +++ b/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Cardona.toml @@ -13,12 +13,14 @@ ContractConfirmations = 1 ResendAfterThreshold = '3m' [GasEstimator] -PriceMin = '1 mwei' +Mode = 'FeeHistory' +# The FeeHistory estimator does not enforce PriceMin, setting it to 0 to not place any limits on the price +PriceMin = '0' BumpPercent = 40 -BumpMin = '20 mwei' -[GasEstimator.BlockHistory] -BlockHistorySize = 12 +[GasEstimator.FeeHistory] +# Refresh the suggested price every 4 seconds, to stay slightly below their polling rate of 5s +CacheTimeout = '4s' [HeadTracker] HistoryDepth = 2000 diff --git a/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Goerli.toml b/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Goerli.toml deleted file mode 100644 index 6a9b47190fd..00000000000 --- a/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Goerli.toml +++ /dev/null @@ -1,24 +0,0 @@ -ChainID = '1442' -ChainType = 'zkevm' -FinalityDepth = 500 -NoNewHeadsThreshold = '12m' -MinIncomingConfirmations = 1 -LogPollInterval = '30s' -RPCDefaultBatchSize = 100 - -[OCR] -ContractConfirmations = 1 - -[Transactions] -ResendAfterThreshold = '3m' - -[GasEstimator] -PriceMin = '50 mwei' -BumpPercent = 40 -BumpMin = '20 mwei' - -[GasEstimator.BlockHistory] -BlockHistorySize = 12 - -[HeadTracker] -HistoryDepth = 2000 diff --git a/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Mainnet.toml b/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Mainnet.toml index 79e0cb0fce5..2fef7874d17 100644 --- a/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Mainnet.toml +++ b/core/chains/evm/config/toml/defaults/Polygon_Zkevm_Mainnet.toml @@ -14,12 +14,14 @@ ContractConfirmations = 1 
ResendAfterThreshold = '3m' [GasEstimator] -PriceMin = '100 mwei' +Mode = 'FeeHistory' +# The FeeHistory estimator does not enforce PriceMin, setting it to 0 to not place any limits on the price +PriceMin = '0' BumpPercent = 40 -BumpMin = '100 mwei' -[GasEstimator.BlockHistory] -BlockHistorySize = 12 +[GasEstimator.FeeHistory] +# Refresh the suggested price every 4 seconds, to stay slightly below their polling rate of 5s +CacheTimeout = '4s' [HeadTracker] # Polygon suffers from a tremendous number of re-orgs, we need to set this to something very large to be conservative enough diff --git a/docs/CONFIG.md b/docs/CONFIG.md index 1512b33cad4..1a4a2a4ce20 100644 --- a/docs/CONFIG.md +++ b/docs/CONFIG.md @@ -4883,16 +4883,16 @@ Enabled = false Enabled = true [GasEstimator] -Mode = 'BlockHistory' +Mode = 'FeeHistory' PriceDefault = '20 gwei' PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' -PriceMin = '100 mwei' +PriceMin = '0' LimitDefault = 500000 LimitMax = 500000 LimitMultiplier = '1' LimitTransfer = 21000 EstimateLimit = false -BumpMin = '100 mwei' +BumpMin = '5 gwei' BumpPercent = 40 BumpThreshold = 3 EIP1559DynamicFees = false @@ -4902,13 +4902,13 @@ TipCapMin = '1 wei' [GasEstimator.BlockHistory] BatchSize = 25 -BlockHistorySize = 12 +BlockHistorySize = 8 CheckInclusionBlocks = 12 CheckInclusionPercentile = 90 TransactionPercentile = 60 [GasEstimator.FeeHistory] -CacheTimeout = '10s' +CacheTimeout = '4s' [HeadTracker] HistoryDepth = 2000 @@ -5255,109 +5255,6 @@ GasLimitDefault = 400000

-<details><summary>Polygon Zkevm Goerli (1442)</summary>
-
- -```toml -AutoCreateKey = true -BlockBackfillDepth = 10 -BlockBackfillSkip = false -ChainType = 'zkevm' -FinalityDepth = 500 -FinalityTagEnabled = false -LogBackfillBatchSize = 1000 -LogPollInterval = '30s' -LogKeepBlocksDepth = 100000 -LogPrunePageSize = 0 -BackupLogPollerBlockDelay = 100 -MinIncomingConfirmations = 1 -MinContractPayment = '0.00001 link' -NonceAutoSync = true -NoNewHeadsThreshold = '12m0s' -LogBroadcasterEnabled = true -RPCDefaultBatchSize = 100 -RPCBlockQueryDelay = 1 -FinalizedBlockOffset = 0 -NoNewFinalizedHeadsThreshold = '0s' - -[Transactions] -ForwardersEnabled = false -MaxInFlight = 16 -MaxQueued = 250 -ReaperInterval = '1h0m0s' -ReaperThreshold = '168h0m0s' -ResendAfterThreshold = '3m0s' - -[Transactions.AutoPurge] -Enabled = false - -[BalanceMonitor] -Enabled = true - -[GasEstimator] -Mode = 'BlockHistory' -PriceDefault = '20 gwei' -PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' -PriceMin = '50 mwei' -LimitDefault = 500000 -LimitMax = 500000 -LimitMultiplier = '1' -LimitTransfer = 21000 -EstimateLimit = false -BumpMin = '20 mwei' -BumpPercent = 40 -BumpThreshold = 3 -EIP1559DynamicFees = false -FeeCapDefault = '100 gwei' -TipCapDefault = '1 wei' -TipCapMin = '1 wei' - -[GasEstimator.BlockHistory] -BatchSize = 25 -BlockHistorySize = 12 -CheckInclusionBlocks = 12 -CheckInclusionPercentile = 90 -TransactionPercentile = 60 - -[GasEstimator.FeeHistory] -CacheTimeout = '10s' - -[HeadTracker] -HistoryDepth = 2000 -MaxBufferSize = 3 -SamplingInterval = '1s' -MaxAllowedFinalityDepth = 10000 -FinalityTagBypass = true - -[NodePool] -PollFailureThreshold = 5 -PollInterval = '10s' -SelectionMode = 'HighestHead' -SyncThreshold = 5 -LeaseDuration = '0s' -NodeIsSyncingEnabled = false -FinalizedBlockPollInterval = '5s' -EnforceRepeatableRead = false -DeathDeclarationDelay = '10s' -NewHeadsPollInterval = '0s' - -[OCR] -ContractConfirmations = 1 -ContractTransmitterTransmitTimeout = '10s' -DatabaseTimeout = '10s' -DeltaCOverride = '168h0m0s' -DeltaCJitterOverride = '1h0m0s' -ObservationGracePeriod = '1s' - -[OCR2] -[OCR2.Automation] -GasLimit = 5400000 - -[Workflow] -GasLimitDefault = 400000 -``` - -

-</details>
-
<details><summary>Kroma Sepolia (2358)</summary>

```toml @@ -5500,16 +5397,16 @@ Enabled = false Enabled = true [GasEstimator] -Mode = 'BlockHistory' +Mode = 'FeeHistory' PriceDefault = '20 gwei' PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' -PriceMin = '1 mwei' +PriceMin = '0' LimitDefault = 500000 LimitMax = 500000 LimitMultiplier = '1' LimitTransfer = 21000 EstimateLimit = false -BumpMin = '20 mwei' +BumpMin = '5 gwei' BumpPercent = 40 BumpThreshold = 3 EIP1559DynamicFees = false @@ -5519,13 +5416,13 @@ TipCapMin = '1 wei' [GasEstimator.BlockHistory] BatchSize = 25 -BlockHistorySize = 12 +BlockHistorySize = 8 CheckInclusionBlocks = 12 CheckInclusionPercentile = 90 TransactionPercentile = 60 [GasEstimator.FeeHistory] -CacheTimeout = '10s' +CacheTimeout = '4s' [HeadTracker] HistoryDepth = 2000 From c7a16eb9aa531edb9870c8720451817ba300ef08 Mon Sep 17 00:00:00 2001 From: Oliver Townsend <133903322+ogtownsend@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:59:36 -0700 Subject: [PATCH 13/14] CCIP-3416 paginate token admin registry get all tokens call (#14514) * paginate token admin registry get all tokens call * add nil check * restructure loop condition --- .../ccip/view/v1_5/tokenadminregistry.go | 28 ++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/integration-tests/deployment/ccip/view/v1_5/tokenadminregistry.go b/integration-tests/deployment/ccip/view/v1_5/tokenadminregistry.go index 1e704efae72..ebfff8c2feb 100644 --- a/integration-tests/deployment/ccip/view/v1_5/tokenadminregistry.go +++ b/integration-tests/deployment/ccip/view/v1_5/tokenadminregistry.go @@ -9,13 +9,20 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/token_admin_registry" ) +const ( + GetTokensPaginationSize = 20 +) + type TokenAdminRegistryView struct { types.ContractMetaData Tokens []common.Address `json:"tokens"` } func GenerateTokenAdminRegistryView(taContract *token_admin_registry.TokenAdminRegistry) (TokenAdminRegistryView, error) { - tokens, err := taContract.GetAllConfiguredTokens(nil, 0, 10) + if taContract == nil { + return TokenAdminRegistryView{}, fmt.Errorf("token admin registry contract is nil") + } + tokens, err := getAllConfiguredTokensPaginated(taContract) if err != nil { return TokenAdminRegistryView{}, fmt.Errorf("view error for token admin registry: %w", err) } @@ -28,3 +35,22 @@ func GenerateTokenAdminRegistryView(taContract *token_admin_registry.TokenAdminR Tokens: tokens, }, nil } + +// getAllConfiguredTokensPaginated fetches all configured tokens from the TokenAdminRegistry contract in paginated +// manner to avoid RPC timeouts since the list of configured tokens can grow to be very large over time. +func getAllConfiguredTokensPaginated(taContract *token_admin_registry.TokenAdminRegistry) ([]common.Address, error) { + startIndex := uint64(0) + allTokens := make([]common.Address, 0) + for { + fetchedTokens, err := taContract.GetAllConfiguredTokens(nil, startIndex, GetTokensPaginationSize) + if err != nil { + return nil, err + } + allTokens = append(allTokens, fetchedTokens...) 
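+		// advance the cursor by one full page; a page shorter than
+		// GetTokensPaginationSize means the registry is exhausted, and the
+		// check below ends the loop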
+ startIndex += GetTokensPaginationSize + if len(fetchedTokens) < GetTokensPaginationSize { + break + } + } + return allTokens, nil +} From f1b310bfbfb206a959288f65c92c6cc9550bdc3c Mon Sep 17 00:00:00 2001 From: Oliver Townsend <133903322+ogtownsend@users.noreply.github.com> Date: Mon, 23 Sep 2024 11:07:55 -0700 Subject: [PATCH 14/14] CCIP 3388 - add offramp generation (#14526) * add offramp getters * Move GetRemoteChainSelectors to router * run lint * remove unnecessary owner calls --- integration-tests/deployment/ccip/state.go | 11 ++++ .../deployment/ccip/view/chain.go | 2 + .../deployment/ccip/view/v1_2/router.go | 15 +++++ .../deployment/ccip/view/v1_6/feequoter.go | 17 +---- .../deployment/ccip/view/v1_6/offramp.go | 64 +++++++++++++++++++ .../deployment/ccip/view/v1_6/onramp.go | 3 +- 6 files changed, 96 insertions(+), 16 deletions(-) create mode 100644 integration-tests/deployment/ccip/view/v1_6/offramp.go diff --git a/integration-tests/deployment/ccip/state.go b/integration-tests/deployment/ccip/state.go index d6bd8894396..efb3416ec3f 100644 --- a/integration-tests/deployment/ccip/state.go +++ b/integration-tests/deployment/ccip/state.go @@ -116,6 +116,17 @@ func (c CCIPChainState) GenerateView() (view.ChainView, error) { } chainView.OnRamp[c.OnRamp.Address().Hex()] = onRampView } + + if c.OffRamp != nil && c.Router != nil { + offRampView, err := v1_6.GenerateOffRampView( + c.OffRamp, + c.Router, + ) + if err != nil { + return chainView, err + } + chainView.OffRamp[c.OffRamp.Address().Hex()] = offRampView + } return chainView, nil } diff --git a/integration-tests/deployment/ccip/view/chain.go b/integration-tests/deployment/ccip/view/chain.go index 468e5e7d487..8dbd6cabbba 100644 --- a/integration-tests/deployment/ccip/view/chain.go +++ b/integration-tests/deployment/ccip/view/chain.go @@ -13,6 +13,7 @@ type ChainView struct { Router map[string]v1_2.RouterView `json:"router,omitempty"` RMN map[string]v1_6.RMNRemoteView `json:"rmn,omitempty"` OnRamp map[string]v1_6.OnRampView `json:"onRamp,omitempty"` + OffRamp map[string]v1_6.OffRampView `json:"offRamp,omitempty"` } func NewChain() ChainView { @@ -22,6 +23,7 @@ func NewChain() ChainView { Router: make(map[string]v1_2.RouterView), RMN: make(map[string]v1_6.RMNRemoteView), OnRamp: make(map[string]v1_6.OnRampView), + OffRamp: make(map[string]v1_6.OffRampView), FeeQuoter: make(map[string]v1_6.FeeQuoterView), } } diff --git a/integration-tests/deployment/ccip/view/v1_2/router.go b/integration-tests/deployment/ccip/view/v1_2/router.go index 9d3711d0c28..14a96e86b79 100644 --- a/integration-tests/deployment/ccip/view/v1_2/router.go +++ b/integration-tests/deployment/ccip/view/v1_2/router.go @@ -54,3 +54,18 @@ func GenerateRouterView(r *router.Router) (RouterView, error) { OffRamps: offRamps, }, nil } + +// From the perspective of the OnRamp, the destination chains are the source chains for the OffRamp. 
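+// Lanes are registered bidirectionally, so the Router's OffRamp list doubles as the set of reachable remote chains.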
+func GetRemoteChainSelectors(routerContract *router.Router) ([]uint64, error) { + remoteSelectors := make([]uint64, 0) + offRamps, err := routerContract.GetOffRamps(nil) + if err != nil { + return nil, fmt.Errorf("failed to get offRamps from router: %w", err) + } + // lanes are bidirectional, so we get the list of source chains to know which chains are supported as destinations as well + for _, offRamp := range offRamps { + remoteSelectors = append(remoteSelectors, offRamp.SourceChainSelector) + } + + return remoteSelectors, nil +} diff --git a/integration-tests/deployment/ccip/view/v1_6/feequoter.go b/integration-tests/deployment/ccip/view/v1_6/feequoter.go index 10e0b984f3f..a2694e96a48 100644 --- a/integration-tests/deployment/ccip/view/v1_6/feequoter.go +++ b/integration-tests/deployment/ccip/view/v1_6/feequoter.go @@ -6,6 +6,7 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/chainlink/integration-tests/deployment/ccip/view/types" + "github.com/smartcontractkit/chainlink/integration-tests/deployment/ccip/view/v1_2" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/fee_quoter" router1_2 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/router" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/token_admin_registry" @@ -83,7 +84,7 @@ func GenerateFeeQuoterView(fqContract *fee_quoter.FeeQuoter, router *router1_2.R } // find router contract in dependencies fq.DestinationChainConfig = make(map[uint64]FeeQuoterDestChainConfig) - destSelectors, err := GetDestinationSelectors(router) + destSelectors, err := v1_2.GetRemoteChainSelectors(router) if err != nil { return FeeQuoterView{}, fmt.Errorf("view error for FeeQuoter: %w", err) } @@ -137,17 +138,3 @@ func GetSupportedTokens(taContract *token_admin_registry.TokenAdminRegistry) ([] } return tokens, nil } - -func GetDestinationSelectors(routerContract *router1_2.Router) ([]uint64, error) { - destSelectors := make([]uint64, 0) - offRamps, err := routerContract.GetOffRamps(nil) - if err != nil { - return nil, fmt.Errorf("failed to get offRamps from router: %w", err) - } - // lanes are bidirectional, so we get the list of source chains to know which chains are supported as destinations as well - for _, offRamp := range offRamps { - destSelectors = append(destSelectors, offRamp.SourceChainSelector) - } - - return destSelectors, nil -} diff --git a/integration-tests/deployment/ccip/view/v1_6/offramp.go b/integration-tests/deployment/ccip/view/v1_6/offramp.go new file mode 100644 index 00000000000..38992cb3da1 --- /dev/null +++ b/integration-tests/deployment/ccip/view/v1_6/offramp.go @@ -0,0 +1,64 @@ +package v1_6 + +import ( + "fmt" + + "github.com/smartcontractkit/chainlink/integration-tests/deployment/ccip/view/types" + "github.com/smartcontractkit/chainlink/integration-tests/deployment/ccip/view/v1_2" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/offramp" + router1_2 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/router" +) + +type OffRampView struct { + types.ContractMetaData + DynamicConfig offramp.OffRampDynamicConfig `json:"dynamicConfig"` + LatestPriceSequenceNumber uint64 `json:"latestPriceSequenceNumber"` + SourceChainConfigs map[uint64]offramp.OffRampSourceChainConfig `json:"sourceChainConfigs"` + StaticConfig offramp.OffRampStaticConfig `json:"staticConfig"` +} + +func GenerateOffRampView( + offRampContract *offramp.OffRamp, + routerContract *router1_2.Router, +) 
(OffRampView, error) { + tv, err := types.NewContractMetaData(offRampContract, offRampContract.Address()) + if err != nil { + return OffRampView{}, err + } + + dynamicConfig, err := offRampContract.GetDynamicConfig(nil) + if err != nil { + return OffRampView{}, fmt.Errorf("failed to get dynamic config: %w", err) + } + + latestPriceSequenceNumber, err := offRampContract.GetLatestPriceSequenceNumber(nil) + if err != nil { + return OffRampView{}, fmt.Errorf("failed to get latest price sequence number: %w", err) + } + + sourceChainSelectors, err := v1_2.GetRemoteChainSelectors(routerContract) + if err != nil { + return OffRampView{}, fmt.Errorf("failed to get source chain selectors: %w", err) + } + sourceChainConfigs := make(map[uint64]offramp.OffRampSourceChainConfig) + for _, sourceChainSelector := range sourceChainSelectors { + sourceChainConfig, err := offRampContract.GetSourceChainConfig(nil, sourceChainSelector) + if err != nil { + return OffRampView{}, fmt.Errorf("failed to get source chain config: %w", err) + } + sourceChainConfigs[sourceChainSelector] = sourceChainConfig + } + + staticConfig, err := offRampContract.GetStaticConfig(nil) + if err != nil { + return OffRampView{}, fmt.Errorf("failed to get static config: %w", err) + } + + return OffRampView{ + ContractMetaData: tv, + DynamicConfig: dynamicConfig, + LatestPriceSequenceNumber: latestPriceSequenceNumber, + SourceChainConfigs: sourceChainConfigs, + StaticConfig: staticConfig, + }, nil +} diff --git a/integration-tests/deployment/ccip/view/v1_6/onramp.go b/integration-tests/deployment/ccip/view/v1_6/onramp.go index a5fe13bfb5c..72ac9e948ef 100644 --- a/integration-tests/deployment/ccip/view/v1_6/onramp.go +++ b/integration-tests/deployment/ccip/view/v1_6/onramp.go @@ -6,6 +6,7 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/chainlink/integration-tests/deployment/ccip/view/types" + "github.com/smartcontractkit/chainlink/integration-tests/deployment/ccip/view/v1_2" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/onramp" router1_2 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/router" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/token_admin_registry" @@ -51,7 +52,7 @@ func GenerateOnRampView( return OnRampView{}, fmt.Errorf("failed to get owner: %w", err) } // populate destChainSelectors from router - destChainSelectors, err := GetDestinationSelectors(routerContract) + destChainSelectors, err := v1_2.GetRemoteChainSelectors(routerContract) if err != nil { return OnRampView{}, fmt.Errorf("failed to get destination selectors: %w", err) }
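Taken together, patch 13 replaces a hard-coded `GetAllConfiguredTokens(nil, 0, 10)` call with a drain-the-list loop, and patch 14 centralizes `GetRemoteChainSelectors` on the router so the OnRamp, OffRamp and FeeQuoter views share one source of truth for remote selectors. The sketch below distills patch 13's pagination pattern outside of any contract binding; `fetchPage`, `pageSize` and the string tokens are illustrative stand-ins, not names defined in these patches.

```go
package main

import "fmt"

const pageSize = 20 // mirrors GetTokensPaginationSize

// fetchAll drains a paginated source by requesting fixed-size pages until a
// short (or empty) page shows there is nothing left -- the same loop shape as
// getAllConfiguredTokensPaginated. fetchPage(start, max) stands in for a
// contract getter such as GetAllConfiguredTokens.
func fetchAll(fetchPage func(start, max uint64) ([]string, error)) ([]string, error) {
	all := make([]string, 0)
	start := uint64(0)
	for {
		page, err := fetchPage(start, pageSize)
		if err != nil {
			return nil, fmt.Errorf("fetching page at offset %d: %w", start, err)
		}
		all = append(all, page...)
		start += pageSize
		// A short page means the source is exhausted; a full page may be
		// followed by more, so keep looping.
		if len(page) < pageSize {
			return all, nil
		}
	}
}

func main() {
	// 45 fake tokens: expect pages of 20, 20 and 5, i.e. three calls in total.
	tokens := make([]string, 45)
	for i := range tokens {
		tokens[i] = fmt.Sprintf("token-%02d", i)
	}
	fetchPage := func(start, max uint64) ([]string, error) {
		if start >= uint64(len(tokens)) {
			return nil, nil
		}
		end := start + max
		if end > uint64(len(tokens)) {
			end = uint64(len(tokens))
		}
		return tokens[start:end], nil
	}

	all, err := fetchAll(fetchPage)
	fmt.Println(len(all), err) // prints: 45 <nil>
}
```

Stopping on `len(page) < pageSize` rather than waiting for an empty page saves one RPC round trip whenever the final page is only partially full, which is the common case.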