Merge upstream/master, fix conflicts
kertal committed Feb 9, 2020
2 parents f8d8e5a + ea84cbf commit 59af21d
Showing 23,445 changed files with 1,445,284 additions and 731,480 deletions.
Note: this diff is too large to display in full; only the first 3000 changed files are shown below.
2 changes: 1 addition & 1 deletion .backportrc.json
@@ -1,5 +1,5 @@
{
"upstream": "elastic/kibana",
"branches": [{ "name": "7.x", "checked": true }, "7.4", "7.3", "7.2", "7.1", "7.0", "6.8", "6.7", "6.6", "6.5", "6.4", "6.3", "6.2", "6.1", "6.0", "5.6"],
"branches": [{ "name": "7.x", "checked": true }, "7.6", "7.5", "7.4", "7.3", "7.2", "7.1", "7.0", "6.8", "6.7", "6.6", "6.5", "6.4", "6.3", "6.2", "6.1", "6.0", "5.6"],
"labels": ["backport"]
}
105 changes: 105 additions & 0 deletions .ci/Jenkinsfile_coverage
@@ -0,0 +1,105 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load() // load from the Jenkins instance

stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
timeout(time: 180, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
withEnv([
'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
]) {
parallel([
'kibana-intake-agent': {
withEnv([
'NODE_ENV=test' // Needed for jest tests only
]) {
kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh')()
}
},
'x-pack-intake-agent': {
withEnv([
'NODE_ENV=test' // Needed for jest tests only
]) {
kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh')()
}
},
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
]),
])
kibanaPipeline.jobRunner('tests-l', false) {
kibanaPipeline.downloadCoverageArtifacts()
kibanaPipeline.bash(
'''
# bootstrap from x-pack folder
source src/dev/ci_setup/setup_env.sh
cd x-pack
yarn kbn bootstrap --prefer-offline
cd ..
# extract archives
mkdir -p /tmp/extracted_coverage
echo extracting intakes
tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-oss-tests
tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-xpack-tests
tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
# replace path in json files to have valid html report
pwd=$(pwd)
du -sh /tmp/extracted_coverage/target/kibana-coverage/
echo replacing path in json files
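# fan out one sed job per leading digit (1-9) of the functional coverage file names, then wait for all of them below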
for i in {1..9}; do
sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
done
wait
# merge oss & x-pack reports
echo merging coverage reports
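# nyc reads the raw coverage JSON from --temp-dir and writes combined HTML and json-summary reports into --report-dir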
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
echo copy mocha reports
mkdir -p target/kibana-coverage/mocha-combined
cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
''',
"run `yarn kbn bootstrap && merge coverage`"
)
sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
}
}
}
kibanaPipeline.sendMail()
}
}
}
}
146 changes: 146 additions & 0 deletions .ci/Jenkinsfile_flaky
@@ -0,0 +1,146 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load()

def CI_GROUP_PARAM = params.CI_GROUP

// Looks like 'oss:ciGroup:1', 'oss:firefoxSmoke', or 'all:serverMocha'
def JOB_PARTS = CI_GROUP_PARAM.split(':')
def IS_XPACK = JOB_PARTS[0] == 'xpack'
def JOB = JOB_PARTS[1]
def NEED_BUILD = JOB != 'serverMocha'
def CI_GROUP = JOB_PARTS.size() > 2 ? JOB_PARTS[2] : ''
def EXECUTIONS = params.NUMBER_EXECUTIONS.toInteger()
def AGENT_COUNT = getAgentCount(EXECUTIONS)

def worker = getWorkerFromParams(IS_XPACK, JOB, CI_GROUP)

def workerFailures = []

currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24)
currentBuild.description = "${params.CI_GROUP}<br />Agents: ${AGENT_COUNT}<br />Executions: ${params.NUMBER_EXECUTIONS}"

stage("Kibana Pipeline") {
timeout(time: 180, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
def agents = [:]
for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
def agentNumberInside = agentNumber
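// Spread executions evenly across agents; the first (EXECUTIONS % AGENT_COUNT) agents each pick up one extra run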
def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
agents["agent-${agentNumber}"] = {
catchError {
print "Agent ${agentNumberInside} - ${agentExecutions} executions"

kibanaPipeline.withWorkers('flaky-test-runner', {
if (NEED_BUILD) {
if (!IS_XPACK) {
kibanaPipeline.buildOss()
if (CI_GROUP == '1') {
runbld("./test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh", "Build kbn tp sample panel action for ciGroup1")
}
} else {
kibanaPipeline.buildXpack()
}
}
}, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
}
}
}

parallel(agents)

currentBuild.description += ", Failures: ${workerFailures.size()}"

if (workerFailures.size() > 0) {
print "There were ${workerFailures.size()} test suite failures."
print "The executions that failed were:"
print workerFailures.join("\n")
print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
}
}
}
}
}

def getWorkerFromParams(isXpack, job, ciGroup) {
if (!isXpack) {
if (job == 'serverMocha') {
return kibanaPipeline.getPostBuildWorker('serverMocha', {
kibanaPipeline.bash(
"""
source src/dev/ci_setup/setup_env.sh
node scripts/mocha
""",
"run `node scripts/mocha`"
)
})
} else if (job == 'firefoxSmoke') {
return kibanaPipeline.getPostBuildWorker('firefoxSmoke', { runbld('./test/scripts/jenkins_firefox_smoke.sh', 'Execute kibana-firefoxSmoke') })
} else if(job == 'visualRegression') {
return kibanaPipeline.getPostBuildWorker('visualRegression', { runbld('./test/scripts/jenkins_visual_regression.sh', 'Execute kibana-visualRegression') })
} else {
return kibanaPipeline.getOssCiGroupWorker(ciGroup)
}
}

if (job == 'firefoxSmoke') {
return kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', { runbld('./test/scripts/jenkins_xpack_firefox_smoke.sh', 'Execute xpack-firefoxSmoke') })
} else if(job == 'visualRegression') {
return kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld('./test/scripts/jenkins_xpack_visual_regression.sh', 'Execute xpack-visualRegression') })
} else {
return kibanaPipeline.getXpackCiGroupWorker(ciGroup)
}
}

def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWorkerProcesses = 12) {
def workerMap = [:]
def numberOfWorkers = Math.min(numberOfExecutions, maxWorkerProcesses)

for(def i = 1; i <= numberOfWorkers; i++) {
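// Same even-split logic as the agent distribution above, applied to this agent's worker processes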
def workerExecutions = floor(numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0))

workerMap["agent-${agentNumber}-worker-${i}"] = { workerNumber ->
for(def j = 0; j < workerExecutions; j++) {
print "Execute agent-${agentNumber} worker-${workerNumber}: ${j}"
withEnv([
"JOB=agent-${agentNumber}-worker-${workerNumber}-${j}",
"REMOVE_KIBANA_INSTALL_DIR=1",
]) {
catchError {
try {
worker(workerNumber)
} catch (ex) {
workerFailures << "agent-${agentNumber} worker-${workerNumber}-${j}"
throw ex
}
}
}
}
}
}

return workerMap
}

def getAgentCount(executions) {
// Increase agent count every 24 worker processes, up to 3 agents maximum
return Math.min(3, 1 + floor(executions/24))
}

def trunc(str, length) {
if (str.size() >= length) {
return str.take(length) + "..."
}

return str;
}

// All of the real rounding/truncating methods are sandboxed
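// e.g. floor(2.75) == 2: convert to a String, drop everything after the decimal point, and parse the integer part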
def floor(num) {
return num
.toString()
.split('\\.')[0]
.toInteger()
}
125 changes: 125 additions & 0 deletions .ci/end2end.groovy
@@ -0,0 +1,125 @@
#!/usr/bin/env groovy

library identifier: 'apm@current',
retriever: modernSCM(
[$class: 'GitSCMSource',
credentialsId: 'f94e9298-83ae-417e-ba91-85c279771570',
id: '37cf2c00-2cc7-482e-8c62-7bbffef475e2',
remote: 'git@github.com:elastic/apm-pipeline-library.git'])

pipeline {
agent { label 'linux && immutable' }
environment {
BASE_DIR = 'src/github.com/elastic/kibana'
HOME = "${env.WORKSPACE}"
APM_ITS = 'apm-integration-testing'
CYPRESS_DIR = 'x-pack/legacy/plugins/apm/cypress'
PIPELINE_LOG_LEVEL = 'DEBUG'
}
options {
timeout(time: 1, unit: 'HOURS')
buildDiscarder(logRotator(numToKeepStr: '40', artifactNumToKeepStr: '20', daysToKeepStr: '30'))
timestamps()
ansiColor('xterm')
disableResume()
durabilityHint('PERFORMANCE_OPTIMIZED')
}
triggers {
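// Trigger a build from a PR comment such as "jenkins run the e2e" or "jenkins, run e2e please" (case-insensitive)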
issueCommentTrigger('(?i).*jenkins\\W+run\\W+(?:the\\W+)?e2e(?:\\W+please)?.*')
}
parameters {
booleanParam(name: 'FORCE', defaultValue: false, description: 'Whether to force the run.')
}
stages {
stage('Checkout') {
options { skipDefaultCheckout() }
steps {
deleteDir()
gitCheckout(basedir: "${BASE_DIR}", githubNotifyFirstTimeContributor: false,
shallow: false, reference: "/var/lib/jenkins/.git-references/kibana.git")
script {
dir("${BASE_DIR}"){
def regexps =[ "^x-pack/legacy/plugins/apm/.*" ]
env.APM_UPDATED = isGitRegionMatch(patterns: regexps)
}
}
dir("${APM_ITS}"){
git changelog: false,
credentialsId: 'f6c7695a-671e-4f4f-a331-acdce44ff9ba',
poll: false,
url: "git@github.com:elastic/${APM_ITS}.git"
}
}
}
stage('Start services') {
options { skipDefaultCheckout() }
when {
anyOf {
expression { return params.FORCE }
expression { return env.APM_UPDATED != "false" }
}
}
steps {
dir("${APM_ITS}"){
sh './scripts/compose.py start master --no-kibana --no-xpack-secure'
}
}
}
stage('Prepare Kibana') {
options { skipDefaultCheckout() }
when {
anyOf {
expression { return params.FORCE }
expression { return env.APM_UPDATED != "false" }
}
}
environment {
JENKINS_NODE_COOKIE = 'dontKillMe'
}
steps {
dir("${BASE_DIR}"){
sh script: "${CYPRESS_DIR}/ci/prepare-kibana.sh"
}
}
}
stage('Smoke Tests'){
options { skipDefaultCheckout() }
when {
anyOf {
expression { return params.FORCE }
expression { return env.APM_UPDATED != "false" }
}
}
steps{
dir("${BASE_DIR}"){
sh '''
jobs -l
docker build --tag cypress ${CYPRESS_DIR}/ci
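# run the Cypress image as the invoking user, mount the repo at /app, and share the host network so the tests can reach the locally running services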
docker run --rm -t --user "$(id -u):$(id -g)" \
-v `pwd`:/app --network="host" \
--name cypress cypress'''
}
}
post {
always {
dir("${BASE_DIR}"){
archiveArtifacts(allowEmptyArchive: false, artifacts: "${CYPRESS_DIR}/screenshots/**,${CYPRESS_DIR}/videos/**,${CYPRESS_DIR}/*e2e-tests.xml")
junit(allowEmptyResults: true, testResults: "${CYPRESS_DIR}/*e2e-tests.xml")
}
dir("${APM_ITS}"){
sh 'docker-compose logs > apm-its.log || true'
sh 'docker-compose down -v || true'
archiveArtifacts(allowEmptyArchive: false, artifacts: 'apm-its.log')
}
}
}
}
}
post {
always {
dir("${BASE_DIR}"){
archiveArtifacts(allowEmptyArchive: true, artifacts: "${CYPRESS_DIR}/ingest-data.log,kibana.log")
}
}
}
}