Jenkinsfile
@Library('shared-libraries') _
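
// Deploys the test app to MarkLogic, loads the test data, and runs the connector's Gradle test
// suite using the JDK whose home directory is stored in the environment variable named by
// javaVersion (e.g. 'JAVA17_HOME_DIR'). JUnit results are always published, even if tests fail.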
def runtests(String javaVersion){
  sh label:'test', script: '''#!/bin/bash
    export JAVA_HOME=$'''+javaVersion+'''
    export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR
    export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH
    cd marklogic-spark-connector
    echo "mlPassword=admin" > gradle-local.properties
    echo "Waiting for MarkLogic server to initialize."
    sleep 30s
    ./gradlew -i mlDeploy
    echo "Loading the data a second time to try to avoid an Optic bug that returns duplicate rows."
    ./gradlew -i mlLoadData
    ./gradlew test || true
  '''
  junit '**/build/**/*.xml'
}
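
// Runs a SonarQube analysis of the connector with the given JDK; '|| true' keeps scan failures
// from failing the build. Invoked inside withSonarQubeEnv in the 'tests' stage so the SonarQube
// server configuration is injected.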
def runSonarScan(String javaVersion){
  sh label:'test', script: '''#!/bin/bash
    export JAVA_HOME=$'''+javaVersion+'''
    export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR
    export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH
    cd marklogic-spark-connector
    ./gradlew sonar -Dsonar.projectKey='marklogic_marklogic-spark-connector_AY1bXn6J_50_odbCDKMX' -Dsonar.projectName='ML-DevExp-marklogic-spark-connector' || true
  '''
}
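
// Declarative pipeline: the 'tests' stage runs on every branch, while 'publish' and 'regressions'
// are limited to develop. The parameterized cron trigger starts a nightly run at 02:00 on develop
// with the 'regressions' parameter set to true.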
pipeline{
  agent none
  triggers{
    parameterizedCron(env.BRANCH_NAME == "develop" ? "00 02 * * * % regressions=true" : "")
  }
  parameters{
    booleanParam(name: 'regressions', defaultValue: false, description: 'Whether this build should run the regression stage')
  }
  options {
    checkoutToSubdirectory 'marklogic-spark-connector'
    buildDiscarder logRotator(artifactDaysToKeepStr: '7', artifactNumToKeepStr: '', daysToKeepStr: '30', numToKeepStr: '')
  }
  environment{
    JAVA17_HOME_DIR = "/home/builder/java/jdk-17.0.2"
    GRADLE_DIR      = ".gradle"
    DMC_USER        = credentials('MLBUILD_USER')
    DMC_PASSWORD    = credentials('MLBUILD_PASSWORD')
  }
  stages{
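    // Removes any locally installed MarkLogic server, starts a fresh MarkLogic instance via
    // docker-compose, runs the test suite, and then performs a SonarQube scan. The containers
    // and collected logs are cleaned up in the post block regardless of the outcome.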
    stage('tests'){
      environment{
        scannerHome = tool 'SONAR_Progress'
      }
      agent {label 'devExpLinuxPool'}
      steps{
        sh label:'mlsetup', script: '''#!/bin/bash
          echo "Removing any running MarkLogic server and cleaning up the MarkLogic data directory."
          sudo /usr/local/sbin/mladmin remove
          sudo /usr/local/sbin/mladmin cleandata
          cd marklogic-spark-connector
          mkdir -p docker/marklogic/logs
          docker-compose down -v || true
          docker-compose up -d --build
        '''
        runtests('JAVA17_HOME_DIR')
        withSonarQubeEnv('SONAR_Progress') {
          runSonarScan('JAVA17_HOME_DIR')
        }
      }
      post{
        always{
          sh label:'mlcleanup', script: '''#!/bin/bash
            cd marklogic-spark-connector
            docker-compose down -v || true
            sudo /usr/local/sbin/mladmin delete $WORKSPACE/marklogic-spark-connector/docker/marklogic/logs/
          '''
        }
      }
    }
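    // Publishes the connector artifacts via './gradlew publish', develop branch only, using the
    // Gradle properties already present on the build agent (~/.gradle/gradle.properties).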
    stage('publish'){
      agent {label 'devExpLinuxPool'}
      when {
        branch 'develop'
      }
      steps{
        sh label:'publish', script: '''#!/bin/bash
          export JAVA_HOME=$JAVA17_HOME_DIR
          export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR
          export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH
          cp ~/.gradle/gradle.properties $GRADLE_USER_HOME;
          cd marklogic-spark-connector
          ./gradlew publish
        '''
      }
    }
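    // Nightly regression run against MarkLogic 10 (MARKLOGIC_TAG=latest-10.0). Only executes on
    // develop when the 'regressions' parameter is true, which the cron trigger sets automatically.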
    stage('regressions'){
      agent {label 'devExpLinuxPool'}
      when{
        allOf{
          branch 'develop'
          expression {return params.regressions}
        }
      }
      steps{
        sh label:'mlsetup', script: '''#!/bin/bash
          echo "Removing any running MarkLogic server and cleaning up the MarkLogic data directory."
          sudo /usr/local/sbin/mladmin remove
          sudo /usr/local/sbin/mladmin cleandata
          cd marklogic-spark-connector
          mkdir -p docker/marklogic/logs
          docker-compose down -v || true
          MARKLOGIC_TAG=latest-10.0 docker-compose up -d --build
        '''
        runtests('JAVA17_HOME_DIR')
      }
      post{
        always{
          sh label:'mlcleanup', script: '''#!/bin/bash
            cd marklogic-spark-connector
            docker-compose down -v || true
            sudo /usr/local/sbin/mladmin delete $WORKSPACE/marklogic-spark-connector/docker/marklogic/logs/
          '''
        }
      }
    }
  }
}