target
.gradle
+# We do not want to store binary objects in this repo
+*.jar
+gradlew*
+.tox
+node_modules
+gradle/*
+docs/_build
# Created by https://www.gitignore.io/api/gradle,eclipse,intellij
# Edit at https://www.gitignore.io/?templates=gradle,eclipse,intellij
--- /dev/null
+[gerrit]
+host=gerrit.linuxfoundation.org
+port=29418
+project=releng/pipelines.git
+defaultbranch=master
rev: v0.13.1
hooks:
- id: gitlint
-
- - repo: https://github.com/neuhalje/pre-commit-gradle.git
- rev: v0.0.1
- hooks:
- - id: gradle-check
// limitations under the License.
//
+loadGlobalLibrary()
+
pipeline {
agent {
label 'centos7-docker-4c-2g'
timeout(360)
}
+ environment {
+ PYTHON = "python3"
+ TOX_DIR = "."
+ TOX_ENVS = ""
+ }
+
stages {
stage('Lint Pipelines') {
steps {
sh "./scripts/pipeline-linter.sh $JENKINS_URL"
}
}
+
+ stage('Tox Tests') {
+ steps {
+ // Since these scripts are not all set with +x (due to being
+ // read into shell steps in global-jjb rather than executed
+ // directly), we have to read the files into the sh step.
+
+ // TODO: Replace with a tox-run library call once it is
+ // implemented.
+ sh readFile(file: "resources/shell/python-tools-install.sh")
+ sh readFile(file: "resources/shell/tox-install.sh")
+ sh readFile(file: "resources/shell/tox-run.sh")
+
+ junit allowEmptyResults: true,
+ testResults: 'target/test-results/test/*.xml'
+
+ // Test summary
+ publishHTML([
+ allowMissing: true,
+ alwaysLinkToLastBuild: true,
+ keepAll: true,
+ reportDir: 'target/reports/tests/test',
+ reportFiles: 'index.html',
+ reportName: 'Unit Test Summary'
+ ])
+ }
+ }
+ }
+
+ post {
+ failure {
+ script {
+ currentBuild.result = "FAILED"
+ }
+ }
}
}
+
+def loadGlobalLibrary(branch = '*/master') {
+ library(identifier: 'pipelines@master',
+ retriever: legacySCM([
+ $class: 'GitSCM',
+ userRemoteConfigs: [[url: 'https://gerrit.linuxfoundation.org/infra/releng/pipelines']],
+ branches: [[name: branch]],
+ doGenerateSubmoduleConfigurations: false,
+ extensions: [[
+ $class: 'SubmoduleOption',
+ recursiveSubmodules: true,
+ ]]]
+ )
+ ) _
+}
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+//
+// Copyright (c) 2019 Intel Corporation
+// Copyright (c) 2020 The Linux Foundation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+plugins {
+ id 'java'
+ id 'groovy'
+ id 'jacoco'
+}
+
+jacoco {
+ toolVersion = "0.8.5"
+}
+
+group = 'thelinuxfoundation'
+version = "0.0.1"
+description = "Testing Shared Pipelines Library"
+
+// Spock works with Java 1.7 and above; this project targets Java 8
+sourceCompatibility = 1.8
+
+project.buildDir = 'target'
+
+repositories {
+ // Spock releases are available from Maven Central
+ mavenCentral()
+ maven { url "https://repo.jenkins-ci.org/public" }
+}
+
+dependencies {
+ def withoutIcu = { exclude group: 'com.ibm.icu', module: 'icu4j' }
+
+ // mandatory dependencies for using Spock
+ implementation "org.codehaus.groovy:groovy-all:2.5.8"
+ implementation "com.cloudbees:groovy-cps:1.31@jar", withoutIcu
+
+ implementation "org.slf4j:jcl-over-slf4j:1.7.25"
+ testImplementation "org.slf4j:log4j-over-slf4j:1.7.25"
+ testImplementation "org.slf4j:slf4j-api:1.7.25"
+
+ testImplementation "ch.qos.logback:logback-core:1.2.3"
+ testImplementation "ch.qos.logback:logback-classic:1.2.3"
+ testImplementation "org.apache.commons:commons-csv:1.1"
+ testImplementation "com.google.guava:guava:20.0"
+
+ testImplementation group: 'org.apache.maven', name: 'maven-model',
+ version: '3.0.2'
+
+ testImplementation "org.spockframework:spock-core:1.3-groovy-2.5@jar"
+
+ // Jenkins related
+ testImplementation "com.homeaway.devtools.jenkins:jenkins-spock:2.1.2"
+ testImplementation "javax.servlet:javax.servlet-api:3.1.0"
+ testImplementation "org.jenkins-ci.main:jenkins-core:2.225", withoutIcu
+ testImplementation "org.jenkins-ci.plugins.workflow:workflow-api:2.40@jar"
+ testImplementation \
+ "org.jenkins-ci.plugins.workflow:workflow-step-api:2.22@jar"
+ testImplementation "org.jenkins-ci.plugins.workflow:workflow-cps:2.78@jar"
+ testImplementation \
+ "org.jenkins-ci.plugins.workflow:workflow-durable-task-step:2.35@jar"
+ // durable-task: transitive dependency for workflow-durable-task-step
+ testImplementation "org.jenkins-ci.plugins:durable-task:1.33@jar"
+ // workflow-cps-global-lib: provides libraryResource() step
+ testImplementation \
+ "org.jenkins-ci.plugins.workflow:workflow-cps-global-lib:2.15@jar"
+ testImplementation "org.jenkins-ci:symbol-annotation:1.10"
+ testImplementation "org.jenkins-ci.plugins:pipeline-stage-step:2.3@jar"
+ testImplementation "org.jenkins-ci.plugins:ssh-agent:1.17@jar"
+ testImplementation "org.jenkins-ci.plugins:config-file-provider:3.6.2@jar"
+ testImplementation "org.jenkins-ci.plugins:credentials-binding:1.20@jar"
+ testImplementation "org.jenkins-ci.plugins:pipeline-utility-steps:2.3.1@jar"
+ testImplementation "org.jenkins-ci.plugins:script-security:1.68@jar"
+ testImplementation "org.jenkins-ci.plugins:docker-commons:1.15@jar"
+ testImplementation "org.jenkins-ci.plugins:docker-workflow:1.21@jar"
+ testImplementation "org.springframework:spring-core:4.3.19.RELEASE"
+ testImplementation "org.springframework:spring-test:4.3.2.RELEASE"
+}
+
+test {
+ systemProperty 'root.loglevel', 'INFO'
+ systemProperty 'root.appender', 'Stdout'
+ systemProperty 'test.loglevel', 'INFO'
+ systemProperty 'logdir', './target/logs'
+
+ reports {
+ junitXml.enabled = true
+ }
+
+ // prints a summary after test execution
+ testLogging {
+ afterSuite { desc, result ->
+ if (!desc.parent) {
+ println "Results: ${result.resultType} (${result.testCount} tests, " \
+ + "${result.successfulTestCount} successes, " \
+ + "${result.failedTestCount} failures, ${result.skippedTestCount} " \
+ + "skipped)"
+ }
+ }
+ }
+}
+
+jacocoTestReport {
+ reports {
+ xml.enabled true
+ csv.enabled false
+ html.enabled true
+ }
+}
+
+// this is needed for spock to find all the source code in the var directory
+task copyGlobalLibVars (type: Copy) {
+ from "$rootDir/vars"
+ include '**/*.groovy'
+ into "$buildDir/classes/vars"
+}
+
+compileTestGroovy {
+ options.incremental = true
+ options.fork = true
+ options.failOnError = false
+}
+compileTestGroovy.dependsOn copyGlobalLibVars
+
+// print the test classpath. Good for debugging ClassNotFound issues
+task printClasspath {
+ doLast {
+ configurations.testRuntimeClasspath.each { println it }
+ }
+}
--- /dev/null
+<center>
+ <div class="btn-group" role="group" aria-label="...">
+ {% if prev %}
+ <a class="btn btn-default" href="{{ prev.link|e }}">Prev Page</a>
+ {% else %}
+ <button type="button" class="btn btn-default disabled">Prev Page</button>
+ {% endif %} {% if next %}
+ <a class="btn btn-default" href="{{ next.link|e }}">Next Page</a>
+ {% else %}
+ <button type="button" class="btn btn-default disabled">Next Page</button>
+ {% endif %}
+ </div>
+</center>
--- /dev/null
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Linux Foundation Release Engineering Pipelines documentation build
+# configuration file, created by sphinx-quickstart on Sat Mar 4 12:20:05 2017.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import subprocess
+import sys
+import sphinx_bootstrap_theme
+
+sys.path.insert(0, os.path.abspath('..'))
+
+
+def format_version(version):
+ fmt = '{tag}.dev{commitcount}+{gitsha}'
+ parts = version.split('-')
+ assert len(parts) in (3, 4)
+ dirty = len(parts) == 4
+ tag, count, sha = parts[:3]
+ if count == '0' and not dirty:
+ return tag
+ return fmt.format(tag=tag, commitcount=count, gitsha=sha.lstrip('g'))
+
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'reno.sphinxext',
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.viewcode',
+ 'sphinxcontrib.programoutput',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'lf-releng-pipelines'
+copyright = '2020, The Linux Foundation'
+author = 'Linux Foundation Releng'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+command = 'git describe --tags --long --dirty'
+try:
+ git_version = format_version(
+ subprocess.check_output(command.split()).decode('utf-8').strip())
+except subprocess.CalledProcessError: # Handle docs builds from tarball
+ git_version = "v0.0.9999-local"
+version = git_version
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#
+# today = ''
+#
+# Else, today_fmt is used as the format for a strftime call.
+#
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'bootstrap'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+html_theme_options = {
+ 'bootswatch_theme': "cerulean",
+ 'navbar_sidebarrel': False,
+ 'source_link_position': "footer",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
+
+# The name for this set of Sphinx documents.
+# "<project> v<release> documentation" by default.
+#
+# html_title = 'Linux Foundation Release Engineering Pipelines v0.0.1'
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#
+html_logo = '_static/lf-logo-small.png'
+
+# The name of an image file (relative to this directory) to use as a favicon of
+# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#
+html_favicon = 'favicon.ico'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#
+# html_extra_path = []
+
+# If not None, a 'Last updated on:' timestamp is inserted at every page
+# bottom, using the given strftime format.
+# The empty string is equivalent to '%b %d, %Y'.
+#
+# html_last_updated_fmt = None
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#
+html_sidebars = {
+ '**': ['localtoc.html', 'relations.html'],
+}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+#
+# html_domain_indices = True
+
+# If false, no index is generated.
+#
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
+#
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# 'ja' uses this config value.
+# 'zh' user can custom change `jieba` dictionary path.
+#
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'LinuxFoundationReleaseEngineeringPipelinesdoc'
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'jjb': ('https://docs.openstack.org/infra/jenkins-job-builder/', None),
+ 'lfdocs': ('http://docs.releng.linuxfoundation.org/en/latest/', None),
+ 'global-jjb': ('http://global-jjb.releng.linuxfoundation.org/en/latest/', None),
+}
--- /dev/null
+#############
+Configuration
+#############
+
In order to use the LF RelEng Pipelines Library, it simply needs to be added
to the Pipeline Libraries configured in Jenkins. This can be done in the
global Jenkins settings under Global Pipeline Libraries, in individual job
settings, or in the GitHub Organization config, if using the GitHub plugin
for an entire org.
+
+.. warning::
+
+ Global Pipeline Libraries imported in the global Settings do not run inside
+ of Jenkins' groovy sandbox. This can lead to security risks, and it is
+ recommended that the library be imported at a more specific level.
+
+With the library imported, all functions in the ``vars`` directory can be called
+directly in Jenkinsfile, without the need for further imports. For a more
+detailed explanation of pipeline libraries, please see the
+`Jenkins Shared Library docs
+<https://www.jenkins.io/doc/book/pipeline/shared-libraries/#using-libraries>`_.
--- /dev/null
+.. _lfreleng-pipelines:
+
+Linux Foundation Releng Pipelines Library
+=========================================
+
+Linux Foundation Release Engineering Pipelines Library Documentation.
+
+The Pipelines Library is a library of Jenkins pipeline functions that
+replicate the functionality of
+:ref:`Global-JJB <global-jjb:lfreleng-global-jjb>`. Just as Global-JJB
+simplifies the process of creating jobs in Jenkins Job Builder, this library is
+intended to save time and standardize the creation of pipelines by providing
+common functionality.
+
+Release Notes
+-------------
+
+.. toctree::
+ :maxdepth: 2
+
+ release-notes
+
+Guides
+------
+
+.. toctree::
+ :maxdepth: 2
+
+ configuration
+
+Functions
+---------
+
+All available functions are stored in the ``vars`` directory. See below for
+information on each available function.
+
+.. toctree::
+ :glob:
+ :maxdepth: 1
+
+ vars/*
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
--- /dev/null
+.. release-notes:: Release Notes
--- /dev/null
+###############
+lfInfraShipLogs
+###############
+
+Parameters
+==========
+
+:Optional Parameters:
+
+  :logSettingsFile: Override the ID of the managed config file where log
+                    shipping credentials can be found. Default:
+                    ``jenkins-log-archives-settings``
+
+Usage
+=====
+
+lfInfraShipLogs looks for two environment variables: ``LOGS_SERVER`` and
+``S3_BUCKET``. If ``LOGS_SERVER`` is present, logs will be pushed to the Nexus
+server that the variable points to. If ``S3_BUCKET`` is present and contains a
+name matching the regex ``.*logs-s3.*``, logs will be pushed to the indicated
+S3 bucket.
+
+If both ``LOGS_SERVER`` and ``S3_BUCKET`` are defined, lfInfraShipLogs will
+attempt to push to both servers. If neither is defined, it will echo this fact
+and return.
--- /dev/null
+---
+prelude: >
+ First release of the Linux Foundation's Jenkins pipeline library.
+features:
+ - |
+ lfInfraShipLogs, which implements log shipping as seen in global-jjb. This
+ includes S3 shipping, Nexus shipping, and job cost data.
--- /dev/null
+reno~=2.11.2
+Sphinx~=2.3.1
+sphinx_bootstrap_theme~=0.7.1
+sphinxcontrib-programoutput
+yq
--- /dev/null
+../global-jjb/shell
\ No newline at end of file
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+//
+// Copyright (c) 2019 Intel Corporation
+// Copyright (c) 2020 The Linux Foundation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification
+
+public class LFInfraShipLogsSpec extends JenkinsPipelineSpecification {
+
+ def lfInfraShipLogs = null
+
+ def setup() {
+ lfInfraShipLogs = loadPipelineScriptForTest('vars/lfInfraShipLogs.groovy')
+ explicitlyMockPipelineVariable('out')
+ }
+
+ def "Test lfInfraShipLogs [Should] throw exception [When] logSettingsFile is null" () {
+ setup:
+ when:
+ lfInfraShipLogs({logSettingsFile = null})
+ then:
+ thrown Exception
+ }
+
+ def "Test lfInfraShipLogs [Should] call expected shell scripts [When] called" () {
+ setup:
+ lfInfraShipLogs.getBinding().setVariable('SILO', 'MySilo')
+ lfInfraShipLogs.getBinding().setVariable('JENKINS_HOSTNAME', 'MyJenkinsHostname')
+ lfInfraShipLogs.getBinding().setVariable('JOB_NAME', 'MyJobName')
+ lfInfraShipLogs.getBinding().setVariable('BUILD_NUMBER', 'MyBuildNumber')
+ lfInfraShipLogs.getBinding().setVariable('CDN_URL', 'MyCdnUrl')
+ explicitlyMockPipelineStep('echo')
+ explicitlyMockPipelineStep('withEnv')
+ getPipelineMock("libraryResource")('shell/create-netrc.sh') >> {
+ return 'create-netrc'
+ }
+ getPipelineMock("libraryResource")('shell/python-tools-install.sh') >> {
+ return 'python-tools-install'
+ }
+ getPipelineMock("libraryResource")('shell/sudo-logs.sh') >> {
+ return 'sudo-logs'
+ }
+ getPipelineMock("libraryResource")('shell/job-cost.sh') >> {
+ return 'job-cost'
+ }
+ getPipelineMock("libraryResource")('shell/logs-deploy.sh') >> {
+ return 'logs-deploy'
+ }
+ getPipelineMock("libraryResource")('shell/logs-clear-credentials.sh') >> {
+ return 'logs-clear-credentials'
+ }
+ when: 'Only LOGS_SERVER defined'
+ lfInfraShipLogs.getBinding().setVariable('LOGS_SERVER', 'MyLogServer')
+ lfInfraShipLogs.getBinding().setVariable('S3_BUCKET', '')
+ // currentBuild needs to be reset for each test, so that it does not
+ // carry over values between tests.
+ lfInfraShipLogs.getBinding().setVariable('currentBuild', [:])
+ lfInfraShipLogs()
+ then:
+ 1 * getPipelineMock('withEnv').call(_) >> { _arguments ->
+ def envArgs = [
+ 'SERVER_ID=logs'
+ ]
+ assert envArgs == _arguments[0][0]
+ }
+ 1 * getPipelineMock('sh').call([script:'create-netrc'])
+ 1 * getPipelineMock('sh').call([script:'python-tools-install'])
+ 1 * getPipelineMock('sh').call([script:'sudo-logs'])
+ 1 * getPipelineMock('sh').call([script:'job-cost'])
+ 1 * getPipelineMock('sh').call([script:'logs-deploy'])
+ 1 * getPipelineMock('sh').call([script:'logs-clear-credentials'])
+ assert lfInfraShipLogs.getBinding().getVariable("currentBuild").
+ description =~ /Nexus build logs: /
+
+ when: 'Only S3_BUCKET defined'
+ lfInfraShipLogs.getBinding().setVariable('LOGS_SERVER', '')
+ lfInfraShipLogs.getBinding().setVariable('S3_BUCKET', 'my-logs-s3')
+ lfInfraShipLogs.getBinding().setVariable('currentBuild', [:])
+ lfInfraShipLogs()
+ then:
+ 1 * getPipelineMock('withEnv').call(_) >> { _arguments ->
+ def envArgs = [
+ 'SERVER_ID=logs'
+ ]
+ assert envArgs == _arguments[0][0]
+ }
+ 1 * getPipelineMock('sh').call([script:'create-netrc'])
+ 1 * getPipelineMock('sh').call([script:'python-tools-install'])
+ 1 * getPipelineMock('sh').call([script:'sudo-logs'])
+ 1 * getPipelineMock('sh').call([script:'job-cost'])
+ 1 * getPipelineMock('sh').call([script:'logs-deploy'])
+ 1 * getPipelineMock('sh').call([script:'logs-clear-credentials'])
+ assert lfInfraShipLogs.getBinding().getVariable("currentBuild").
+ description =~ /S3 build logs: /
+
+ when: 'LOGS_SERVER and S3_BUCKET defined'
+ lfInfraShipLogs.getBinding().setVariable('LOGS_SERVER', 'MyLogsServer')
+ lfInfraShipLogs.getBinding().setVariable('S3_BUCKET', 'my-logs-s3')
+ lfInfraShipLogs.getBinding().setVariable('currentBuild', [:])
+ lfInfraShipLogs()
+ then:
+ 1 * getPipelineMock('withEnv').call(_) >> { _arguments ->
+ def envArgs = [
+ 'SERVER_ID=logs'
+ ]
+ assert envArgs == _arguments[0][0]
+ }
+ 1 * getPipelineMock('sh').call([script:'create-netrc'])
+ 1 * getPipelineMock('sh').call([script:'python-tools-install'])
+ 1 * getPipelineMock('sh').call([script:'sudo-logs'])
+ 1 * getPipelineMock('sh').call([script:'job-cost'])
+ 1 * getPipelineMock('sh').call([script:'logs-deploy'])
+ 1 * getPipelineMock('sh').call([script:'logs-clear-credentials'])
+ assert lfInfraShipLogs.getBinding().getVariable("currentBuild").
+ description =~ /Nexus build logs: /
+ assert lfInfraShipLogs.getBinding().getVariable("currentBuild").
+ description =~ /S3 build logs: /
+
+ when: 'No server is defined'
+ lfInfraShipLogs.getBinding().setVariable('LOGS_SERVER', '')
+ lfInfraShipLogs.getBinding().setVariable('S3_BUCKET', '')
+ lfInfraShipLogs.getBinding().setVariable('currentBuild', [:])
+ lfInfraShipLogs()
+ then: 'All steps are skipped'
+ 0 * getPipelineMock('withEnv').call(_) >> { _arguments ->
+ def envArgs = [
+ 'SERVER_ID=logs'
+ ]
+ assert envArgs == _arguments[0][0]
+ }
+ 0 * getPipelineMock('sh').call([script:'create-netrc'])
+ 0 * getPipelineMock('sh').call([script:'python-tools-install'])
+ 0 * getPipelineMock('sh').call([script:'sudo-logs'])
+ 0 * getPipelineMock('sh').call([script:'job-cost'])
+ 0 * getPipelineMock('sh').call([script:'logs-deploy'])
+ 0 * getPipelineMock('sh').call([script:'logs-clear-credentials'])
+ assert !(lfInfraShipLogs.getBinding().getVariable("currentBuild").
+ description =~ /Nexus build logs: /)
+ assert !(lfInfraShipLogs.getBinding().getVariable("currentBuild").
+ description =~ /S3 build logs: /)
+ }
+}
envlist =
docs,
docs-linkcheck,
+ gradle,
license,
- pre-commit
+ pre-commit,
+ reno
minversion = 3.7
skipsdist = True
[testenv:docs]
description = Build the documentation with sphinx
basepython = python3
-deps = -rrequirements-docs.txt
+deps = -rrequirements.txt
extras = openstack
commands = sphinx-build -W -b html -n -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/html
[testenv:docs-linkcheck]
description = Check the documentation links with sphinx
basepython = python3
-deps = -rrequirements-docs.txt
-extras = openstack
+deps = -rrequirements.txt
commands = sphinx-build -W -b linkcheck -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/linkcheck
-[testenv:license]
-description = Check all files for license header
-basepython = python3
-deps = lftools
+[testenv:gradle]
+description = Bootstrap the Gradle wrapper and run Gradle tests
+allowlist_externals =
+ */bash
+ */gradle
+setenv = GRADLE_VER=6.5.1
+commands_pre =
+ bash -ec 'if ! command -v gradle >/dev/null 2>&1; then curl -L \
+ "https://services.gradle.org/distributions/gradle-{env:GRADLE_VER}-bin.zip" \
+ -o {envtmpdir}/gradle.zip && \
+ unzip -o {envtmpdir}/gradle.zip -d {envtmpdir} && \
+ ln -s {envtmpdir}/gradle-{env:GRADLE_VER}/bin/gradle \
+ {envbindir}/gradle; fi'
commands =
- lftools license check-dir lftools
- lftools license check-dir -r '.+' shell
+ gradle wrapper --gradle-version {env:GRADLE_VER} --distribution-type all
+ ./gradlew clean test
[testenv:pre-commit]
description = Precommit checks for black, gitlint, etc.
* @param body Config values to be provided in the form "key = value".
*/
def call(body) {
- // Stub, to be expanded in future commits
+ // Evaluate the body block and collect configuration into the object
+ def config = [:]
+
+ if (body) {
+ body.resolveStrategy = Closure.DELEGATE_FIRST
+ body.delegate = config
+ body()
+ }
+
+ def _logSettingsFile = config.logSettingsFile ?: "jenkins-log-archives-settings"
+ if (!_logSettingsFile) {
+ throw new Exception("Log settings file id (logSettingsFile) is " +
+ "required for LF log deploy script.")
+ }
+
+ if (!("$S3_BUCKET" =~ /.*logs-s3.*/) && "$LOGS_SERVER" == "") {
+ echo "No LOGS_SERVER or valid S3_BUCKET defined. Skipping log shipping."
+ } else {
+ // SERVER_ID should always be "logs" when running create-netrc for log shipping
+ withEnv(["SERVER_ID=logs"]){
+ configFileProvider([configFile(fileId: _logSettingsFile,
+ variable: 'SETTINGS_FILE')]) {
+ echo 'Running shell/create-netrc.sh'
+ sh(script: libraryResource('shell/create-netrc.sh'))
+ }
+
+ echo 'Running shell/python-tools-install.sh'
+ sh(script: libraryResource('shell/python-tools-install.sh'))
+ echo 'Running shell/sudo-logs.sh'
+ sh(script: libraryResource('shell/sudo-logs.sh'))
+ echo 'Running shell/job-cost.sh'
+ sh(script: libraryResource('shell/job-cost.sh'))
+
+ buildDesc = ""
+
+ if ("$S3_BUCKET" =~ /.*logs-s3.*/) {
+ // If S3_BUCKET is defined, we need the config file
+ configFileProvider([configFile(fileId: "jenkins-s3-log-ship",
+ targetLocation: '$HOME/.aws/credentials')]) {
+ echo 'Running shell/logs-deploy.sh'
+ sh(script: libraryResource('shell/logs-deploy.sh'))
+ }
+ s3_path = "logs/${SILO}/${JENKINS_HOSTNAME}/${JOB_NAME}/${BUILD_NUMBER}/"
+ buildDesc += "S3 build logs: <a href=\"https://$CDN_URL/$s3_path\"></a>\n"
+ // If LOGS_SERVER is also defined, logs-deploy.sh will deploy to both
+ if ("$LOGS_SERVER" != "") {
+ nexus_path = "${SILO}/${JENKINS_HOSTNAME}/${JOB_NAME}/${BUILD_NUMBER}"
+ buildDesc += "Nexus build logs: <a href=\"$LOGS_SERVER/" +
+ "$nexus_path\">$LOGS_SERVER/$nexus_path</a>\n"
+ }
+ } else { // Only LOGS_SERVER is defined
+ echo 'Running shell/logs-deploy.sh'
+ sh(script: libraryResource('shell/logs-deploy.sh'))
+ nexus_path = "${SILO}/${JENKINS_HOSTNAME}/${JOB_NAME}/${BUILD_NUMBER}"
+ buildDesc += "Nexus build logs: <a href=\"$LOGS_SERVER/" +
+ "$nexus_path\">$LOGS_SERVER/$nexus_path</a>\n"
+ }
+
+ echo 'Running shell/logs-clear-credentials.sh'
+ sh(script: libraryResource('shell/logs-clear-credentials.sh'))
+ }
+
+ if (!currentBuild.description) {currentBuild.description = ''}
+ // The old GHPRB plugin updated the description to contain the PR #
+ // with a link to the PR. If the build description contains a link to
+ // the PR, then add a br.
+ if (currentBuild.description.contains('PR #')) {
+ currentBuild.description += "<br>"
+ }
+ currentBuild.description += buildDesc
+ }
}