Introduce publishToS3, publishToPulse, and submitToTreeherder steps.
This introduces the submitToTreeherder step along with some necessary refactoring.
This commit is contained in:
Родитель
337da3aab0
Коммит
64cc356a1f
|
@ -3,3 +3,8 @@ This repository holds
|
|||
[shared libraries](https://jenkins.io/doc/book/pipeline/shared-libraries/) for
|
||||
[Jenkins pipelines](https://jenkins.io/doc/book/pipeline/) used by
|
||||
[Firefox Test Engineering](https://wiki.mozilla.org/TestEngineering).
|
||||
|
||||
## Requirements
|
||||
|
||||
* [Pipeline Utility Steps Plugin](https://wiki.jenkins-ci.org/display/JENKINS/Pipeline+Utility+Steps+Plugin)
|
||||
* [S3 Plugin](https://wiki.jenkins-ci.org/display/JENKINS/S3+Plugin)
|
||||
|
|
|
@ -0,0 +1,471 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "Job Definition",
|
||||
"description": "Definition of a single job that can be added to Treeherder\nProject is determined by the routing key, so we don't need to specify it here.\n",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"taskId": {
|
||||
"title": "taskId",
|
||||
"description": "This could just be what was formerly submitted as a job_guid in the\nREST API.\n",
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z0-9/+-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"retryId": {
|
||||
"title": "retryId",
|
||||
"description": "The infrastructure retry iteration on this job. The number of times this\njob has been retried by the infrastructure.\nIf it's the 1st time running, then it should be 0. If this is the first\nretry, it will be 1, etc.\n",
|
||||
"type": "integer",
|
||||
"default": 0,
|
||||
"minimum": 0
|
||||
},
|
||||
"isRetried": {
|
||||
"description": "True indicates this job has been retried.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"buildSystem": {
|
||||
"description": "The name of the build system that initiated this content. Some examples\nare \"buildbot\" and \"taskcluster\". But this could be any name. This\nvalue will be used in the routing key for retriggering jobs in the\npublish-job-action task.\n",
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 25
|
||||
},
|
||||
"origin": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"description": "PREFERRED: An HG job that only has a revision. This is for all\njobs going forward.\n",
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"hg.mozilla.org"
|
||||
]
|
||||
},
|
||||
"project": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"revision": {
|
||||
"type": "string",
|
||||
"pattern": "^[0-9a-f]+$",
|
||||
"minLength": 40,
|
||||
"maxLength": 40
|
||||
},
|
||||
"pushLogID": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind",
|
||||
"project",
|
||||
"revision"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"description": "BACKWARD COMPATABILITY: An HG job that only has a revision_hash.\nSome repos like mozilla-beta have not yet merged in the code that\nallows them access to the revision.\n",
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"hg.mozilla.org"
|
||||
]
|
||||
},
|
||||
"project": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"revision_hash": {
|
||||
"type": "string",
|
||||
"pattern": "^[0-9a-f]+$",
|
||||
"minLength": 40,
|
||||
"maxLength": 40
|
||||
},
|
||||
"pushLogID": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind",
|
||||
"project",
|
||||
"revision_hash"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"github.com"
|
||||
]
|
||||
},
|
||||
"owner": {
|
||||
"description": "This could be the organization or the individual git username\ndepending on who owns the repo.\n",
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"project": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"revision": {
|
||||
"type": "string",
|
||||
"minLength": 40,
|
||||
"maxLength": 40
|
||||
},
|
||||
"pullRequestID": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind",
|
||||
"project",
|
||||
"revision"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"display": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"jobSymbol": {
|
||||
"title": "jobSymbol",
|
||||
"type": "string",
|
||||
"minLength": 0,
|
||||
"maxLength": 25
|
||||
},
|
||||
"chunkId": {
|
||||
"title": "chunkId",
|
||||
"type": "integer",
|
||||
"minimum": 1
|
||||
},
|
||||
"chunkCount": {
|
||||
"title": "chunkCount",
|
||||
"type": "integer",
|
||||
"minimum": 1
|
||||
},
|
||||
"groupSymbol": {
|
||||
"title": "group symbol",
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 25
|
||||
},
|
||||
"jobName": {
|
||||
"title": "job name",
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 100
|
||||
},
|
||||
"groupName": {
|
||||
"title": "group name",
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 100
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"jobName",
|
||||
"jobSymbol",
|
||||
"groupSymbol"
|
||||
]
|
||||
},
|
||||
"state": {
|
||||
"title": "state",
|
||||
"description": "unscheduled: not yet scheduled\npending: not yet started\nrunning: currently in progress\ncompleted: Job ran through to completion\n",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"unscheduled",
|
||||
"pending",
|
||||
"running",
|
||||
"completed"
|
||||
]
|
||||
},
|
||||
"result": {
|
||||
"title": "result",
|
||||
"description": "fail: A failure\nexception: An infrastructure error/exception\nsuccess: Build/Test executed without error or failure\ncanceled: The job was cancelled by a user\nunknown: When the job is not yet completed\n",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"success",
|
||||
"fail",
|
||||
"exception",
|
||||
"canceled",
|
||||
"unknown"
|
||||
]
|
||||
},
|
||||
"jobKind": {
|
||||
"type": "string",
|
||||
"default": "other",
|
||||
"enum": [
|
||||
"build",
|
||||
"test",
|
||||
"other"
|
||||
]
|
||||
},
|
||||
"tier": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"maximum": 3
|
||||
},
|
||||
"coalesced": {
|
||||
"description": "The job guids that were coalesced to this job.",
|
||||
"title": "coalesced",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"title": "job guid",
|
||||
"type": "string",
|
||||
"pattern": "^[\\w/+-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
}
|
||||
},
|
||||
"timeScheduled": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"timeStarted": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"timeCompleted": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"labels": {
|
||||
"title": "labels",
|
||||
"description": "Labels are a dimension of a platform. The values here can vary wildly,\nso most strings are valid for this. The list of labels that are used\nis maleable going forward.\n\nThese were formerly known as \"Options\" within \"Option Collections\" but\ncalling labels now so they can be understood to be just strings that\ndenotes a characteristic of the job.\n\nSome examples of labels that have been used:\n opt Optimize Compiler GCC optimize flags\n debug Debug flags passed in\n pgo Profile Guided Optimization - Like opt, but runs with profiling, then builds again using that profiling\n asan Address Sanitizer\n tsan Thread Sanitizer Build\n",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 50,
|
||||
"pattern": "^[\\w-]+$"
|
||||
}
|
||||
},
|
||||
"owner": {
|
||||
"description": "Description of who submitted the job: gaia | scheduler name | username | email\n",
|
||||
"title": "owner",
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"reason": {
|
||||
"description": "Examples include:\n- scheduled\n- scheduler\n- Self-serve: Rebuilt by foo@example.com\n- Self-serve: Requested by foo@example.com\n- The Nightly scheduler named 'b2g_mozilla-inbound periodic' triggered this build\n- unknown\n",
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 125
|
||||
},
|
||||
"productName": {
|
||||
"description": "Examples include:\n- 'b2g'\n- 'firefox'\n- 'taskcluster'\n- 'xulrunner'\n",
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 125
|
||||
},
|
||||
"buildMachine": {
|
||||
"$ref": "#/definitions/machine"
|
||||
},
|
||||
"runMachine": {
|
||||
"$ref": "#/definitions/machine"
|
||||
},
|
||||
"jobInfo": {
|
||||
"description": "Definition of the Job Info for a job. These are extra data\nfields that go along with a job that will be displayed in\nthe details panel within Treeherder.\n",
|
||||
"id": "jobInfo",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"summary": {
|
||||
"type": "string",
|
||||
"description": "Plain text description of the job and its state. Submitted with\nthe final message about a task.\n"
|
||||
},
|
||||
"links": {
|
||||
"type": "array",
|
||||
"items": [
|
||||
{
|
||||
"type": "object",
|
||||
"description": "List of URLs shown as key/value pairs. Shown as:\n\"<label>: <linkText>\" where linkText will be a link to the url.\n",
|
||||
"properties": {
|
||||
"url": {
|
||||
"type": "string",
|
||||
"format": "uri",
|
||||
"maxLength": 512
|
||||
},
|
||||
"linkText": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 125
|
||||
},
|
||||
"label": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 70
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"url",
|
||||
"linkText",
|
||||
"label"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"logs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"url": {
|
||||
"type": "string",
|
||||
"format": "uri",
|
||||
"minLength": 1,
|
||||
"maxLength": 255
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"steps": {
|
||||
"type": "array",
|
||||
"description": "This object defines what is seen in the Treeherder Log Viewer.\nThese values can be submitted here, or they will be generated\nby Treeherder's internal log parsing process from the\nsubmitted log. If this value is submitted, Treeherder will\nconsider the log already parsed and skip parsing.\n",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"errors": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"line": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 255
|
||||
},
|
||||
"linenumber": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"maxLength": 255
|
||||
},
|
||||
"timeStarted": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"timeFinished": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"lineStarted": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"lineFinished": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"result": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"success",
|
||||
"fail",
|
||||
"exception",
|
||||
"canceled",
|
||||
"unknown"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"timeStarted",
|
||||
"lineStarted",
|
||||
"lineFinished",
|
||||
"timeFinished",
|
||||
"result"
|
||||
]
|
||||
}
|
||||
},
|
||||
"errorsTruncated": {
|
||||
"type": "boolean",
|
||||
"description": "If true, indicates that the number of errors in the log was too\nlarge and not all of those lines are indicated here.\n"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"url",
|
||||
"name"
|
||||
]
|
||||
}
|
||||
},
|
||||
"extra": {
|
||||
"type": "object",
|
||||
"description": "Extra information that Treeherder reads on a best-effort basis"
|
||||
},
|
||||
"version": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"taskId",
|
||||
"origin",
|
||||
"buildSystem",
|
||||
"display",
|
||||
"state",
|
||||
"jobKind",
|
||||
"version"
|
||||
],
|
||||
"definitions": {
|
||||
"machine": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 50
|
||||
},
|
||||
"platform": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 100
|
||||
},
|
||||
"os": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 25
|
||||
},
|
||||
"architecture": {
|
||||
"type": "string",
|
||||
"pattern": "^[\\w-]+$",
|
||||
"minLength": 1,
|
||||
"maxLength": 25
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"platform",
|
||||
"os",
|
||||
"architecture"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
package org.mozilla.fxtest
|
||||
|
||||
import org.jenkinsci.plugins.pipeline.modeldefinition.shaded.com.fasterxml.jackson.databind.ObjectMapper
|
||||
import com.github.fge.jsonschema.exceptions.ProcessingException
|
||||
import com.github.fge.jsonschema.main.JsonSchemaFactory
|
||||
import com.github.fge.jsonschema.util.JsonLoader
|
||||
|
||||
/** Validate a JSON payload against a JSON schema.
 *
 * Validation messages are echoed to the build console so failures can be
 * diagnosed from the job log.
 *
 * @param payload JSON document to validate, as a string
 * @param schema JSON schema to validate against, as a string
 * @throws ProcessingException if the payload fails validation
 */
@NonCPS
def validate(payload, schema) {
  def factory = JsonSchemaFactory.byDefault()
  def jsonSchema = factory.getJsonSchema(JsonLoader.fromString(schema))
  def mapper = new ObjectMapper()
  def instance = mapper.readTree(payload)
  def report = jsonSchema.validate(instance)
  if ( !report.isSuccess() ) {
    // Surface every validation message before failing the build.
    for ( message in report ) {
      echo "$message"
    }
    throw new ProcessingException('Failure validating Pulse payload against schema.')
  } else {
    echo 'Successfully validated Pulse payload against schema.'
  }
}
|
|
@ -0,0 +1,27 @@
|
|||
package org.mozilla.fxtest
|
||||
|
||||
@Grab(group='com.rabbitmq', module='amqp-client', version='4.1.0')
|
||||
import com.rabbitmq.client.AMQP
|
||||
import com.rabbitmq.client.ConnectionFactory
|
||||
import com.rabbitmq.client.MessageProperties
|
||||
|
||||
/** Publish a message to a Pulse (AMQP) exchange.
 *
 * The exchange is declared as a durable topic exchange and the message is
 * sent as persistent JSON. The connection and channel are always closed,
 * even if publishing fails.
 *
 * @param exchange exchange to publish to
 * @param routingKey routing key for the message
 * @param payload message body, published as application/json
 */
@NonCPS
def publish(String exchange, String routingKey, String payload) {
  def factory = new ConnectionFactory()
  // PULSE is expected to hold 'user:password' credentials for
  // pulse.mozilla.org -- NOTE(review): confirm it is bound by the caller.
  factory.setUri("amqp://${PULSE}@pulse.mozilla.org:5671")
  factory.useSslProtocol()
  def connection = factory.newConnection()
  try {
    def channel = connection.createChannel()
    try {
      channel.exchangeDeclare exchange, 'topic', true

      def properties = new AMQP.BasicProperties.Builder()
        .contentType('application/json')
        .deliveryMode(2)  // 2 = persistent delivery
        .build()

      channel.basicPublish exchange, routingKey, properties, payload.bytes
      echo "Published payload to Pulse on $exchange with routing key $routingKey."
      echo payload
    } finally {
      // Previously the channel/connection leaked when publishing threw.
      channel.close()
    }
  } finally {
    connection.close()
  }
}
|
|
@ -0,0 +1,18 @@
|
|||
/** Publish a message to a Pulse exchange.
 *
 * @param exchange exchange to send the message to
 * @param routingKey routing key to use
 * @param message message to send
 * @param schema optional JSON schema to validate the message against
 */
def call(String exchange, String routingKey, String message, String schema = null) {
  // Optional pre-publish validation; the validator throws on failure,
  // aborting the publish.
  if ( schema != null ) {
    new org.mozilla.fxtest.JsonSchemaValidator().validate(message, schema)
  }
  new org.mozilla.fxtest.Pulse().publish(exchange, routingKey, message)
}
|
|
@ -0,0 +1,13 @@
|
|||
<p>
|
||||
Publish message to a
|
||||
<a href="https://wiki.mozilla.org/Auto-tools/Projects/Pulse">Pulse</a>
|
||||
exchange.</p>
|
||||
<dl>
|
||||
<dt>publishToPulse(exchange, routingKey, message, [schema])</dt>
|
||||
<dd>
|
||||
Publishes the <code>message</code>, to Pulse with the specified
|
||||
<code>exchange</code> and <code>routingKey</code>. If a schema is provided
|
||||
then it will be used to check that the message is valid. If the message
|
||||
fails to pass validation, details will be output to the console log and
|
||||
<code>ProcessingException</code> will be thrown.
|
||||
</dl>
|
|
@ -0,0 +1,40 @@
|
|||
/** Publish files to an Amazon S3 bucket
 *
 * Files are uploaded beneath a per-build prefix (BUILD_TAG) so concurrent
 * builds cannot overwrite each other's artifacts.
 *
 * @param path path to the file(s) to publish
 * @param bucket bucket and destination for file(s)
 * @param region region bucket belongs to
 * @return list of [name, url] maps for published file(s)
 */
def call(String path, String bucket, String region = 'us-east-1') {
  // NOTE(review): profileName is hard-coded to 'ActiveData' although this
  // step accepts arbitrary buckets (it is also used for the Treeherder
  // bucket) -- confirm this profile has access to all target buckets.
  step([$class: 'S3BucketPublisher',
        consoleLogLevel: 'INFO',
        dontWaitForConcurrentBuildCompletion: false,
        entries: [[
          bucket: "$bucket/${BUILD_TAG}",
          excludedFile: '',
          flatten: true,
          gzipFiles: true,
          keepForever: false,
          managedArtifacts: false,
          noUploadOnFailure: false,
          selectedRegion: region,
          showDirectlyInBrowser: false,
          sourceFile: path,
          storageClass: 'STANDARD',
          uploadFromSlave: false,
          useServerSideEncryption: false]],
        pluginFailureResultConstraint: 'SUCCESS',
        profileName: 'ActiveData'])
  // Links are derived locally from the same glob; assumes the files matched
  // here are exactly those the publisher uploaded.
  return getLinks(bucket, path)
}
|
||||
|
||||
/** Build [name, url] link maps for files published to S3.
 *
 * @param bucket bucket (and optional prefix) the files were published to
 * @param path glob used to locate the published file(s)
 * @return list of [name, url] maps, one per matched file
 */
def getLinks(bucket, path) {
  def published = []
  def matches = findFiles(glob: path)
  // Plain loop (no closures) keeps this CPS-safe in scripted pipeline.
  for ( int i = 0; i < matches.length; i++ ) {
    def match = matches[i]
    published << [
      name: match.name,
      url: "https://s3.amazonaws.com/$bucket/${BUILD_TAG}/${match.name}"]
  }
  return published
}
|
|
@ -0,0 +1,11 @@
|
|||
<p>
|
||||
Publish files to an <a href="https://aws.amazon.com/s3/">Amazon S3</a>
|
||||
bucket.
|
||||
</p>
|
||||
<dl>
|
||||
<dt>publishToS3(path, bucket, [region])</dt>
|
||||
<dd>
|
||||
Publishes the files at <code>path</code> to the specified S3
|
||||
<code>bucket</code> and <code>region</code>. Defaults to region
|
||||
<code>us-east-1</code>.
|
||||
</dl>
|
|
@ -1,30 +1,7 @@
|
|||
/** Publish a structured log to S3 for processing by ActiveData
|
||||
/** Submit structured logs to ActiveData
|
||||
*
|
||||
* @param path path to the structured log file
|
||||
* @param bucket bucket and destination for structured log
|
||||
* @param region region bucket belongs to
|
||||
* @param logPath path to the structured log(s)
|
||||
*/
|
||||
def call(String path,
|
||||
String bucket = "net-mozaws-stage-fx-test-activedata/${BUILD_TAG}",
|
||||
String region = 'us-east-1') {
|
||||
step([$class: 'S3BucketPublisher',
|
||||
consoleLogLevel: 'INFO',
|
||||
dontWaitForConcurrentBuildCompletion: false,
|
||||
entries: [[
|
||||
bucket: bucket,
|
||||
excludedFile: '',
|
||||
flatten: true,
|
||||
gzipFiles: true,
|
||||
keepForever: false,
|
||||
managedArtifacts: false,
|
||||
noUploadOnFailure: false,
|
||||
selectedRegion: region,
|
||||
showDirectlyInBrowser: false,
|
||||
sourceFile: path,
|
||||
storageClass: 'STANDARD',
|
||||
uploadFromSlave: false,
|
||||
useServerSideEncryption: false]],
|
||||
pluginFailureResultConstraint: 'SUCCESS',
|
||||
profileName: 'ActiveData',
|
||||
userMetadata: []])
|
||||
/** Submit structured logs to ActiveData.
 *
 * @param logPath path to the structured log(s)
 */
def call(String logPath) {
  // ActiveData ingests from this fixed staging bucket; publishToS3 adds a
  // per-build (BUILD_TAG) prefix beneath it.
  publishToS3(logPath, 'net-mozaws-stage-fx-test-activedata')
}
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
<p>Publishes a structured log to S3 for processing by ActiveData.</p>
|
||||
<p>
|
||||
Submit structured logs to
|
||||
<a href="https://wiki.mozilla.org/Auto-tools/Projects/ActiveData">ActiveData</a>.
|
||||
</p>
|
||||
<dl>
|
||||
<dt>submitToActiveData(path, [bucket, region])</dt>
|
||||
<dt>submitToActiveData(logPath)</dt>
|
||||
<dd>
|
||||
Publishes the structured log at <code>path</code>, to the specified
|
||||
<code>bucket</code> and <code>region</code>. Defaults to bucket
|
||||
<code>net-mozaws-stage-fx-test-activedata/${BUILD_TAG}</code> and region
|
||||
<code>us-east-1</code>. Note that ActiveData must be configured to look for
|
||||
structured logs in the specified bucket.
|
||||
Publishes the structured log(s) at <code>logPath</code> to ActiveData.
|
||||
</dl>
|
||||
|
|
|
@ -0,0 +1,110 @@
|
|||
import groovy.json.JsonOutput
|
||||
import java.text.SimpleDateFormat
|
||||
|
||||
/** Submit build results to Treeherder
|
||||
*
|
||||
* @param project project to submit results for
|
||||
* @param jobSymbol symbol for the job
|
||||
* @param jobName name for the job
|
||||
* @param artifactPath path for artifact(s) to publish
|
||||
* @param logPath path for log(s) to publish
|
||||
* @param groupSymbol symbol for the job group
|
||||
* @param groupName name for the job group
|
||||
*/
|
||||
/** Submit build results to Treeherder
 *
 * Builds a job payload conforming to the Treeherder job schema and publishes
 * it to Pulse, validating against the bundled schema first. Artifacts and
 * logs, when given, are published to S3 and linked from the job.
 *
 * @param project project to submit results for
 * @param jobSymbol symbol for the job
 * @param jobName name for the job
 * @param artifactPath path for artifact(s) to publish
 * @param logPath path for log(s) to publish
 * @param groupSymbol symbol for the job group
 * @param groupName name for the job group
 */
def call(String project,
         String jobSymbol,
         String jobName,
         String artifactPath = null,
         String logPath = null,
         String groupSymbol = '?',
         String groupName = null) {
  // Locals are declared with 'def' so they are not written into the script
  // binding, where they could collide between concurrently-running builds.
  def machine = getMachine()
  def payload = [
    taskId: UUID.randomUUID().toString(),
    buildSystem: machine['name'],
    origin: [kind: 'github.com', project: project, revision: getRevision()],
    display: [
      jobSymbol: jobSymbol,
      jobName: jobName,
      groupSymbol: groupSymbol],
    state: 'completed',
    result: getResult(),
    jobKind: 'test',
    timeScheduled: getDateTime(currentBuild.timeInMillis),
    timeStarted: getDateTime(currentBuild.startTimeInMillis),
    timeCompleted: getDateTime(currentBuild.startTimeInMillis + currentBuild.duration),
    reason: 'scheduled', // TODO build cause: currentBuild.rawBuild.getCause().getShortDescription()
    productName: project,
    buildMachine: machine,
    runMachine: machine,
    // TODO replace the placeholder summary with a real job description.
    jobInfo: [summary: 'myJobInfoSummary', links: getJobLinks(artifactPath)],
    logs: getLogs(logPath),
    version: 1
  ]

  if ( groupName != null ) {
    payload.display.groupName = groupName
  }

  // TODO include ec2-metadata output in payload
  def exchange = "exchange/${PULSE_USR}/jobs"
  def routingKey = "${PULSE_USR}.${payload.productName}"
  def schema = libraryResource 'org/mozilla/fxtest/pulse/schemas/treeherder.json'
  publishToPulse(exchange, routingKey, JsonOutput.toJson(payload), schema)
}
|
||||
|
||||
/** Describe the machine running this build.
 *
 * NOTE(review): System.getProperty reports the host OS of the JVM evaluating
 * this script (typically the Jenkins master), not necessarily the build
 * agent -- confirm this is the intended machine.
 *
 * @return map with name, platform, os and architecture keys, matching the
 *         'machine' definition of the Treeherder job schema
 */
def getMachine() {
  // Non-word characters are replaced so values satisfy the schema's
  // ^[\w-]+$ patterns.
  def os = System.getProperty("os.name").toLowerCase().replaceAll('\\W', '-')
  def version = System.getProperty("os.version").toLowerCase().replaceAll('\\W', '-')
  def architecture = System.getProperty("os.arch")
  return [
    name: new URI(env.JENKINS_URL).getHost(),
    platform: [os, version, architecture].join('-'),
    os: os,
    architecture: architecture
  ]
}
|
||||
|
||||
/** Return the full SHA of the current git HEAD in the workspace. */
def getRevision() {
  def revision = sh(script: 'git rev-parse HEAD', returnStdout: true)
  return revision.trim()
}
|
||||
|
||||
/** Map the Jenkins build result to a Treeherder result value.
 *
 * A null result means the build is still regarded as successful (Jenkins
 * only sets currentBuild.result once something degrades it).
 */
def getResult() {
  def buildResult = currentBuild.result
  if ( buildResult == 'FAILURE' || buildResult == 'UNSTABLE' ) {
    return 'fail'
  }
  if ( buildResult == 'SUCCESS' || buildResult == null ) {
    return 'success'
  }
  return 'unknown'
}
|
||||
|
||||
/** Format an epoch timestamp as an ISO 8601 date-time string.
 *
 * @param timeInMillis epoch milliseconds
 * @return timestamp formatted as yyyy-MM-dd'T'HH:mm:ss plus a zone offset
 *         ('Z' or +hh:mm), in the JVM's default time zone
 */
def getDateTime(timeInMillis) {
  // 'def' keeps the temporary out of the script binding.
  def time = new Date(timeInMillis)
  return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX").format(time)
}
|
||||
|
||||
/** Build the jobInfo links for a Treeherder submission.
 *
 * Always links back to the Jenkins build; when an artifact path is given,
 * the artifacts are published to S3 first and linked as well.
 *
 * @param artifactPath optional glob locating artifact(s) to publish
 * @return list of [url, linkText, label] maps
 */
def getJobLinks(artifactPath) {
  // 'def' keeps these locals out of the shared script binding.
  def links = [[url: env.BUILD_URL, linkText: env.BUILD_TAG, label: 'build']]
  if ( artifactPath != null ) {
    def artifactLinks = publishToS3(artifactPath, 'net-mozaws-stage-fx-test-treeherder')
    for (link in artifactLinks) {
      links.add([url: link.url, linkText: link.name, label: 'artifact uploaded'])
    }
  }
  return links
}
|
||||
|
||||
/** Publish logs to S3 and build the logs field for a Treeherder submission.
 *
 * @param logPath optional glob locating log(s) to publish
 * @return list of [url, name] maps (empty when no logPath is given)
 */
def getLogs(logPath) {
  // 'def' keeps these locals out of the shared script binding.
  def links = []
  if ( logPath != null ) {
    def logLinks = publishToS3(logPath, 'net-mozaws-stage-fx-test-treeherder')
    for (link in logLinks) {
      links.add([url: link.url, name: link.name])
    }
  }
  return links
}
|
|
@ -0,0 +1,17 @@
|
|||
<p>
|
||||
Submit build results to
|
||||
<a href="https://wiki.mozilla.org/EngineeringProductivity/Projects/Treeherder">Treeherder</a>.
|
||||
</p>
|
||||
<dl>
|
||||
<dt>
|
||||
submitToTreeherder(project, jobSymbol, jobName, [artifactPath, logPath,
|
||||
groupSymbol, groupName])
|
||||
</dt>
|
||||
<dd>
|
||||
Submits the build result for <code>project</code> to Treeherder using the
|
||||
specified <code>jobSymbol</code> and <code>jobName</code>. If provided,
|
||||
files located by <code>artifactPath</code> and <code>logPath</code> will be
|
||||
published to Amazon S3 and linked from the build results. If job is part of
|
||||
a group, specify this using the optional <code>groupSymbol</code> and
|
||||
<code>groupName</code> arguments.
|
||||
</dl>
|
Загрузка…
Ссылка в новой задаче