From 6a8bde1c1f54d1ca7f862785b88b4f0ce5582710 Mon Sep 17 00:00:00 2001 From: Inga Lovinde <52715130+inga-lovinde@users.noreply.github.com> Date: Tue, 24 Jan 2017 12:38:16 +0300 Subject: [PATCH] Code style improvements --- BuildServer/app.js | 75 ++++---- BuildServer/lib/builder.js | 177 ++++++++++-------- BuildServer/lib/commenter.js | 85 +++++---- BuildServer/lib/git/copy.js | 102 +++++----- BuildServer/lib/git/loader.js | 29 ++- BuildServer/lib/mail-sender.js | 6 +- BuildServer/lib/status-processor.js | 62 +++--- BuildServer/lib/task-processor.js | 63 ++++--- .../lib/tasks/cleanupafterdotnetbuild.js | 25 ++- BuildServer/lib/tasks/conditional.js | 11 +- BuildServer/lib/tasks/copy.js | 14 +- BuildServer/lib/tasks/copyglob.js | 25 ++- BuildServer/lib/tasks/cssnano.js | 22 ++- BuildServer/lib/tasks/cssnanoall.js | 25 ++- BuildServer/lib/tasks/deletefromcode.js | 34 ++-- BuildServer/lib/tasks/dotnetbuild.js | 14 +- BuildServer/lib/tasks/dotnetbuildandtest.js | 18 +- BuildServer/lib/tasks/dotnetbuilderwrapper.js | 39 ++-- .../lib/tasks/dotnetbuildwithoutcleanup.js | 36 ++-- BuildServer/lib/tasks/dotnetcheckstyle.js | 49 +++-- BuildServer/lib/tasks/dotnetcompile.js | 42 +++-- BuildServer/lib/tasks/dotnetnugetpack.js | 41 ++-- BuildServer/lib/tasks/dotnetnugetprocess.js | 46 ++--- BuildServer/lib/tasks/dotnetnugetpush.js | 40 ++-- BuildServer/lib/tasks/dotnetnugetpushonly.js | 11 +- BuildServer/lib/tasks/dotnetnugetrestore.js | 17 +- BuildServer/lib/tasks/dotnetnunit.js | 8 +- BuildServer/lib/tasks/dotnetnunitall.js | 28 +-- BuildServer/lib/tasks/dotnetpackwebapp.js | 63 ++++--- BuildServer/lib/tasks/dotnetrewrite.js | 35 ++-- BuildServer/lib/tasks/echo.js | 2 +- BuildServer/lib/tasks/eslintbrowser.js | 17 +- BuildServer/lib/tasks/eslintbrowserall.js | 25 ++- BuildServer/lib/tasks/noop.js | 4 +- BuildServer/lib/tasks/packform.js | 38 ++-- BuildServer/lib/tasks/parallel.js | 6 +- BuildServer/lib/tasks/sequential.js | 9 +- BuildServer/lib/tasks/uglifyjs.js | 13 +- BuildServer/lib/tasks/uglifyjsall.js | 25 ++- BuildServer/lib/tasks/writefile.js | 13 +- BuildServer/lib/tasks/zip.js | 16 +- BuildServer/routes/artifact.js | 16 +- BuildServer/routes/index.js | 12 +- BuildServer/routes/manual.js | 17 +- BuildServer/routes/postreceive.js | 82 ++++---- BuildServer/routes/release.js | 48 +++-- BuildServer/routes/status.js | 50 ++--- 47 files changed, 890 insertions(+), 745 deletions(-) diff --git a/BuildServer/app.js b/BuildServer/app.js index 55db21d..8aecf69 100644 --- a/BuildServer/app.js +++ b/BuildServer/app.js @@ -1,55 +1,56 @@ "use strict"; -//const https = require('https'); -const realFs = require('fs'); -const fs = require('graceful-fs'); +const realFs = require("fs"); +const fs = require("graceful-fs"); + fs.gracefulify(realFs); -const express = require('express'); -const routes = require('./routes'); -const http = require('http'); -const path = require('path'); -const serveFavicon = require('serve-favicon'); -const morgan = require('morgan'); -const bodyParser = require('body-parser'); -const methodOverride = require('method-override'); -const serveStatic = require('serve-static'); -const errorhandler = require('errorhandler'); +const express = require("express"); +const routes = require("./routes"); +const http = require("http"); +const path = require("path"); +const serveFavicon = require("serve-favicon"); +const morgan = require("morgan"); +const bodyParser = require("body-parser"); +const methodOverride = require("method-override"); +const serveStatic = require("serve-static"); +const 
errorhandler = require("errorhandler"); + +const settings = require("./settings"); const app = express(); -// all environments -app.set('port', process.env.PORT || 3000); -app.set('views', path.join(__dirname, 'views')); -app.set('view engine', 'jade'); -app.set('gitpath', 'M:/g'); -app.set('tmpcodepath', 'M:/c'); -app.set('releasepath', 'M:/r'); -app.use(serveFavicon(path.join(__dirname, 'public/images/favicon.png'))); -app.use(morgan('dev')); -app.use(bodyParser.json({ limit: '10mb' })); -app.use(bodyParser.urlencoded({ extended: false })); +// All environments +app.set("port", settings.port); +app.set("views", path.join(__dirname, "views")); +app.set("view engine", "jade"); +app.set("gitpath", settings.gitpath); +app.set("tmpcodepath", settings.tmpcodepath); +app.set("releasepath", settings.releasepath); +app.use(serveFavicon(path.join(__dirname, "public/images/favicon.png"))); +app.use(morgan("dev")); +app.use(bodyParser.json({ "limit": "10mb" })); +app.use(bodyParser.urlencoded({ "extended": false })); app.use(methodOverride()); -app.use(serveStatic(path.join(__dirname, 'public'))); +app.use(serveStatic(path.join(__dirname, "public"))); -// development only -if ('development' === app.get('env')) { +if (app.get("env") === "development") { app.use(errorhandler()); } -app.route('/').get(routes.index); -app.route('/github/postreceive') +app.route("/").get(routes.index); +app.route("/github/postreceive") .post(routes.postreceive) - .get((req, res) => res.send("Only automated POST requests are allowed for postreceive route")); + .get((req, res) => res.send("Only automated POST requests are allowed for postreceive route")); -app.route('/manual') +app.route("/manual") .get(routes.manual.get) .post(routes.manual.post); -app.route('/status/:owner/:reponame/:branch/:rev?').get(routes.status.page); -app.route('/pos-github.payonline.ru/*').get(routes.status.pageFromGithub); -app.route('/status.svg').get(routes.status.image); -app.route('/release/:owner/:reponame/:branch/:rev').get(routes.release); -app.route('/artifact/:owner/:reponame/:branch/:rev/*').get(routes.artifact); +app.route("/status/:owner/:reponame/:branch/:rev?").get(routes.status.page); +app.route("/pos-github.payonline.ru/*").get(routes.status.pageFromGithub); +app.route("/status.svg").get(routes.status.image); +app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release); +app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact); -http.createServer(app).listen(app.get('port'), () => console.log('Express server listening on port ' + app.get('port'))); +http.createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`)); diff --git a/BuildServer/lib/builder.js b/BuildServer/lib/builder.js index 3135c0d..edd4773 100644 --- a/BuildServer/lib/builder.js +++ b/BuildServer/lib/builder.js @@ -1,31 +1,34 @@ "use strict"; -const fs = require('fs'); -const fse = require('fs-extra'); -const async = require('async'); -const gitLoader = require('./git/loader'); -const processor = require('./task-processor'); -const mailSender = require('./mail-sender'); -const settings = require('../settings'); - -//const codePostfix = "/code"; +const path = require("path"); +const fs = require("fs"); +const fse = require("fs-extra"); +const async = require("async"); +const gitLoader = require("./git/loader"); +const processor = require("./task-processor"); +const mailSender = require("./mail-sender"); +const settings = require("../settings"); + const codePostfix = ""; const 
notifyStatus = (options, callback) => { const status = { - owner: options.owner, - repo: options.reponame, - sha: options.hash, - state: options.state, - target_url: settings.siteRoot + "status/" + options.owner + "/" + options.reponame + "/" + options.hash, - description: ((options.description || "") + "").substr(0, 140) + "description": String(options.description || "").substr(0, 140), + "owner": options.owner, + "repo": options.reponame, + "sha": options.hash, + "state": options.state, + "target_url": `${settings.siteRoot}status/${options.owner}/${options.reponame}/${options.hash}` }; - settings.createGithub(options.owner).repos.createStatus(status, (err, result) => { + + settings.createGithub(options.owner).repos.createStatus(status, (err) => { if (err) { - console.log("Error while creating status: " + err); + console.log(`Error while creating status: ${err}`); console.log(status); + return callback(err); } + return callback(); }); }; @@ -37,59 +40,76 @@ const build = (options, callback) => { const rev = options.rev; const branch = options.branch; const skipGitLoader = options.skipGitLoader; - const local = options.app.get('gitpath') + "/r/"; - const tmp = options.app.get('tmpcodepath') + "/" + rev.substr(0, 15); + const local = path.join(options.app.get("gitpath"), "r"); + const tmp = path.join(options.app.get("tmpcodepath"), rev.substr(0, 15)); const exported = tmp + codePostfix; - const release = options.app.get('releasepath') + "/" + owner + "/" + reponame + "/" + branch + "/" + rev; + const release = path.join(options.app.get("releasepath"), owner, reponame, branch, rev); const statusQueue = async.queue((task, callback) => task(callback), 1); - const actualGitLoader = skipGitLoader ? (options, callback) => process.nextTick(callback) : gitLoader; + const actualGitLoader = skipGitLoader + ? (options, callback) => process.nextTick(callback) + : gitLoader; const date = new Date(); - const versionInfo = date.getFullYear() + "." + - (date.getMonth() + 1) + "." + - date.getDate() + "." 
+ - (date.getHours() * 100 + date.getMinutes()) + "; " + - "built from " + rev + "; " + - "repository: " + owner + "/" + reponame + "; " + - "branch: " + branch; + const versionMajor = date.getFullYear(); + const versionMinor = date.getMonth() + 1; + const versionBuild = date.getDate(); + const versionRev = (date.getHours() * 100) + date.getMinutes(); + const version = `${versionMajor}.${versionMinor}.${versionBuild}.${versionRev}`; + const versionInfo = `${version}; built from ${rev}; repository: ${owner}/${reponame}; branch: ${branch}`; statusQueue.push((callback) => notifyStatus({ - state: "pending", - description: "Preparing to build...", - owner: owner, - reponame: reponame, - hash: rev + "description": "Preparing to build...", + "hash": rev, + owner, + reponame, + "state": "pending" }, callback)); fse.mkdirsSync(release); - fs.writeFileSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/" + branch + "/latest.id", rev); - fse.mkdirsSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/$revs"); - fs.writeFileSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/$revs/" + rev + ".branch", branch); + fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev); + fse.mkdirsSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs")); + fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch); const done = (err, result) => { - const errorMessage = result && result.errors ? ((result.errors.$allMessages || [])[0] || {}).message : err; - const warnMessage = result && result.warns ? ((result.warns.$allMessages || [])[0] || {}).message : err; - const infoMessage = result && result.infos ? ((result.infos.$allMessages || []).slice(-1)[0] || {}).message : err; - - fs.writeFile(release + "/report.json", JSON.stringify({date: Date.now(), err: err, result: result}), (writeErr) => { + const errorMessage = result && result.errors + ? ((result.errors.$allMessages || [])[0] || {}).message + : err; + const warnMessage = result && result.warns + ? ((result.warns.$allMessages || [])[0] || {}).message + : err; + const infoMessage = result && result.infos + ? ((result.infos.$allMessages || []).slice(-1)[0] || {}).message + : err; + + fs.writeFile(path.join(release, "report.json"), JSON.stringify({ + "date": Date.now(), + err, + result + }), (writeErr) => { statusQueue.push((callback) => async.parallel([ (callback) => notifyStatus({ - state: err ? "error" : "success", - description: errorMessage || warnMessage || infoMessage || "Success", - owner: owner, - reponame: reponame, - hash: rev + "description": errorMessage || warnMessage || infoMessage || "Success", + "hash": rev, + owner, + reponame, + "state": err + ? "error" + : "success" }, callback), (callback) => mailSender.send({ - from: settings.smtp.sender, - to: settings.smtp.receiver, - subject: (err ? "Build failed for " : "Successfully built ") + owner + "/" + reponame + "/" + branch, - headers: { - 'X-Laziness-level': 1000 - }, - text: ("Build status URL: " + settings.siteRoot + "status/" + owner + "/" + reponame + "/" + rev + "\r\n\r\n") + - (err ? ("Error message: " + err + "\r\n\r\n") : "") + - ((!result || !result.messages || !result.messages.$allMessages) ? JSON.stringify(result, null, 4) : result.messages.$allMessages.map(msg => msg.prefix + "\t" + msg.message).join("\r\n")) + "from": settings.smtp.sender, + "headers": { "X-Laziness-level": 1000 }, + "subject": `${err ? 
"Build failed for" : "Successfully built"} ${owner}/${reponame}/${branch}`, + "text": `Build status URL: ${settings.siteRoot}status/${owner}/${reponame}/${rev}\r\n\r\n` + + ( + err + ? `Error message: ${err}\r\n\r\n` + : "") + + ( + (!result || !result.messages || !result.messages.$allMessages) + ? JSON.stringify(result, null, 4) + : result.messages.$allMessages.map((msg) => `${msg.prefix}\t${msg.message}`).join("\r\n")), + "to": settings.smtp.receiver }, callback), (callback) => { if (err) { @@ -103,48 +123,55 @@ const build = (options, callback) => { if (writeErr) { return callback(writeErr); } + return callback(err, result); }); }; actualGitLoader({ - remote: url + ".git", - local: local, - branch: branch, - hash: rev, - exported: tmp + codePostfix + branch, + exported, + "hash": rev, + local, + "remote": `${url}.git` }, (err) => { if (err) { console.log(err); - return done("Git fetch error: " + err); + + return done(`Git fetch error: ${err}`); } + console.log("Done loading from git"); - fs.exists(exported + "/mbs.json", (exists) => { + + return fs.exists(path.join(exported, "mbs.json"), (exists) => { if (!exists) { return done(null, "MBSNotFound"); } - fs.readFile(exported + "/mbs.json", (err, data) => { + + return fs.readFile(path.join(exported, "mbs.json"), (err, data) => { if (err) { return done(err, "MBSUnableToRead"); } - let task; + let task = null; + try { task = JSON.parse(data); - } catch(ex) { - console.log("Malformed data: " + data); + } catch (ex) { + console.log(`Malformed data: ${data}`); + return done(ex, "MBSMalformed"); } - processor.processTask(task, { - owner: owner, - reponame: reponame, - branch: branch, - rev: rev, - tmp: tmp, - exported: exported, - release: release, - versionInfo: versionInfo + return processor.processTask(task, { + branch, + exported, + owner, + release, + reponame, + rev, + tmp, + versionInfo }, (err, result) => { if (err) { return done(err, result); diff --git a/BuildServer/lib/commenter.js b/BuildServer/lib/commenter.js index 7faaf5b..98f41d9 100644 --- a/BuildServer/lib/commenter.js +++ b/BuildServer/lib/commenter.js @@ -1,5 +1,6 @@ "use strict"; +const path = require("path"); const fs = require("fs"); const _ = require("underscore"); const settings = require("../settings"); @@ -9,10 +10,10 @@ const versionNamePattern = /^v\d+(\.\d+)*$/; const masterNamePattern = /^master$/; const writeComment = (options, message, callback) => options.github.issues.createComment({ - owner: options.baseRepoOptions.owner, - repo: options.baseRepoOptions.reponame, - number: options.number, - body: message + "body": message, + "number": options.number, + "owner": options.baseRepoOptions.owner, + "repo": options.baseRepoOptions.reponame }, callback); const closePullRequest = (options, message, callback) => writeComment(options, message, (err) => { @@ -21,43 +22,43 @@ const closePullRequest = (options, message, callback) => writeComment(options, m } return options.github.issues.edit({ - owner: options.baseRepoOptions.owner, - repo: options.baseRepoOptions.reponame, - number: options.number, - state: "closed" + "number": options.number, + "owner": options.baseRepoOptions.owner, + "repo": options.baseRepoOptions.reponame, + "state": "closed" }, callback); }); const checkHasIssue = (options, issueNumber, callback) => options.github.issues.get({ - owner: options.baseRepoOptions.owner, - repo: options.baseRepoOptions.reponame, - number: issueNumber + "number": issueNumber, + "owner": options.baseRepoOptions.owner, + "repo": options.baseRepoOptions.reponame }, (err, 
result) => { if (err && err.code !== 404) { return callback(err); } if (err || result.number.toString() !== issueNumber) { - return callback(undefined, false); + return callback(null, false); } if (result.pull_request && result.pull_request.url) { - return callback(undefined, false); + return callback(null, false); } - return callback(undefined, true, result.title); + return callback(null, true, result.title); }); const checkHasReleases = (options, callback) => options.github.repos.getReleases({ - owner: options.baseRepoOptions.owner, - repo: options.baseRepoOptions.reponame, - per_page: 1 + "owner": options.baseRepoOptions.owner, + "per_page": 1, + "repo": options.baseRepoOptions.reponame }, (err, result) => { if (err) { return callback(err); } - return callback(undefined, result && result.length); + return callback(null, result && result.length); }); const checkPullRequest = (options, callback) => { @@ -83,7 +84,7 @@ const checkPullRequest = (options, callback) => { } if (options.action === "opened") { - return writeComment(options, "Switching master branch to " + head.branchname + " release", callback); + return writeComment(options, `Switching master branch to ${head.branchname} release`, callback); } return process.nextTick(callback); @@ -91,24 +92,26 @@ const checkPullRequest = (options, callback) => { } if (!featureNamePattern.test(head.branchname)) { - return closePullRequest(options, "Only merging from feature branch is allowed (pattern: `" + featureNamePattern.toString() + "`)", callback); + return closePullRequest(options, `Only merging from feature branch is allowed (pattern: \`${featureNamePattern}\`)`, callback); } if (!versionNamePattern.test(base.branchname) && !masterNamePattern.test(base.branchname)) { - return closePullRequest(options, "Only merging to master or version branch is allowed; merging to '" + base.branchname + "' is not supported", callback); + return closePullRequest(options, `Only merging to master or version branch is allowed; merging to '${base.branchname}' is not supported`, callback); } const issueNumber = featureNamePattern.exec(head.branchname)[1]; + return checkHasIssue(options, issueNumber, (err, hasIssue, issueTitle) => { if (err) { - return writeComment(options, "Unable to check for issue:\r\n\r\n" + err.message, callback); + return writeComment(options, `Unable to check for issue:\r\n\r\n${err.message}`, callback); } if (!hasIssue) { - return closePullRequest(options, "Unable to find issue #" + issueNumber, callback); + return closePullRequest(options, `Unable to find issue #${issueNumber}`, callback); } const shouldHaveReleases = versionNamePattern.test(base.branchname); + return checkHasReleases(options, (err, hasReleases) => { if (err) { return writeComment(options, "Unable to check for releases", callback); @@ -123,7 +126,7 @@ const checkPullRequest = (options, callback) => { } if (options.action === "opened") { - return writeComment(options, "Merging feature #" + issueNumber + " (" + issueTitle + ") to " + base.branchname + (shouldHaveReleases ? " release" : ""), callback); + return writeComment(options, `Merging feature #${issueNumber} (${issueTitle}) to ${base.branchname}${shouldHaveReleases ? 
" release" : ""}`, callback); } return process.nextTick(callback); @@ -132,8 +135,8 @@ const checkPullRequest = (options, callback) => { }; const getStatusMessageFromRelease = (app, options, callback) => { - const releaseDir = app.get("releasepath") + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev; - const reportFile = releaseDir + "/report.json"; + const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev); + const reportFile = path.join(releaseDir, "/report.json"); options.attemptsGetReport = (options.attemptsGetReport || 0) + 1; @@ -147,7 +150,7 @@ const getStatusMessageFromRelease = (app, options, callback) => { return callback("Report file not found"); } - //maybe it is building right now + // Maybe it is building right now return setTimeout(() => getStatusMessageFromRelease(app, options, callback), 10000); }), 2000); } @@ -156,10 +159,13 @@ const getStatusMessageFromRelease = (app, options, callback) => { if (err) { return callback(err); } + const data = dataBuffer.toString(); + if (!data) { return callback("Report file not found"); } + const report = JSON.parse(data); if (report.result === "MBSNotFound") { @@ -167,27 +173,34 @@ const getStatusMessageFromRelease = (app, options, callback) => { } if (report.result && ((report.result.errors || {}).$allMessages || []).length + ((report.result.warns || {}).$allMessages || []).length > 0) { return callback(_.map( - (report.result.errors || {}).$allMessages || [], (message) => "ERR: " + message.message + (report.result.errors || {}).$allMessages || [], (message) => `ERR: ${message.message}` ).concat(_.map( - (report.result.warns || {}).$allMessages || [], (message) => "WARN: " + message.message - )).join("\r\n")); + (report.result.warns || {}).$allMessages || [], (message) => `WARN: ${message.message}` + )) + .join("\r\n")); } if (!report.result || report.err) { - return callback("CRITICAL ERROR: " + report.err); + return callback(`CRITICAL ERROR: ${report.err}`); } if ((report.result.infos.$allMessages || []).length > 0) { - return callback(undefined, report.result.infos.$allMessages[report.result.infos.$allMessages.length-1].message); + return callback(null, report.result.infos.$allMessages[report.result.infos.$allMessages.length - 1].message); } - return callback(undefined, "OK"); + + return callback(null, "OK"); }), 1000); }); }; exports.commentOnPullRequest = (options, callback) => { options.github = settings.createGithub(options.baseRepoOptions.owner); + return checkPullRequest(options, (err, successMessage) => getStatusMessageFromRelease(options.app, options.headRepoOptions, (err, successMessage) => { - const message = err ? ("Was not built:\r\n\r\n```\r\n" + err.substring(0, 64000).replace(/```/g, "` ` `") + "\r\n```\r\n\r\nDO NOT MERGE!") : ("Build OK\r\n\r\n" + successMessage); - const statusUrlMessage = "Build status URL: " + settings.siteRoot + "status/" + options.headRepoOptions.owner + "/" + options.headRepoOptions.reponame + "/" + options.headRepoOptions.rev + "\r\n\r\n"; - return writeComment(options, message + "\r\n\r\n" + statusUrlMessage, callback); + const escapedErr = err.substring(0, 64000).replace(/`/g, "` "); + const message = err + ? 
`Was not built:\r\n\r\n\`\`\`\r\n${escapedErr}\r\n\`\`\`\r\n\r\nDO NOT MERGE!` + : `Build OK\r\n\r\n${successMessage}`; + const statusUrlMessage = `Build status URL: ${settings.siteRoot}status/${options.headRepoOptions.owner}/${options.headRepoOptions.reponame}/${options.headRepoOptions.rev}\r\n\r\n`; + + return writeComment(options, `${message}\r\n\r\n${statusUrlMessage}`, callback); })); }; diff --git a/BuildServer/lib/git/copy.js b/BuildServer/lib/git/copy.js index 3d64f29..0ac1aa3 100644 --- a/BuildServer/lib/git/copy.js +++ b/BuildServer/lib/git/copy.js @@ -1,69 +1,82 @@ "use strict"; -const EventEmitter = require('events').EventEmitter; -const path = require('path'); -const fs = require('fs'); -const async = require('async'); -const Copier = require('recursive-tree-copy').Copier; +const EventEmitter = require("events").EventEmitter; +const path = require("path"); +const fs = require("fs"); +const async = require("async"); +const Copier = require("recursive-tree-copy").Copier; const gitToFsCopier = new Copier({ - concurrency: 4, - walkSourceTree: (tree) => { + "concurrency": 4, + "copyLeaf": (entry, targetDir, callback) => { + const targetPath = path.join(targetDir, entry.name()); + + entry.getBlob((err, blob) => { + if (err) { + return callback(err); + } + + return fs.writeFile(targetPath, blob.content(), callback); + }); + }, + "createTargetTree": (tree, targetDir, callback) => { + const targetSubdir = path.join(targetDir, tree.name); + + fs.mkdir(targetSubdir, (err) => { + // Workaround for broken trees + if (err && err.code !== "EEXIST") { + return callback(err); + } + + return callback(null, targetSubdir); + }); + }, + "finalizeTargetTree": (targetSubdir, callback) => callback(), + "walkSourceTree": (tree) => { const emitter = new EventEmitter(); + process.nextTick(() => { - let entries; + let entries = null; + try { entries = tree.gitTree.entries(); - } catch(err) { - return emitter.emit('error', err); + } catch (err) { + return emitter.emit("error", err); } - async.parallel(entries.map((entry) => (callback) => { + return async.parallel(entries.map((entry) => (callback) => { if (entry.isTree()) { - entry.getTree((err, subTree) => { + return entry.getTree((err, subTree) => { if (err) { return callback(err); } - emitter.emit('tree', { gitTree: subTree, name: entry.name() }); - callback(); + emitter.emit("tree", { + "gitTree": subTree, + "name": entry.name() + }); + + return callback(); }); - } else if (entry.isFile()) { - emitter.emit('leaf', entry); - callback(); - } else { - callback(); } + + if (entry.isFile()) { + emitter.emit("leaf", entry); + + return callback(); + } + + return callback(); }), (err) => { if (err) { - return emitter.emit('error', err); + return emitter.emit("error", err); } - return emitter.emit('done'); + return emitter.emit("done"); }); }); - return emitter; - }, - createTargetTree: (tree, targetDir, callback) => { - const targetSubdir = path.join(targetDir, tree.name); - fs.mkdir(targetSubdir, (err) => { - if (err && err.code !== 'EEXIST' /* workaround for broken trees */) { - return callback(err); - } - - callback(undefined, targetSubdir); - }); - }, - finalizeTargetTree: (targetSubdir, callback) => callback(), - copyLeaf: (entry, targetDir, callback) => { - const targetPath = path.join(targetDir, entry.name()); - entry.getBlob((err, blob) => { - if (err) { - return callback(err); - } - fs.writeFile(targetPath, blob.content(), callback); - }); + return emitter; } }); @@ -72,5 +85,8 @@ exports.gitToFs = (commit, exportDir, callback) => commit.getTree((err, 
tree) => return callback(err); } - gitToFsCopier.copy({ gitTree: tree, name: "." }, exportDir, callback); + return gitToFsCopier.copy({ + "gitTree": tree, + "name": "." + }, exportDir, callback); }); diff --git a/BuildServer/lib/git/loader.js b/BuildServer/lib/git/loader.js index dbcc98a..017afaf 100644 --- a/BuildServer/lib/git/loader.js +++ b/BuildServer/lib/git/loader.js @@ -1,18 +1,16 @@ "use strict"; -const nodegit = require('nodegit'); -const fse = require('fs-extra'); -const gitToFs = require('./copy').gitToFs; +const nodegit = require("nodegit"); +const fse = require("fs-extra"); +const gitToFs = require("./copy").gitToFs; const mkdirs = (path) => { - /*jslint stupid: true */ - fse.mkdirsSync(path); + fse.mkdirsSync(path); // eslint-disable-line no-sync }; const removedirs = (path) => { - /*jslint stupid: true */ - fse.removeSync(path); + fse.removeSync(path); // eslint-disable-line no-sync }; -/* +/* Example: options = { "remote": "https://github.com/visionmedia/express.git", "local": "D:\\data\\repositories\\visionmedia\\express.git\\", @@ -20,21 +18,21 @@ options = { "hash": "82e15cf321fccf3215068814d1ea1aeb3581ddb3", "exported": "D:\\data\\exportedsource\\visionmedia\\express\\82e15cf321fccf3215068814d1ea1aeb3581ddb3\\", } -*/ + */ module.exports = (options, globalCallback) => { let url = options.remote; - const path = options.local + "/" + options.hash; + const path = `${options.local}/${options.hash}`; const exported = options.exported; removedirs(path); mkdirs(path); if (url.substr(0, 8) === "https://") { - url = "git://" + url.substr(8); + url = `git://${url.substr(8)}`; } - console.log("Cloning %s to %s", url, path); + console.log(`Cloning ${url} to ${path}`); nodegit.Repository.init(path, 1) .catch(globalCallback) @@ -44,12 +42,12 @@ module.exports = (options, globalCallback) => { .catch(globalCallback) .then((number) => { if (number) { - return globalCallback("Failed to fetch commit: error number " + number); + return globalCallback(`Failed to fetch commit: error number ${number}`); } - console.log("Cloned %s to %s", url, path); + console.log(`Cloned ${url} to ${path}`); - repo.getCommit(options.hash) + return repo.getCommit(options.hash) .catch(globalCallback) .then((commit) => { removedirs(exported); @@ -57,6 +55,7 @@ module.exports = (options, globalCallback) => { gitToFs(commit, exported, (err, result) => { repo.free(); + return globalCallback(err, result); }); }); diff --git a/BuildServer/lib/mail-sender.js b/BuildServer/lib/mail-sender.js index 8497f30..fe32310 100644 --- a/BuildServer/lib/mail-sender.js +++ b/BuildServer/lib/mail-sender.js @@ -1,7 +1,7 @@ "use strict"; -const nodemailer = require('nodemailer'); -const settings = require('../settings'); +const nodemailer = require("nodemailer"); +const settings = require("../settings"); exports.send = (message, callback) => { return process.nextTick(callback); @@ -12,4 +12,4 @@ exports.send = (message, callback) => { callback(err, result); }); */ -}; \ No newline at end of file +}; diff --git a/BuildServer/lib/status-processor.js b/BuildServer/lib/status-processor.js index 4e3d6b3..8108085 100644 --- a/BuildServer/lib/status-processor.js +++ b/BuildServer/lib/status-processor.js @@ -1,88 +1,108 @@ "use strict"; -const fs = require('fs'); -const glob = require('glob'); +const path = require("path"); +const fs = require("fs"); +const glob = require("glob"); const addBranchInfo = (app, options, callback) => { - const branchFile = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/$revs/" + 
options.rev + ".branch"; + const branchFile = path.join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`); + fs.exists(branchFile, (exists) => { if (!exists) { return callback("BranchFileNotFound", options); } - fs.readFile(branchFile, (err, data) => { + + return fs.readFile(branchFile, (err, data) => { if (err) { return callback(err, options); } options.branch = data.toString(); options.branchName = options.branch.split("/").pop(); + return callback(null, options); }); }); }; const addRevInfo = (app, options, callback) => { - const revFile = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/latest.id"; + const revFile = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id"); + fs.exists(revFile, (exists) => { if (!exists) { return callback("RevFileNotFound", options); } - fs.readFile(revFile, (err, data) => { + + return fs.readFile(revFile, (err, data) => { if (err) { return callback(err, options); } options.rev = data.toString(); + return callback(null, options); }); }); }; const parseOptions = (app, options, callback) => { - const result = {}; - - result.owner = options.owner; - result.reponame = options.reponame; + const result = { + "owner": options.owner, + "reponame": options.reponame + }; if (options.rev && !(/^[\da-f]{40}$/i).test(options.rev)) { - return callback("Wrong rev format: " + options.rev, options); + return callback(`Wrong rev format: ${options.rev}`, options); } if (options.rev) { result.rev = options.rev; + return addBranchInfo(app, result, callback); - } else if (/^[\da-f]{40}$/i.test(options.branchName)) { + } + + if (/^[\da-f]{40}$/i.test(options.branchName)) { result.rev = options.branchName; + return addBranchInfo(app, result, callback); - } else { - result.branchName = options.branchName || "master"; - result.branch = "refs/heads/" + result.branchName; - return addRevInfo(app, result, callback); } + + result.branchName = options.branchName || "master"; + result.branch = `refs/heads/${result.branchName}`; + + return addRevInfo(app, result, callback); }; const loadReport = (app, options, callback) => { - const releaseDir = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev; + const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev); - glob("**", {cwd: releaseDir, mark: true}, (err, files) => { + glob("**", { + "cwd": releaseDir, + "mark": true + }, (err, files) => { if (err) { return callback(err, options); } - const reportFile = releaseDir + "/report.json"; + const reportFile = path.join(releaseDir, "report.json"); + options.files = files; - fs.exists(reportFile, (exists) => { + + return fs.exists(reportFile, (exists) => { if (!exists) { return callback("ReportFileNotFound", options); } - fs.readFile(reportFile, (err, dataBuffer) => { + return fs.readFile(reportFile, (err, dataBuffer) => { if (err) { return callback(err, options); } + const data = dataBuffer.toString(); + if (!data) { return callback("ReportFileNotFound", options); } options.report = JSON.parse(data); + return callback(null, options); }); }); diff --git a/BuildServer/lib/task-processor.js b/BuildServer/lib/task-processor.js index 859089d..e29cc29 100644 --- a/BuildServer/lib/task-processor.js +++ b/BuildServer/lib/task-processor.js @@ -1,16 +1,22 @@ "use strict"; -//TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and 
use `emit' here.
+// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
 const TaskProcessor = function (task, outerProcessor, callback) {
-    if (!this) {
+    if (!this) {
         return new TaskProcessor(task);
     }
 
-    const self = this;
-    let taskWorker = undefined;
+    const that = this;
+    let taskWorker = null;
     const errors = [];
     const process = () => taskWorker.process();
-    const getOuterPrefix = (prefix) => (task.name && prefix) ? (task.name + "/" + prefix) : (task.name || "") + (prefix || "");
+    const getOuterPrefix = (prefix) => {
+        if (task.name && prefix) {
+            return `${task.name}/${prefix}`;
+        }
+
+        return (task.name || "") + (prefix || "");
+    };
     const onError = (message, prefix) => {
         errors.push(message);
         outerProcessor.onError(message, getOuterPrefix(prefix));
@@ -18,21 +24,25 @@ const TaskProcessor = function (task, outerProcessor, callback) {
     const onWarn = (message, prefix) => outerProcessor.onWarn(message, getOuterPrefix(prefix));
     const onInfo = (message, prefix) => outerProcessor.onInfo(message, getOuterPrefix(prefix));
     const processTask = (innerTask, innerCallback) => {
-        const innerProcessor = new TaskProcessor(innerTask, self, innerCallback);
+        const innerProcessor = new TaskProcessor(innerTask, that, innerCallback);
+
         innerProcessor.process();
     };
     const done = () => callback(errors.join("\r\n"));
 
-    self.process = process;
-    self.onError = onError;
-    self.onWarn = onWarn;
-    self.onInfo = onInfo;
-    self.processTask = processTask;
-    self.done = done;
-    self.context = outerProcessor.context;
+    that.process = process;
+    that.onError = onError;
+    that.onWarn = onWarn;
+    that.onInfo = onInfo;
+    that.processTask = processTask;
+    that.done = done;
+    that.context = outerProcessor.context;
 
-    const taskImpl = require('./tasks/' + task.type.match(/[\w\-]/g).join(""));
-    taskWorker = taskImpl(task.params || {}, self);
+    const taskImpl = require(`./tasks/${task.type.match(/[\w\-]/g).join("")}`);
+
+    taskWorker = taskImpl(task.params || {}, that);
+
+    return this;
 };
 
 exports.processTask = (task, context, callback) => {
@@ -46,14 +56,17 @@ exports.processTask = (task, context, callback) => {
         let innerList = list;
 
         parts.forEach((part) => {
-            innerList = (innerList[part] = innerList[part] || {});
+            innerList = innerList[part] = innerList[part] || {};
         });
 
         innerList.$messages = innerList.$messages || [];
         innerList.$messages.push(message);
 
         list.$allMessages = list.$allMessages || [];
-        list.$allMessages.push({ prefix: prefix, message: message });
+        list.$allMessages.push({
+            message,
+            prefix
+        });
     };
 
     return (message, prefix) => {
@@ -62,15 +75,15 @@ exports.processTask = (task, context, callback) => {
         };
     };
     const processor = new TaskProcessor(task, {
-        onError: messageProcessor(errors),
-        onWarn: messageProcessor(warns),
-        onInfo: messageProcessor(infos),
-        context: context
+        context,
+        "onError": messageProcessor(errors),
+        "onInfo": messageProcessor(infos),
+        "onWarn": messageProcessor(warns)
    }, (err) => callback(err, {
-        errors: errors,
-        warns: warns,
-        infos: infos,
-        messages: messages
+        errors,
+        infos,
+        messages,
+        warns
    }));
 
     processor.process();
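For orientation: every module under BuildServer/lib/tasks follows the contract that task-processor.js relies on above. It exports (params, processor) => ({ process }) and reports only through the processor it is given. A minimal sketch of such a module (a hypothetical "hello" task, not part of this patch, shown purely to illustrate the contract):

"use strict";

// Hypothetical task module illustrating the shape task-processor.js expects.
// params comes from the task entry in mbs.json; processor exposes
// onInfo/onWarn/onError for reporting, done() to finish the task, and
// context (owner, reponame, branch, rev, exported, release, ...) from builder.js.
module.exports = (params, processor) => ({
    "process": () => {
        processor.onInfo(`Hello from ${processor.context.owner}/${processor.context.reponame}`);

        return processor.done();
    }
});
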
glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", { - dot: true, - cwd: processor.context.exported + "process": () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", { + "cwd": processor.context.exported, + "dot": true }, (err, files) => { if (err) { processor.onError(err); + return processor.done(); } @@ -17,16 +18,14 @@ module.exports = (params, processor) => ({ } return processor.processTask({ - type: "parallel", - params: { - tasks: files.map((file) => ({ - name: file, - type: "deletefromcode", - params: { - filename: file - } + "params": { + "tasks": files.map((file) => ({ + "name": file, + "params": { "filename": file }, + "type": "deletefromcode" })) - } + }, + "type": "parallel" }, processor.done.bind(processor)); }) }); diff --git a/BuildServer/lib/tasks/conditional.js b/BuildServer/lib/tasks/conditional.js index 5123496..f71c6f6 100644 --- a/BuildServer/lib/tasks/conditional.js +++ b/BuildServer/lib/tasks/conditional.js @@ -1,10 +1,11 @@ "use strict"; module.exports = (params, processor) => { - const condition = (!params.owner || params.owner === processor.context.owner) && (!params.branch || params.branch === processor.context.branch || "refs/heads/" + params.branch === processor.context.branch); - const task = condition ? params.task : params.otherwise; + const condition = (!params.owner || params.owner === processor.context.owner) + && (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch); + const task = condition + ? params.task + : params.otherwise; - return { - process: () => processor.processTask(task || {type: "noop"}, processor.done.bind(processor)) - }; + return { "process": () => processor.processTask(task || { "type": "noop" }, processor.done.bind(processor)) }; }; diff --git a/BuildServer/lib/tasks/copy.js b/BuildServer/lib/tasks/copy.js index 4cc64d6..cc0f59c 100644 --- a/BuildServer/lib/tasks/copy.js +++ b/BuildServer/lib/tasks/copy.js @@ -1,20 +1,22 @@ "use strict"; -const fse = require('fs-extra'); +const path = require("path"); +const fse = require("fs-extra"); module.exports = (params, processor) => ({ - process: () => { - const sourceFilePath = processor.context.exported + "/" + params.filename; - const targetFilePath = processor.context.release + "/" + params.filename; + "process": () => { + const sourceFilePath = path.join(processor.context.exported, params.filename); + const targetFilePath = path.join(processor.context.release, params.filename); - processor.onInfo("Copying " + sourceFilePath + " to " + targetFilePath); + processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`); fse.copy(sourceFilePath, targetFilePath, (err) => { if (err) { - processor.onError("Unable to copy file: " + err); + processor.onError(`Unable to copy file: ${err}`); } else { processor.onInfo("Copied file"); } + return processor.done(); }); } diff --git a/BuildServer/lib/tasks/copyglob.js b/BuildServer/lib/tasks/copyglob.js index d2b1e68..a1a7aab 100644 --- a/BuildServer/lib/tasks/copyglob.js +++ b/BuildServer/lib/tasks/copyglob.js @@ -1,14 +1,15 @@ "use strict"; -const glob = require('glob'); +const glob = require("glob"); module.exports = (params, processor) => ({ - process: () => glob(params.mask, { - dot: true, - cwd: processor.context.exported + "process": () => glob(params.mask, { + "cwd": processor.context.exported, + "dot": true }, (err, files) => { if (err) { processor.onError(err); + return processor.done(); } @@ -17,16 +18,14 @@ module.exports = (params, processor) => ({ } return 
diff --git a/BuildServer/lib/tasks/copy.js b/BuildServer/lib/tasks/copy.js
index 4cc64d6..cc0f59c 100644
--- a/BuildServer/lib/tasks/copy.js
+++ b/BuildServer/lib/tasks/copy.js
@@ -1,20 +1,22 @@
 "use strict";
 
-const fse = require('fs-extra');
+const path = require("path");
+const fse = require("fs-extra");
 
 module.exports = (params, processor) => ({
-    process: () => {
-        const sourceFilePath = processor.context.exported + "/" + params.filename;
-        const targetFilePath = processor.context.release + "/" + params.filename;
+    "process": () => {
+        const sourceFilePath = path.join(processor.context.exported, params.filename);
+        const targetFilePath = path.join(processor.context.release, params.filename);
 
-        processor.onInfo("Copying " + sourceFilePath + " to " + targetFilePath);
+        processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`);
 
         fse.copy(sourceFilePath, targetFilePath, (err) => {
             if (err) {
-                processor.onError("Unable to copy file: " + err);
+                processor.onError(`Unable to copy file: ${err}`);
             } else {
                 processor.onInfo("Copied file");
             }
+
             return processor.done();
         });
     }
diff --git a/BuildServer/lib/tasks/copyglob.js b/BuildServer/lib/tasks/copyglob.js
index d2b1e68..a1a7aab 100644
--- a/BuildServer/lib/tasks/copyglob.js
+++ b/BuildServer/lib/tasks/copyglob.js
@@ -1,14 +1,15 @@
 "use strict";
 
-const glob = require('glob');
+const glob = require("glob");
 
 module.exports = (params, processor) => ({
-    process: () => glob(params.mask, {
-        dot: true,
-        cwd: processor.context.exported
+    "process": () => glob(params.mask, {
+        "cwd": processor.context.exported,
+        "dot": true
     }, (err, files) => {
         if (err) {
             processor.onError(err);
+
             return processor.done();
         }
 
@@ -17,16 +18,14 @@ module.exports = (params, processor) => ({
         }
 
         return processor.processTask({
-            type: "parallel",
-            params: {
-                tasks: files.map((file) => ({
-                    name: file,
-                    type: "copy",
-                    params: {
-                        filename: file
-                    }
+            "params": {
+                "tasks": files.map((file) => ({
+                    "name": file,
+                    "params": { "filename": file },
+                    "type": "copy"
                 }))
-            }
+            },
+            "type": "parallel"
         }, processor.done.bind(processor));
     })
 });
diff --git a/BuildServer/lib/tasks/cssnano.js b/BuildServer/lib/tasks/cssnano.js
index 0bb5b43..19858b1 100644
--- a/BuildServer/lib/tasks/cssnano.js
+++ b/BuildServer/lib/tasks/cssnano.js
@@ -1,35 +1,37 @@
 "use strict";
 
-const fs = require('fs');
-const path = require('path');
+const fs = require("fs");
+const path = require("path");
 const cssnano = require("cssnano");
 
 module.exports = (params, processor) => ({
-    process: () => {
-        const filePath = path.normalize(processor.context.exported + "/" + params.filename);
+    "process": () => {
+        const filePath = path.join(processor.context.exported, params.filename);
+
         fs.readFile(filePath, (err, css) => {
             if (err) {
-                processor.onError("Unable to read stylesheet " + params.filename + ": " + err);
+                processor.onError(`Unable to read stylesheet ${params.filename}: ${err}`);
+
                 return processor.done();
             }
 
-            cssnano.process(css)
+            return cssnano.process(css)
                 .catch((err) => {
-                    processor.onError("Unable to uglify stylesheet: " + err);
+                    processor.onError(`Unable to uglify stylesheet: ${err}`);
                     processor.done();
                 })
                 .then((result) => {
                     fs.writeFile(filePath, result.css, (err) => {
                         if (err) {
-                            processor.onError("Unable to write uglified stylesheet for " + params.filename + ": " + err);
+                            processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${err}`);
                         } else {
-                            processor.onInfo("Saved uglified stylesheet for " + params.filename + "; uglified length: " + result.css.length);
+                            processor.onInfo(`Saved uglified stylesheet for ${params.filename}; uglified length: ${result.css.length}`);
                         }
                         processor.done();
                     });
                 });
-    });
+        });
     }
 });
"sequential" : "parallel", - params: { - tasks: files.map((file) => ({ - name: file, - type: "cssnano", - params: { - filename: file - } + "params": { + "tasks": files.map((file) => ({ + "name": file, + "params": { "filename": file }, + "type": "cssnano" })) - } + }, + "type": (params.preventParallelTests && "sequential") || "parallel" }, processor.done.bind(processor)); }); } diff --git a/BuildServer/lib/tasks/deletefromcode.js b/BuildServer/lib/tasks/deletefromcode.js index abdcb7e..f74e3dd 100644 --- a/BuildServer/lib/tasks/deletefromcode.js +++ b/BuildServer/lib/tasks/deletefromcode.js @@ -1,22 +1,22 @@ "use strict"; -const fse = require('fs-extra'); +const path = require("path"); +const fse = require("fs-extra"); -module.exports = (params, processor) => { - return { - process: () => { - var sourceFilePath = processor.context.exported + "/" + params.filename; +module.exports = (params, processor) => ({ + "process": () => { + const sourceFilePath = path.join(processor.context.exported, params.filename); - processor.onInfo("Deleting " + sourceFilePath); + processor.onInfo(`Deleting ${sourceFilePath}`); - fse.remove(sourceFilePath, function(err) { - if (err) { - processor.onError("Unable to delete file: " + err); - } else { - processor.onInfo("Deleted file"); - } - return processor.done(); - }); - } - }; -}; + fse.remove(sourceFilePath, (err) => { + if (err) { + processor.onError(`Unable to delete file: ${err}`); + } else { + processor.onInfo("Deleted file"); + } + + return processor.done(); + }); + } +}); diff --git a/BuildServer/lib/tasks/dotnetbuild.js b/BuildServer/lib/tasks/dotnetbuild.js index c9b3691..de55745 100644 --- a/BuildServer/lib/tasks/dotnetbuild.js +++ b/BuildServer/lib/tasks/dotnetbuild.js @@ -1,17 +1,17 @@ "use strict"; -const sequential = require('./sequential'); +const sequential = require("./sequential"); module.exports = (params, processor) => sequential({ - tasks: [ + "tasks": [ { - type: "dotnetbuildwithoutcleanup", - name: "build", - params: params + "name": "build", + params, + "type": "dotnetbuildwithoutcleanup" }, { - type: "cleanupafterdotnetbuild", - name: "cleanup" + "name": "cleanup", + "type": "cleanupafterdotnetbuild" } ] }, processor); diff --git a/BuildServer/lib/tasks/dotnetbuildandtest.js b/BuildServer/lib/tasks/dotnetbuildandtest.js index 046e4e4..de27456 100644 --- a/BuildServer/lib/tasks/dotnetbuildandtest.js +++ b/BuildServer/lib/tasks/dotnetbuildandtest.js @@ -3,20 +3,20 @@ const sequential = require("./sequential"); module.exports = (params, processor) => sequential({ - tasks: [ + "tasks": [ { - type: "dotnetbuildwithoutcleanup", - name: "build", - params: params + "name": "build", + params, + "type": "dotnetbuildwithoutcleanup" }, { - type: "dotnetnunitall", - name: "test", - params: params + "name": "test", + params, + "type": "dotnetnunitall" }, { - type: "cleanupafterdotnetbuild", - name: "cleanup" + "name": "cleanup", + "type": "cleanupafterdotnetbuild" } ] }, processor); diff --git a/BuildServer/lib/tasks/dotnetbuilderwrapper.js b/BuildServer/lib/tasks/dotnetbuilderwrapper.js index e1cf197..e73bec2 100644 --- a/BuildServer/lib/tasks/dotnetbuilderwrapper.js +++ b/BuildServer/lib/tasks/dotnetbuilderwrapper.js @@ -1,43 +1,52 @@ "use strict"; -const spawn = require('child_process').spawn; +const spawn = require("child_process").spawn; const settings = require("../../settings"); module.exports = (params, processor) => ({ - process: () => { + "process": () => { let result = ""; let error = ""; const builder = spawn(settings.builderExecutable, 
diff --git a/BuildServer/lib/tasks/dotnetbuilderwrapper.js b/BuildServer/lib/tasks/dotnetbuilderwrapper.js
index e1cf197..e73bec2 100644
--- a/BuildServer/lib/tasks/dotnetbuilderwrapper.js
+++ b/BuildServer/lib/tasks/dotnetbuilderwrapper.js
@@ -1,43 +1,52 @@
 "use strict";
 
-const spawn = require('child_process').spawn;
+const spawn = require("child_process").spawn;
 const settings = require("../../settings");
 
 module.exports = (params, processor) => ({
-    process: () => {
+    "process": () => {
         let result = "";
         let error = "";
         const builder = spawn(settings.builderExecutable, [params.command]);
 
-        processor.onInfo("DotNetBuilderWrapper processing (at " + (new Date().toISOString()) + "): " + JSON.stringify(params, null, 4));
+        processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${JSON.stringify(params, null, 4)}`);
 
-        builder.stdout.on('data', (data) => result += data);
-        builder.stderr.on('data', (data) => error += data);
-        builder.on('exit', (code) => {
+        builder.stdout.on("data", (data) => {
+            result += data;
+        });
+
+        builder.stderr.on("data", (data) => {
+            error += data;
+        });
+
+        builder.on("exit", (code) => {
             if (code !== 0) {
-                error = "Return code is " + code + "\r\n" + error;
+                error = `Return code is ${code}\r\n${error}`;
                 processor.onError(error);
+
                 return processor.done();
             }
 
             const report = JSON.parse(result);
             const messages = report.Messages;
+
             messages.forEach((message) => {
                 if (!message) {
                     return processor.onError("Message is null");
                 }
 
-                switch(message.Type) {
-                    case "info":
-                        return processor.onInfo(message.Body);
-                    case "warn":
-                        return processor.onWarn(message.Body);
-                    default:
-                        return processor.onError(message.Body);
+                switch (message.Type) {
+                case "info":
+                    return processor.onInfo(message.Body);
+                case "warn":
+                    return processor.onWarn(message.Body);
+                default:
+                    return processor.onError(message.Body);
                 }
             });
 
-            processor.onInfo("Done DotNetBuilderWrapper processing (at " + (new Date().toISOString()) + ")");
+            processor.onInfo(`Done DotNetBuilderWrapper processing (at ${new Date().toISOString()})`);
+
             return processor.done();
         });
diff --git a/BuildServer/lib/tasks/dotnetbuildwithoutcleanup.js b/BuildServer/lib/tasks/dotnetbuildwithoutcleanup.js
index 714439c..20c4cae 100644
--- a/BuildServer/lib/tasks/dotnetbuildwithoutcleanup.js
+++ b/BuildServer/lib/tasks/dotnetbuildwithoutcleanup.js
@@ -1,42 +1,40 @@
 "use strict";
 
-const sequential = require('./sequential');
+const sequential = require("./sequential");
 
 module.exports = (params, processor) => {
     const tasks = [];
 
     if (!params.skipMbsCheckStyle) {
         tasks.push({
-            type: "dotnetcheckstyle",
-            params: params
+            params,
+            "type": "dotnetcheckstyle"
         });
     }
 
     tasks.push({
-        type: "dotnetrewrite",
-        params: params
+        params,
+        "type": "dotnetrewrite"
     });
 
     if (!params.skipNugetRestore) {
         tasks.push({
-            type: "dotnetnugetrestore",
-            params: params
+            params,
+            "type": "dotnetnugetrestore"
        });
     }
 
     tasks.push({
-        type: "dotnetcompile",
-        params: {
-            solution: params.solution,
-            skipCodeSigning: params.skipCodeSigning,
-            forceCodeAnalysis: params.forceCodeAnalysis,
-            ignoreCodeAnalysis: params.ignoreCodeAnalysis,
-            configuration: params.configuration,
-            target: "Rebuild"
-        }
+        "params": {
+            "configuration": params.configuration,
+            "forceCodeAnalysis": params.forceCodeAnalysis,
+            "ignoreCodeAnalysis": params.ignoreCodeAnalysis,
+            "skipCodeSigning": params.skipCodeSigning,
+            "solution": params.solution,
+            "target": "Rebuild"
+        },
+        "type": "dotnetcompile"
     });
 
-    return sequential({
-        tasks: tasks
-    }, processor);
+    return sequential({ tasks }, processor);
 };
diff --git a/BuildServer/lib/tasks/dotnetcheckstyle.js b/BuildServer/lib/tasks/dotnetcheckstyle.js
index 44b7c8c..080f4b9 100644
--- a/BuildServer/lib/tasks/dotnetcheckstyle.js
+++ b/BuildServer/lib/tasks/dotnetcheckstyle.js
@@ -1,61 +1,74 @@
 "use strict";
 
-const fs = require('fs');
-const async = require('async');
-const glob = require('glob');
+const path = require("path");
+const fs = require("fs");
+const async = require("async");
+const glob = require("glob");
 
-const autoGeneratedMarker =
"//------------------------------------------------------------------------------" + "\n" + - "// "; +const autoGeneratedMarker + = "//------------------------------------------------------------------------------\n" + + "// "; module.exports = (params, processor) => ({ - process: () => { + "process": () => { if (processor.context.dotnetcheckerDone) { return processor.done(); } processor.context.dotnetcheckerDone = true; - glob("**/*.cs", {cwd: processor.context.exported}, (err, files) => { + return glob("**/*.cs", { "cwd": processor.context.exported }, (err, files) => { if (err) { processor.onError(err); + return processor.done(); } - processor.onInfo("Found " + files.length + " .cs files"); + processor.onInfo(`Found ${files.length} .cs files`); if (!files || !files.length) { processor.onWarn("No .cs files found"); + return processor.done(); } - return async.parallel(files.map((file) => (callback) => fs.readFile(processor.context.exported + "/" + file, { encoding: "utf8" }, (err, data) => { + return async.parallel(files.map((file) => (callback) => fs.readFile(path.join(processor.context.exported, file), { "encoding": "utf8" }, (err, data) => { if (err) { - processor.onError("Unable to check file " + file + ": " + err); + processor.onError(`Unable to check file ${file}: ${err}`); + return callback(err); } + if (data.indexOf("\r\n") >= 0) { - processor.onError("Windows-style EOL (0D0A) found in file " + file); + processor.onError(`Windows-style EOL (0D0A) found in file ${file}`); + return callback(); } + if (params.ignoreCodeStyle) { return callback(); } - if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.substr(0, autoGeneratedMarker.length) === autoGeneratedMarker) { - processor.onInfo("Skipping auto-generated file " + file); + + if ( + data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker + || data.substr(0, autoGeneratedMarker.length) === autoGeneratedMarker + ) { + processor.onInfo(`Skipping auto-generated file ${file}`); + return callback(); } if (data.indexOf("\t") >= 0 && data.indexOf(" ") >= 0) { - processor.onError("Both tabs and spaces found in file " + file); + processor.onError(`Both tabs and spaces found in file ${file}`); } if (data.indexOf("\t") >= 0) { - processor.onError("Tabs found in file " + file); + processor.onError(`Tabs found in file ${file}`); } - processor.onInfo("Checked file " + file); - callback(); + processor.onInfo(`Checked file ${file}`); + + return callback(); })), processor.done.bind(processor)); }); } diff --git a/BuildServer/lib/tasks/dotnetcompile.js b/BuildServer/lib/tasks/dotnetcompile.js index 1d0663b..d95a270 100644 --- a/BuildServer/lib/tasks/dotnetcompile.js +++ b/BuildServer/lib/tasks/dotnetcompile.js @@ -1,33 +1,35 @@ "use strict"; -const settings = require('../../settings'); -const dotnetbuilderwrapper = require('./dotnetbuilderwrapper'); +const path = require("path"); +const settings = require("../../settings"); +const dotnetbuilderwrapper = require("./dotnetbuilderwrapper"); module.exports = (params, processor) => { const compileParams = { - command: "compile", - SolutionPath: processor.context.exported + "/" + params.solution, - Configuration: params.configuration, - Target: params.target, - OutputDirectory: params.overrideOutputDirectory + "Configuration": params.configuration, + "OutputDirectory": params.overrideOutputDirectory, + "SolutionPath": path.join(processor.context.exported, params.solution), + "Target": params.target, + "command": "compile" }; + if (!settings.skipCodeSigning && 
!params.skipCodeSigning) { compileParams.SigningKey = settings.codeSigningKeyFile; } - if (settings.isCodeAnalysisUnsupported) { - if (params.forceCodeAnalysis) { - processor.onError("Code analysis is not supported"); - processor.done(); - return; - } + + if (settings.isCodeAnalysisUnsupported && params.forceCodeAnalysis) { + processor.onError("Code analysis is not supported"); + + return processor.done(); + } + + if ( + settings.isCodeAnalysisUnsupported + || params.ignoreCodeAnalysis + || (settings.ignoreCodeAnalysisByDefault && !params.forceCodeAnalysis) + ) { compileParams.SkipCodeAnalysis = true; - } else { - if (settings.ignoreCodeAnalysisByDefault && !params.forceCodeAnalysis) { - compileParams.SkipCodeAnalysis = true; - } - if (params.ignoreCodeAnalysis) { - compileParams.SkipCodeAnalysis = true; - } } + return dotnetbuilderwrapper(compileParams, processor); }; diff --git a/BuildServer/lib/tasks/dotnetnugetpack.js b/BuildServer/lib/tasks/dotnetnugetpack.js index 211cdeb..4c29aef 100644 --- a/BuildServer/lib/tasks/dotnetnugetpack.js +++ b/BuildServer/lib/tasks/dotnetnugetpack.js @@ -1,28 +1,39 @@ "use strict"; -const sequential = require('./sequential'); +const path = require("path"); +const sequential = require("./sequential"); + +const addPostfix = (version, params, processor) => { + if (params.withoutCommitSha) { + return version; + } + + return `${version}-r${processor.context.rev.substr(0, 16)}`; +}; module.exports = (params, processor) => { const date = new Date(); - const version = (params.version || ((params.major || "0") + "." + (date.getFullYear() * 10000 + (date.getMonth() + 1) * 100 + date.getDate()) + "." + ((date.getHours() * 100 + date.getMinutes()) * 100 + date.getSeconds()))) + (params.withoutCommitSha ? "" : ("-r" + processor.context.rev.substr(0, 16))); + const major = params.major || "0"; + const minor = (date.getFullYear() * 10000) + ((date.getMonth() + 1) * 100) + date.getDate(); + const build = (date.getHours() * 10000) + (date.getMinutes() * 100) + date.getSeconds(); + const version = addPostfix(params.version || `${major}.${minor}.${build}`, params, processor); + const nupkg = `${params.name}.${version}.nupkg`; return sequential({ - tasks: [ + "tasks": [ { - type: "dotnetbuilderwrapper", - params: { - command: "nugetpack", - BaseDirectory: processor.context.exported, - SpecPath: processor.context.exported + "/" + params.nuspec, - OutputDirectory: processor.context.exported, - Version: version - } + "params": { + "BaseDirectory": processor.context.exported, + "OutputDirectory": processor.context.exported, + "SpecPath": path.join(processor.context.exported, params.nuspec), + "Version": version, + "command": "nugetpack" + }, + "type": "dotnetbuilderwrapper" }, { - type: "copy", - params: { - filename: params.name + "." 
+ version + ".nupkg" - } + "params": { "filename": nupkg }, + "type": "copy" } ] }, processor); diff --git a/BuildServer/lib/tasks/dotnetnugetprocess.js b/BuildServer/lib/tasks/dotnetnugetprocess.js index edca52b..c2110d6 100644 --- a/BuildServer/lib/tasks/dotnetnugetprocess.js +++ b/BuildServer/lib/tasks/dotnetnugetprocess.js @@ -1,30 +1,30 @@ "use strict"; -const conditional = require('./conditional'); +const conditional = require("./conditional"); module.exports = (params, processor) => conditional({ - owner: params.masterRepoOwner, - branch: "master", - task: { - name: "nuget-push", - type: "dotnetnugetpush", - params: { - nuspec: params.nuspecName + ".nuspec", - name: params.nuspecName, - withoutCommitSha: params.withoutCommitSha, - version: params.version, - major: params.major - } + "branch": "master", + "otherwise": { + "name": "nuget-pack", + "params": { + "major": params.major, + "name": params.nuspecName, + "nuspec": `${params.nuspecName}.nuspec`, + "version": params.version, + "withoutCommitSha": params.withoutCommitSha + }, + "type": "dotnetnugetpack" }, - otherwise: { - name: "nuget-pack", - type: "dotnetnugetpack", - params: { - nuspec: params.nuspecName + ".nuspec", - name: params.nuspecName, - withoutCommitSha: params.withoutCommitSha, - version: params.version, - major: params.major - } + "owner": params.masterRepoOwner, + "task": { + "name": "nuget-push", + "params": { + "major": params.major, + "name": params.nuspecName, + "nuspec": `${params.nuspecName}.nuspec`, + "version": params.version, + "withoutCommitSha": params.withoutCommitSha + }, + "type": "dotnetnugetpush" } }, processor); diff --git a/BuildServer/lib/tasks/dotnetnugetpush.js b/BuildServer/lib/tasks/dotnetnugetpush.js index c3c2557..df3bd51 100644 --- a/BuildServer/lib/tasks/dotnetnugetpush.js +++ b/BuildServer/lib/tasks/dotnetnugetpush.js @@ -1,29 +1,39 @@ "use strict"; +const path = require("path"); const sequential = require("./sequential"); +const addPostfix = (version, params, processor) => { + if (params.withoutCommitSha) { + return version; + } + + return `${version}-r${processor.context.rev.substr(0, 16)}`; +}; + module.exports = (params, processor) => { const date = new Date(); - const version = (params.version || ((params.major || "0") + "." + (date.getFullYear() * 10000 + (date.getMonth() + 1) * 100 + date.getDate()) + "." + ((date.getHours() * 100 + date.getMinutes()) * 100 + date.getSeconds()))) + (params.withoutCommitSha ? "" : ("-r" + processor.context.rev.substr(0, 16))); - const nupkg = params.name + "." 
+ version + ".nupkg"; + const major = params.major || "0"; + const minor = (date.getFullYear() * 10000) + ((date.getMonth() + 1) * 100) + date.getDate(); + const build = (date.getHours() * 10000) + (date.getMinutes() * 100) + date.getSeconds(); + const version = addPostfix(params.version || `${major}.${minor}.${build}`, params, processor); + const nupkg = `${params.name}.${version}.nupkg`; return sequential({ - tasks: [ + "tasks": [ { - type: "dotnetbuilderwrapper", - params: { - command: "nugetpack", - BaseDirectory: processor.context.exported, - SpecPath: processor.context.exported + "/" + params.nuspec, - OutputDirectory: processor.context.exported, - Version: version - } + "params": { + "BaseDirectory": processor.context.exported, + "OutputDirectory": processor.context.exported, + "SpecPath": path.join(processor.context.exported, params.nuspec), + "Version": version, + "command": "nugetpack" + }, + "type": "dotnetbuilderwrapper" }, { - type: "dotnetnugetpushonly", - params: { - Package: nupkg - } + "params": { "Package": nupkg }, + "type": "dotnetnugetpushonly" } ] }, processor); diff --git a/BuildServer/lib/tasks/dotnetnugetpushonly.js b/BuildServer/lib/tasks/dotnetnugetpushonly.js index def1f9f..a7c0ffa 100644 --- a/BuildServer/lib/tasks/dotnetnugetpushonly.js +++ b/BuildServer/lib/tasks/dotnetnugetpushonly.js @@ -1,11 +1,12 @@ "use strict"; -const dotnetbuilderwrapper = require('./dotnetbuilderwrapper'); +const path = require("path"); +const dotnetbuilderwrapper = require("./dotnetbuilderwrapper"); const settings = require("../../settings"); module.exports = (params, processor) => dotnetbuilderwrapper({ - command: "nugetpush", - Package: processor.context.exported + "/" + params.Package, - NugetHost: settings.nugetHost, - ApiKey: settings.nugetApiKey + "ApiKey": settings.nugetApiKey, + "NugetHost": settings.nugetHost, + "Package": path.join(processor.context.exported, params.Package), + "command": "nugetpush" }, processor); diff --git a/BuildServer/lib/tasks/dotnetnugetrestore.js b/BuildServer/lib/tasks/dotnetnugetrestore.js index 2d296d7..e8134c8 100644 --- a/BuildServer/lib/tasks/dotnetnugetrestore.js +++ b/BuildServer/lib/tasks/dotnetnugetrestore.js @@ -1,16 +1,17 @@ "use strict"; -const sequential = require('./sequential'); +const path = require("path"); +const sequential = require("./sequential"); module.exports = (params, processor) => sequential({ - tasks: [ + "tasks": [ { - type: "dotnetbuilderwrapper", - params: { - command: "nugetrestore", - BaseDirectory: processor.context.exported, - SolutionPath: processor.context.exported + "/" + params.solution - } + "params": { + "BaseDirectory": processor.context.exported, + "SolutionPath": path.join(processor.context.exported, params.solution), + "command": "nugetrestore" + }, + "type": "dotnetbuilderwrapper" } ] }, processor); diff --git a/BuildServer/lib/tasks/dotnetnunit.js b/BuildServer/lib/tasks/dotnetnunit.js index 8867f8f..d5564d4 100644 --- a/BuildServer/lib/tasks/dotnetnunit.js +++ b/BuildServer/lib/tasks/dotnetnunit.js @@ -1,9 +1,9 @@ "use strict"; -const dotNetBuilderWrapper = require('./dotnetbuilderwrapper'); +const path = require("path"); +const dotNetBuilderWrapper = require("./dotnetbuilderwrapper"); module.exports = (params, processor) => dotNetBuilderWrapper({ - command: "nunit", - TestLibraryPath: processor.context.exported + "/" + params.assembly//, -// OutputPath: processor.context.release + "/" + params.solution + "/" + "TestLibraryPath": path.join(processor.context.exported, params.assembly), + "command": 
"nunit" }, processor); diff --git a/BuildServer/lib/tasks/dotnetnunitall.js b/BuildServer/lib/tasks/dotnetnunitall.js index a5a356e..daa76b9 100644 --- a/BuildServer/lib/tasks/dotnetnunitall.js +++ b/BuildServer/lib/tasks/dotnetnunitall.js @@ -1,9 +1,9 @@ "use strict"; -const glob = require('glob'); +const glob = require("glob"); module.exports = (params, processor) => ({ - process: () => { + "process": () => { if (processor.context.dotnetnunitallDone) { processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json"); } @@ -11,30 +11,30 @@ module.exports = (params, processor) => ({ processor.context.dotnetnunitallDone = true; glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", { - dot: true, - cwd: processor.context.exported + "cwd": processor.context.exported, + "dot": true }, (err, files) => { if (err) { processor.onError(err); + return processor.done(); } if (!files || !files.length) { - processor.onError("No test assemblies found in " + processor.context.exported); + processor.onError(`No test assemblies found in ${processor.context.exported}`); + return processor.done(); } return processor.processTask({ - type: params.preventParallelTests ? "sequential" : "parallel", - params: { - tasks: files.map((file) => ({ - name: file, - type: "dotnetnunit", - params: { - assembly: file - } + "params": { + "tasks": files.map((file) => ({ + "name": file, + "params": { "assembly": file }, + "type": "dotnetnunit" })) - } + }, + "type": (params.preventParallelTests && "sequential") || "parallel" }, processor.done.bind(processor)); }); } diff --git a/BuildServer/lib/tasks/dotnetpackwebapp.js b/BuildServer/lib/tasks/dotnetpackwebapp.js index 7f061d9..1b42dcb 100644 --- a/BuildServer/lib/tasks/dotnetpackwebapp.js +++ b/BuildServer/lib/tasks/dotnetpackwebapp.js @@ -1,47 +1,48 @@ "use strict"; -const fs = require('fs'); -const Mustache = require('mustache'); +const path = require("path"); +const fs = require("fs"); +const Mustache = require("mustache"); -const sequential = require('./sequential'); +const sequential = require("./sequential"); -const msbuildTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.msbuild", {encoding: "utf8"}); -const deployTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.bat", {encoding: "utf8"}); -const versionTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.version.aspx", {encoding: "utf8"}); +const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" }); +const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" }); +const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" }); module.exports = (params, processor) => sequential({ - tasks: [ + "tasks": [ { - type: "writefile", - params: { - filename: "MakePackage.msbuild", - data: Mustache.render(msbuildTemplate, params) - } + "params": { + "data": Mustache.render(msbuildTemplate, params), + "filename": "MakePackage.msbuild" + }, + "type": "writefile" }, { - type: "writefile", - params: { - filename: "Deploy.bat", - data: Mustache.render(deployTemplate, params) - } + "params": { + "data": Mustache.render(deployTemplate, params), + "filename": "Deploy.bat" + }, + "type": "writefile" }, { - type: "writefile", - params: { - filename: "version.aspx", - data: Mustache.render(versionTemplate, params) - } + "params": { + "data": 
diff --git a/BuildServer/lib/tasks/dotnetpackwebapp.js b/BuildServer/lib/tasks/dotnetpackwebapp.js
index 7f061d9..1b42dcb 100644
--- a/BuildServer/lib/tasks/dotnetpackwebapp.js
+++ b/BuildServer/lib/tasks/dotnetpackwebapp.js
@@ -1,47 +1,48 @@
 "use strict";

-const fs = require('fs');
-const Mustache = require('mustache');
+const path = require("path");
+const fs = require("fs");
+const Mustache = require("mustache");

-const sequential = require('./sequential');
+const sequential = require("./sequential");

-const msbuildTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.msbuild", {encoding: "utf8"});
-const deployTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.bat", {encoding: "utf8"});
-const versionTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.version.aspx", {encoding: "utf8"});
+const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
+const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
+const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });

 module.exports = (params, processor) => sequential({
-    tasks: [
+    "tasks": [
         {
-            type: "writefile",
-            params: {
-                filename: "MakePackage.msbuild",
-                data: Mustache.render(msbuildTemplate, params)
-            }
+            "params": {
+                "data": Mustache.render(msbuildTemplate, params),
+                "filename": "MakePackage.msbuild"
+            },
+            "type": "writefile"
         },
         {
-            type: "writefile",
-            params: {
-                filename: "Deploy.bat",
-                data: Mustache.render(deployTemplate, params)
-            }
+            "params": {
+                "data": Mustache.render(deployTemplate, params),
+                "filename": "Deploy.bat"
+            },
+            "type": "writefile"
         },
         {
-            type: "writefile",
-            params: {
-                filename: "version.aspx",
-                data: Mustache.render(versionTemplate, params)
-            }
+            "params": {
+                "data": Mustache.render(versionTemplate, params),
+                "filename": "version.aspx"
+            },
+            "type": "writefile"
         },
         {
-            type: "dotnetcompile",
-            params: {
-                solution: "MakePackage.msbuild",
-                skipCodeSigning: params.skipCodeSigning,
-                isCodeAnalysisUnsupported: params.isCodeAnalysisUnsupported,
-                configuration: params.configuration,
-                target: "Package",
-                overrideOutputDirectory: processor.context.release
-            }
+            "params": {
+                "configuration": params.configuration,
+                "isCodeAnalysisUnsupported": params.isCodeAnalysisUnsupported,
+                "overrideOutputDirectory": processor.context.release,
+                "skipCodeSigning": params.skipCodeSigning,
+                "solution": "MakePackage.msbuild",
+                "target": "Package"
+            },
+            "type": "dotnetcompile"
         }
     ]
 }, processor);
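
dotnetpackwebapp.js keeps the same flow: read each template once at require time, render it with the task params, and hand the result to the writefile task. A minimal sketch of that render step; the template text and view values here are invented:

    const Mustache = require("mustache");

    // Invented one-line template standing in for the .template.version.aspx file.
    const versionTemplate = "<!-- {{version}} built from {{owner}}/{{reponame}} -->";

    const rendered = Mustache.render(versionTemplate, {
        "owner": "example-owner",
        "reponame": "example-repo",
        "version": "1.0.0"
    });

    console.log(rendered); // <!-- 1.0.0 built from example-owner/example-repo -->
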
diff --git a/BuildServer/lib/tasks/dotnetrewrite.js b/BuildServer/lib/tasks/dotnetrewrite.js
index 8a16dfe..6741493 100644
--- a/BuildServer/lib/tasks/dotnetrewrite.js
+++ b/BuildServer/lib/tasks/dotnetrewrite.js
@@ -1,57 +1,60 @@
 "use strict";

-const fs = require('fs');
-const async = require('async');
-const glob = require('glob');
-const settings = require('../../settings');
-
-const addAssemblyAttribute = (content, attribute) => content + "\n" + attribute + "\n";
+const path = require("path");
+const fs = require("fs");
+const async = require("async");
+const glob = require("glob");
+const settings = require("../../settings");

 module.exports = (params, processor) => ({
-    process: () => {
+    "process": () => {
         if (processor.context.dotnetrewriterDone) {
             return processor.done();
         }

         processor.context.dotnetrewriterDone = true;

-        const processAssemblyInfo = (appendInformationalVersion) => (content, cb) => {
+        const processAssemblyInfo = (appendInformationalVersion) => (originalContent, cb) => {
+            let content = originalContent;
+
             if (!params.skipCodeSigning && !settings.skipCodeSigning) {
                 content = content.replace(
                     /InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
-                    (match, p1) => "InternalsVisibleTo(\"" + p1 + ",PublicKey=" + settings.codeSigningPublicKey + "\")"
+                    (match, p1) => `InternalsVisibleTo("${p1},PublicKey=${settings.codeSigningPublicKey}")`
                 );
             }

             if (appendInformationalVersion) {
-                content = addAssemblyAttribute(content, "[assembly: System.Reflection.AssemblyInformationalVersion(\"" + processor.context.versionInfo + "\")]");
+                content = `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]`;
             }

             return cb(null, content);
         };

-        glob("**/{InternalsVisible,AssemblyInfo}*.cs", {cwd: processor.context.exported}, (err, files) => {
+        return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { "cwd": processor.context.exported }, (err, files) => {
             if (err) {
                 processor.onError(err);
+
                 return processor.done();
             }

-            processor.onInfo("Found " + files.length + " AssemblyInfo.cs files");
+            processor.onInfo(`Found ${files.length} AssemblyInfo.cs files`);

             if (!files || !files.length) {
                 processor.onWarn("No AssemblyInfo.cs found");
+
                 return processor.done();
             }

             return async.parallel(files.map((file) => (callback) => async.waterfall([
-                fs.readFile.bind(null, processor.context.exported + "/" + file, { encoding: "utf8" }),
+                fs.readFile.bind(null, path.join(processor.context.exported, file), { "encoding": "utf8" }),
                 processAssemblyInfo(file.toLowerCase().indexOf("assemblyinfo.cs") >= 0),
-                fs.writeFile.bind(null, processor.context.exported + "/" + file)
+                fs.writeFile.bind(null, path.join(processor.context.exported, file))
             ], (err) => {
                 if (err) {
-                    processor.onError("Unable to rewrite file " + file + ": " + err);
+                    processor.onError(`Unable to rewrite file ${file}: ${err}`);
                 } else {
-                    processor.onInfo("Rewritten file " + file);
+                    processor.onInfo(`Rewritten file ${file}`);
                 }
                 callback(err);
             })), processor.done.bind(processor));
diff --git a/BuildServer/lib/tasks/echo.js b/BuildServer/lib/tasks/echo.js
index 6dd4a3d..dd5560b 100644
--- a/BuildServer/lib/tasks/echo.js
+++ b/BuildServer/lib/tasks/echo.js
@@ -1,7 +1,7 @@
 "use strict";

 module.exports = (params, processor) => ({
-    process: () => {
+    "process": () => {
         if (params.error) {
             processor.onError(params.error);
         }
diff --git a/BuildServer/lib/tasks/eslintbrowser.js b/BuildServer/lib/tasks/eslintbrowser.js
index 60c4d88..005dd67 100644
--- a/BuildServer/lib/tasks/eslintbrowser.js
+++ b/BuildServer/lib/tasks/eslintbrowser.js
@@ -1,22 +1,21 @@
 "use strict";

-const fs = require('fs');
-const path = require('path');
+const path = require("path");
 const CLIEngine = require("eslint").CLIEngine;
 const settings = require("../../settings");

-const cli = new CLIEngine({
-    configFile: settings.eslintBrowserConfig
-});
+const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig });

 module.exports = (params, processor) => ({
-    process: () => {
-        const filePath = path.normalize(processor.context.exported + "/" + params.filename);
+    "process": () => {
+        const filePath = path.join(processor.context.exported, params.filename);
         const result = cli.executeOnFiles([filePath]);
-        processor.onInfo("ESLinted " + params.filename);
+
+        processor.onInfo(`ESLinted ${params.filename}`);

         result.results.forEach((subresult) => {
             subresult.messages.forEach((message) => {
-                const messageText = params.filename + ":" + message.line + "," + message.column + " (" + message.ruleId + ") " + message.message;
+                const messageText = `${params.filename}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;
+
                 if (message.fatal || message.severity === 2) {
                     processor.onError(messageText);
                 } else {
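
The eslintbrowser task relies on the legacy CLIEngine API: executeOnFiles returns a report whose results array carries per-file messages with line, column, ruleId, severity and fatal fields. A compact sketch of how those messages can be mapped to error/warning callbacks; the config path, file name and reporting stubs are illustrative:

    const CLIEngine = require("eslint").CLIEngine;

    const cli = new CLIEngine({ "configFile": "eslint-browser.json" }); // illustrative config path

    const lintOne = (filePath, onError, onWarn) => {
        const result = cli.executeOnFiles([filePath]);

        result.results.forEach((subresult) => subresult.messages.forEach((message) => {
            const text = `${filePath}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;

            if (message.fatal || message.severity === 2) {
                onError(text);
            } else {
                onWarn(text);
            }
        }));
    };

    lintOne("public/js/app.js", console.error, console.warn); // illustrative file
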
"sequential" : "parallel", - params: { - tasks: files.filter(file => !excludeFiles.includes(file)).map((file) => ({ - name: file, - type: "eslintbrowser", - params: { - filename: file - } + "params": { + "tasks": files.filter((file) => !excludeFiles.includes(file)).map((file) => ({ + "name": file, + "params": { "filename": file }, + "type": "eslintbrowser" })) - } + }, + "type": (params.preventParallelTests && "sequential") || "parallel" }, processor.done.bind(processor)); }); } diff --git a/BuildServer/lib/tasks/noop.js b/BuildServer/lib/tasks/noop.js index 9e3b3fe..5e82011 100644 --- a/BuildServer/lib/tasks/noop.js +++ b/BuildServer/lib/tasks/noop.js @@ -1,5 +1,3 @@ "use strict"; -module.exports = (params, processor) => ({ - process: () => processor.done() -}); +module.exports = (params, processor) => ({ "process": () => processor.done() }); diff --git a/BuildServer/lib/tasks/packform.js b/BuildServer/lib/tasks/packform.js index 19d2807..38c4b3b 100644 --- a/BuildServer/lib/tasks/packform.js +++ b/BuildServer/lib/tasks/packform.js @@ -1,34 +1,28 @@ "use strict"; -const sequential = require('./sequential'); +const sequential = require("./sequential"); module.exports = (params, processor) => sequential({ - tasks: [ + "tasks": [ { - type: "eslintbrowserall", - params: { - excludeFiles: params.eslintExcludeFiles - } + "params": { "excludeFiles": params.eslintExcludeFiles }, + "type": "eslintbrowserall" }, + { "type": "uglifyjsall" }, + { "type": "cssnanoall" }, { - type: "uglifyjsall" + "params": { + "data": processor.context.versionInfo, + "filename": "version.txt" + }, + "type": "writefile" }, { - type: "cssnanoall" - }, - { - type: "writefile", - params: { - filename: "version.txt", - data: processor.context.versionInfo - } - }, - { - type: "zip", - params: { - directory: "", - archive: processor.context.reponame + ".zip" - } + "params": { + "archive": `${processor.context.reponame}.zip`, + "directory": "" + }, + "type": "zip" } ] }, processor); diff --git a/BuildServer/lib/tasks/parallel.js b/BuildServer/lib/tasks/parallel.js index d872cae..6e4525e 100644 --- a/BuildServer/lib/tasks/parallel.js +++ b/BuildServer/lib/tasks/parallel.js @@ -2,6 +2,6 @@ const async = require("async"); -module.exports = (params, processor) => ({ - process: () => async.parallel(params.tasks.map((task) => (callback) => processor.processTask(task, (err) => callback())), processor.done.bind(processor)) -}); +const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback); + +module.exports = (params, processor) => ({ "process": () => async.parallel(params.tasks.map(mapper(processor)), () => processor.done()) }); diff --git a/BuildServer/lib/tasks/sequential.js b/BuildServer/lib/tasks/sequential.js index 5bd63f2..818813d 100644 --- a/BuildServer/lib/tasks/sequential.js +++ b/BuildServer/lib/tasks/sequential.js @@ -2,9 +2,6 @@ const async = require("async"); -module.exports = (params, processor) => { - const mapper = Function.bind.bind(processor.processTask, processor); - return { - process: () => async.series(params.tasks.map(mapper), processor.done.bind(processor)) - }; -}; +const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback); + +module.exports = (params, processor) => ({ "process": () => async.series(params.tasks.map(mapper(processor)), () => processor.done()) }); diff --git a/BuildServer/lib/tasks/uglifyjs.js b/BuildServer/lib/tasks/uglifyjs.js index d38509d..cea7827 100644 --- a/BuildServer/lib/tasks/uglifyjs.js +++ 
diff --git a/BuildServer/lib/tasks/uglifyjs.js b/BuildServer/lib/tasks/uglifyjs.js
index d38509d..cea7827 100644
--- a/BuildServer/lib/tasks/uglifyjs.js
+++ b/BuildServer/lib/tasks/uglifyjs.js
@@ -1,18 +1,19 @@
 "use strict";

-const fs = require('fs');
-const path = require('path');
+const fs = require("fs");
+const path = require("path");
 const UglifyJS = require("uglify-js");

 module.exports = (params, processor) => ({
-    process: () => {
-        const filePath = path.normalize(processor.context.exported + "/" + params.filename);
+    "process": () => {
+        const filePath = path.normalize(path.join(processor.context.exported, params.filename));
         const result = UglifyJS.minify(filePath);
+
         fs.writeFile(filePath, result.code, (err) => {
             if (err) {
-                processor.onError("Unable to write uglified script for " + params.filename + ": " + err);
+                processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
             } else {
-                processor.onInfo("Saved uglified script for " + params.filename + "; uglified length: " + result.code.length);
+                processor.onInfo(`Saved uglified script for ${params.filename}; uglified length: ${result.code.length}`);
             }

             processor.done();
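
The uglifyjs task keeps the uglify-js 2.x calling convention, where minify can be given a file path and reads the source itself, returning an object whose code property holds the minified output (in uglify-js 3.x minify expects source text instead). A minimal sketch of that flow outside the task wrapper; the file name is illustrative:

    const fs = require("fs");
    const UglifyJS = require("uglify-js"); // uglify-js 2.x style API assumed

    const filePath = "public/js/app.js"; // illustrative path

    const result = UglifyJS.minify(filePath); // 2.x reads the file from disk
    fs.writeFile(filePath, result.code, (err) => {
        if (err) {
            return console.error(`Unable to write uglified script: ${err}`);
        }

        return console.log(`Minified length: ${result.code.length}`);
    });
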
"sequential" : "parallel", - params: { - tasks: files.map((file) => ({ - name: file, - type: "uglifyjs", - params: { - filename: file - } + "params": { + "tasks": files.map((file) => ({ + "name": file, + "params": { "filename": file }, + "type": "uglifyjs" })) - } + }, + "type": (params.preventParallelTests && "sequential") || "parallel" }, processor.done.bind(processor)); }); } diff --git a/BuildServer/lib/tasks/writefile.js b/BuildServer/lib/tasks/writefile.js index 47edc87..30b02d5 100644 --- a/BuildServer/lib/tasks/writefile.js +++ b/BuildServer/lib/tasks/writefile.js @@ -1,18 +1,21 @@ "use strict"; -const fs = require('fs'); +const fs = require("fs"); +const path = require("path"); module.exports = (params, processor) => ({ - process: () => { - const filePath = processor.context.exported + "/" + params.filename; - processor.onInfo("Writing to " + filePath); + "process": () => { + const filePath = path.join(processor.context.exported, params.filename); + + processor.onInfo(`Writing to ${filePath}`); fs.writeFile(filePath, params.data, (err) => { if (err) { - processor.onError("Unable to write file: " + err); + processor.onError(`Unable to write file: ${err}`); } else { processor.onInfo("Written file"); } + return processor.done(); }); } diff --git a/BuildServer/lib/tasks/zip.js b/BuildServer/lib/tasks/zip.js index 50cd70b..49d8eaa 100644 --- a/BuildServer/lib/tasks/zip.js +++ b/BuildServer/lib/tasks/zip.js @@ -1,22 +1,22 @@ "use strict"; -const fs = require('fs'); -const path = require('path'); -const Archiver = require('archiver'); +const fs = require("fs"); +const path = require("path"); +const Archiver = require("archiver"); module.exports = (params, processor) => ({ - process: () => { - const sourceDirectoryPath = path.normalize(processor.context.exported + "/" + (params.directory || "")); - const targetArchivePath = path.normalize(processor.context.release + "/" + params.archive); + "process": () => { + const sourceDirectoryPath = path.normalize(path.join(processor.context.exported, params.directory || "")); + const targetArchivePath = path.normalize(path.join(processor.context.release, params.archive)); - processor.onInfo("Compressing '" + params.directory + "' to " + params.archive); + processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`); const output = fs.createWriteStream(targetArchivePath); const archive = new Archiver("zip"); output.on("close", () => processor.done()); - archive.on("error", (err) => processor.onError("Error while compressing: " + err)); + archive.on("error", (err) => processor.onError(`Error while compressing: ${err}`)); archive.pipe(output); archive.directory(sourceDirectoryPath, false); archive.finalize(); diff --git a/BuildServer/routes/artifact.js b/BuildServer/routes/artifact.js index 9189c51..57b3dee 100644 --- a/BuildServer/routes/artifact.js +++ b/BuildServer/routes/artifact.js @@ -2,13 +2,15 @@ module.exports = (req, res) => { const options = { - owner: req.params.owner, - reponame: req.params.reponame, - branchName: req.params.branch, - branch: "/refs/heads/" + req.params.branch, - rev: req.params.rev, - file: req.params[0] + "branch": `/refs/heads/${req.params.branch}`, + "branchName": req.params.branch, + "file": req.params[0], + "owner": req.params.owner, + "reponame": req.params.reponame, + "rev": req.params.rev }; - res.sendfile(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/" + options.file); + const pathParts = [req.app.get("releasepath"), 
diff --git a/BuildServer/routes/artifact.js b/BuildServer/routes/artifact.js
index 9189c51..57b3dee 100644
--- a/BuildServer/routes/artifact.js
+++ b/BuildServer/routes/artifact.js
@@ -2,13 +2,15 @@

 module.exports = (req, res) => {
     const options = {
-        owner: req.params.owner,
-        reponame: req.params.reponame,
-        branchName: req.params.branch,
-        branch: "/refs/heads/" + req.params.branch,
-        rev: req.params.rev,
-        file: req.params[0]
+        "branch": `/refs/heads/${req.params.branch}`,
+        "branchName": req.params.branch,
+        "file": req.params[0],
+        "owner": req.params.owner,
+        "reponame": req.params.reponame,
+        "rev": req.params.rev
     };

-    res.sendfile(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/" + options.file);
+    const pathParts = [req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev, options.file];
+
+    res.sendfile(pathParts.join("/"));
 };
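
artifact.js now collects the path segments into an array and joins them, instead of chaining + operators. With invented route parameters the composed path looks like this sketch (note that options.branch already carries a leading slash, so the joined path contains a double slash, just as the old concatenation did):

    const pathParts = [
        "/data/releases",        // stand-in for req.app.get("releasepath")
        "example-owner",
        "example-repo",
        "/refs/heads/master",    // options.branch keeps its leading slash
        "0123456789abcdef",
        "build/output.zip"
    ];

    console.log(pathParts.join("/"));
    // /data/releases/example-owner/example-repo//refs/heads/master/0123456789abcdef/build/output.zip
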
diff --git a/BuildServer/routes/index.js b/BuildServer/routes/index.js
index c811c69..985145b 100644
--- a/BuildServer/routes/index.js
+++ b/BuildServer/routes/index.js
@@ -1,9 +1,9 @@
 "use strict";

-exports.index = (req, res) => res.render('index', { title: 'Express' + req + "qq" });
+exports.index = (req, res) => res.render("index", { "title": `Express\r\n${req}` });

-exports.postreceive = require('./postreceive');
-exports.manual = require('./manual');
-exports.status = require('./status');
-exports.artifact = require('./artifact');
-exports.release = require('./release');
+exports.postreceive = require("./postreceive");
+exports.manual = require("./manual");
+exports.status = require("./status");
+exports.artifact = require("./artifact");
+exports.release = require("./release");
diff --git a/BuildServer/routes/manual.js b/BuildServer/routes/manual.js
index 522973e..a9ee5e8 100644
--- a/BuildServer/routes/manual.js
+++ b/BuildServer/routes/manual.js
@@ -1,20 +1,21 @@
 "use strict";

-const builder = require('../lib/builder');
+const builder = require("../lib/builder");

-exports.get = (req, res) => res.render('manual');
+exports.get = (req, res) => res.render("manual");

 exports.post = (req, res) => {
     const options = req.body;
-    options.url = "https://pos-github.payonline.ru/" + options.owner + "/" + options.reponame;
+
+    options.url = `https://pos-github.payonline.ru/${options.owner}/${options.reponame}`;
     options.app = req.app;

     builder.build(options, (err, result) => {
         console.log("Done processing manual request");
-        console.log("Error: " + err);
-        //console.log("Result:");
-        //console.log(result);
-        res.render('manual-done', {err: err, result: result});
-        //res.render("manual-done", { err: err, result: result });
+        console.log(`Error: ${err}`);
+        res.render("manual-done", {
+            err,
+            result
+        });
     });
 };
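
The manual route's render call now uses ES2015 shorthand properties, which is purely syntactic: `{ err, result }` expands to the same object as the old `{ err: err, result: result }`. For example:

    const err = null;
    const result = { "status": "OK" };

    // Shorthand and longhand produce identical objects.
    console.log(JSON.stringify({ err, result }) === JSON.stringify({ "err": err, "result": result })); // true
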
diff --git a/BuildServer/routes/postreceive.js b/BuildServer/routes/postreceive.js
index da45ccf..de7eff2 100644
--- a/BuildServer/routes/postreceive.js
+++ b/BuildServer/routes/postreceive.js
@@ -1,27 +1,27 @@
 "use strict";

-const builder = require('../lib/builder');
-const commenter = require('../lib/commenter');
+const builder = require("../lib/builder");
+const commenter = require("../lib/commenter");
+
+const getBranchDescription = (options) => `${options.owner}/${options.reponame}:${options.branchname || options.branch}`;

 const processPush = (req, res, payload) => {
     const repository = payload.repository;
     const options = {
-        app: req.app,
-        url: repository.url,
-        owner: repository.owner.name,
-        reponame: repository.name,
-        rev: payload.after,
-        branch: payload.ref
+        "app": req.app,
+        "branch": payload.ref,
+        "owner": repository.owner.name,
+        "reponame": repository.name,
+        "rev": payload.after,
+        "url": repository.url
     };

-    console.log("Got push event for " + options.owner + "/" + options.reponame + ":" + options.branch);
+    console.log(`Got push event for ${getBranchDescription(options)}`);

     builder.build(options, (err, result) => {
         console.log("Done processing request from GitHub");
-        console.log("Error: " + err);
-        //console.log("Result:");
-        //console.log(result);
-        res.send("Done processing request from GitHub\r\n" + "Error: " + err + "\r\n" + "Result: " + result);
+        console.log(`Error: ${err}`);
+        res.send(`Done processing request from GitHub\r\nError: ${err}\r\nResult: ${result}`);
     });
 };

@@ -32,45 +32,45 @@ const processPullRequest = (req, res, payload) => {
     const head = pullRequest.head;
     const headRepo = head.repo;
     const headRepoOptions = {
-        url: headRepo.url,
-        owner: headRepo.owner.name || headRepo.owner.login,
-        reponame: headRepo.name,
-        rev: head.sha,
-        branchname: head.ref,
-        branch: "refs/heads/" + head.ref
+        "branch": `refs/heads/${head.ref}`,
+        "branchname": head.ref,
+        "owner": headRepo.owner.name || headRepo.owner.login,
+        "reponame": headRepo.name,
+        "rev": head.sha,
+        "url": headRepo.url
     };
     const base = pullRequest.base;
     const baseRepo = base.repo;
     const baseRepoOptions = {
-        owner: baseRepo.owner.name || baseRepo.owner.login,
-        reponame: baseRepo.name,
-        branchname: base.ref
+        "branchname": base.ref,
+        "owner": baseRepo.owner.name || baseRepo.owner.login,
+        "reponame": baseRepo.name
     };
     const options = {
-        app: req.app,
-        action: action,
-        number: number,
-        headRepoOptions: headRepoOptions,
-        baseRepoOptions: baseRepoOptions
+        action,
+        "app": req.app,
+        baseRepoOptions,
+        headRepoOptions,
+        number
     };
     const masterOptions = {
-        app: req.app,
-        action: action,
-        number: number,
-        headRepoOptions: baseRepoOptions,
-        baseRepoOptions: baseRepoOptions
+        action,
+        "app": req.app,
+        baseRepoOptions,
+        "headRepoOptions": baseRepoOptions,
+        number
     };

-    console.log("Got pull request " + action + " event, from " + headRepoOptions.owner + "/" + headRepoOptions.reponame + ":" + headRepoOptions.branchname + " (" + headRepoOptions.rev + ") to " + baseRepoOptions.owner + "/" + baseRepoOptions.reponame + ":" + baseRepoOptions.branchname);
+    console.log(`Got pull request ${action} event, ` +
+        `from ${getBranchDescription(headRepoOptions)} (${headRepoOptions.rev}) to ${getBranchDescription(baseRepoOptions)}`);

     if (action !== "opened" && action !== "reopened" && action !== "synchronize" && action !== "closed") {
-        //console.log("Got '" + action + "' event:");
-        //console.log(req.body);
         return res.send("Only opened/reopened/synchronize/closed actions are supported");
     }

     if (action === "closed" && !pullRequest.merged) {
         console.log("Pull request closed without merging");
+
         return res.send("Pull request closed without merging");
     }

@@ -78,11 +78,11 @@ const processPullRequest = (req, res, payload) => {
         return res.send("");
     }

-    commenter.commentOnPullRequest(
-        action === "closed" ? masterOptions : options,
+    return commenter.commentOnPullRequest(
+        (action === "closed" && masterOptions) || options,
         (err, data) => {
             if (err) {
-                console.log("Unable to post comment: " + err);
+                console.log(`Unable to post comment: ${err}`);
             }

             res.send(err || data);
@@ -96,7 +96,7 @@ module.exports = (req, res) => {
     }

     const eventType = req.header("x-github-event");
-    const payload = req.body.payload ? JSON.parse(req.body.payload || "{}") : req.body;
+    const payload = (req.body.payload && JSON.parse(req.body.payload)) || req.body;

     if (eventType === "push") {
         return processPush(req, res, payload);
@@ -106,7 +106,7 @@ module.exports = (req, res) => {
         return processPullRequest(req, res, payload);
     }

-    console.log("Got '" + eventType + "' event:");
-    //console.log(req.body);
+    console.log(`Got "${eventType}" event:`);
+
     return res.send("Only push/pull_request events are supported");
 };
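
The payload handling in postreceive.js covers both webhook content types GitHub can send: with application/x-www-form-urlencoded the JSON document arrives as a payload form field, while with application/json the parsed body already is the event object. A small sketch of the same expression against both shapes; the bodies are invented:

    const extractPayload = (body) => (body.payload && JSON.parse(body.payload)) || body;

    // application/x-www-form-urlencoded delivery: JSON arrives as a form field.
    console.log(extractPayload({ "payload": "{\"ref\":\"refs/heads/master\"}" }).ref); // refs/heads/master

    // application/json delivery: the body already is the event object.
    console.log(extractPayload({ "ref": "refs/heads/master" }).ref); // refs/heads/master
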
diff --git a/BuildServer/routes/release.js b/BuildServer/routes/release.js
index 34c8c46..2fc54e5 100644
--- a/BuildServer/routes/release.js
+++ b/BuildServer/routes/release.js
@@ -1,26 +1,30 @@
 "use strict";

-const path = require('path');
-const fs = require('fs');
-const Archiver = require('archiver');
+const path = require("path");
+const fs = require("fs");
+const Archiver = require("archiver");

 const getReport = (releasePath, callback) => {
-    const reportFile = releasePath + "report.json";
+    const reportFile = `${releasePath}report.json`;

     fs.exists(reportFile, (exists) => {
         if (!exists) {
-            return callback("ReportFileNotFound: " + reportFile);
+            return callback(`ReportFileNotFound: ${reportFile}`);
         }

         return fs.readFile(reportFile, (err, dataBuffer) => {
             if (err) {
                 return callback(err, reportFile);
             }
+
             const data = dataBuffer.toString();
+
             if (!data) {
                 return callback("ReportFileNotFound", reportFile);
             }
+
             const report = JSON.parse(data);
+
             return callback(null, report);
         });
     });
@@ -34,24 +38,28 @@ const getDatePart = (report) => {
     const date = new Date(report.date);
     const paddingLeft = (str, paddingValue) => String(paddingValue + str).slice(-paddingValue.length);

-    return date.getFullYear() + "." +
-        paddingLeft(date.getMonth() + 1, "00") + "." +
-        paddingLeft(date.getDate(), "00") + "." +
-        paddingLeft(date.getHours(), "00") + "." +
-        paddingLeft(date.getMinutes(), "00") + "." +
-        paddingLeft(date.getSeconds(), "00");
+    const year = date.getFullYear();
+    const month = paddingLeft(date.getMonth() + 1, "00");
+    const day = paddingLeft(date.getDate(), "00");
+    const hours = paddingLeft(date.getHours(), "00");
+    const minutes = paddingLeft(date.getMinutes(), "00");
+    const seconds = paddingLeft(date.getSeconds(), "00");
+
+    return `${year}.${month}.${day}.${hours}.${minutes}.${seconds}`;
 };

 module.exports = (req, res, next) => {
     const options = {
-        owner: req.params.owner,
-        reponame: req.params.reponame,
-        branchName: req.params.branch,
-        branch: "/refs/heads/" + req.params.branch,
-        rev: req.params.rev
+        "branch": `/refs/heads/${req.params.branch}`,
+        "branchName": req.params.branch,
+        "owner": req.params.owner,
+        "reponame": req.params.reponame,
+        "rev": req.params.rev
     };

-    const releasePath = path.normalize(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/");
+    const releasePathParts = [req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev, ""];
+
+    const releasePath = path.normalize(releasePathParts.join("/"));

     getReport(releasePath, (err, report) => {
         if (err) {
@@ -59,10 +67,12 @@ module.exports = (req, res, next) => {
         }

         const archive = new Archiver("zip");
+
         archive.on("error", next);

-        res.attachment(options.reponame + '.' + getDatePart(report) + '.' + options.rev + '.zip', '.');
+        res.attachment(`${options.reponame}.${getDatePart(report)}.${options.rev}.zip`, ".");
         archive.pipe(res);

         archive.directory(releasePath, false);
-        archive.finalize();
+
+        return archive.finalize();
     });
 };
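
getDatePart's paddingLeft helper left-pads by prepending the padding template and slicing the result to the template's length, so single-digit date parts become two characters. A quick sketch of the pieces, using a fixed date so the output is reproducible:

    const paddingLeft = (str, paddingValue) => String(paddingValue + str).slice(-paddingValue.length);

    console.log(paddingLeft(7, "00"));   // "07"
    console.log(paddingLeft(12, "00"));  // "12"

    const date = new Date(2017, 0, 24, 9, 5, 3); // fixed date for the example
    const datePart = [
        date.getFullYear(),
        paddingLeft(date.getMonth() + 1, "00"),
        paddingLeft(date.getDate(), "00"),
        paddingLeft(date.getHours(), "00"),
        paddingLeft(date.getMinutes(), "00"),
        paddingLeft(date.getSeconds(), "00")
    ].join(".");

    console.log(datePart); // 2017.01.24.09.05.03
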
diff --git a/BuildServer/routes/status.js b/BuildServer/routes/status.js
index 38091c6..d461524 100644
--- a/BuildServer/routes/status.js
+++ b/BuildServer/routes/status.js
@@ -1,31 +1,31 @@
 "use strict";

-const url = require('url');
-const statusProcessor = require('../lib/status-processor');
+const url = require("url");
+const statusProcessor = require("../lib/status-processor");

 const parseOptionsFromReferer = (path, callback) => {
     const pathParts = path.split("/").filter((value) => value);
     const result = {};
+    const [, secondPart, thirdPart] = pathParts;

-    if (pathParts.length < 2) {
+    if (!secondPart) {
         return callback("BadRequest", result);
     }

-    if (pathParts[2] === "tree") {
-        pathParts.splice(2, 1);
+    if (thirdPart === "tree") {
+        [result.owner, result.reponame, , result.branchName, result.rev] = pathParts;
+    } else {
+        [result.owner, result.reponame, result.branchName, result.rev] = pathParts;
     }

-    result.owner = pathParts[0];
-    result.reponame = pathParts[1];
-    result.branchName = pathParts[2];
-    result.rev = pathParts[3];
     return callback(null, result);
 };

-const createShowReport = (res) => (err, options) => {
-    options = options || {};
+const createShowReport = (res) => (err, inputOptions) => {
+    const options = inputOptions || {};
+
     options.err = err;
-    res.render('status', options);
+    res.render("status", options);
 };

 exports.image = (req, res) => {
@@ -40,17 +40,19 @@
         } else if (options.report.err) {
             options.status = "Error";
             options.message = options.report.err;
-        } else if ((options.report.result.warns.$allMessages || []).length > 0) {
+        } else if ((options.report.result.warns.$allMessages || []).length) {
+            const [firstWarn] = options.report.result.warns.$allMessages;
+
             options.status = "Warning";
-            options.message = options.report.result.warns.$allMessages[0].message;
+            options.message = firstWarn.message;
         } else {
             options.status = "OK";
-            if ((options.report.result.infos.$allMessages || []).length > 0) {
-                options.message = options.report.result.infos.$allMessages[options.report.result.infos.$allMessages.length-1].message;
+            if ((options.report.result.infos.$allMessages || []).length) {
+                options.message = options.report.result.infos.$allMessages[options.report.result.infos.$allMessages.length - 1].message;
             }
         }

-        res.setHeader('Content-Type', 'image/svg+xml');
-        res.render('status-image', options);
+        res.setHeader("Content-Type", "image/svg+xml");
+        res.render("status-image", options);
     };

     parseOptionsFromReferer(url.parse(req.headers.referer || "").pathname || "", (err, options) => {
         if (err) {
             return handle(err, options);
         }

-        statusProcessor.getReport(req.app, options, (err, options) => handle(err, options));
+        return statusProcessor.getReport(req.app, options, handle);
     });
 };

 exports.page = (req, res) => {
     const options = {
-        owner: req.params.owner,
-        reponame: req.params.reponame,
-        branchName: req.params.branch,
-        branch: "/refs/heads/" + req.params.branch,
-        rev: req.params.rev
+        "branch": `/refs/heads/${req.params.branch}`,
+        "branchName": req.params.branch,
+        "owner": req.params.owner,
+        "reponame": req.params.reponame,
+        "rev": req.params.rev
     };

     statusProcessor.getReport(req.app, options, createShowReport(res));