Tabs replaced with spaces

dependabot/npm_and_yarn/BuildServer/eslint-7.2.0
Inga 🏳‍🌈 8 years ago
parent ea8648e438
commit 48451a6721
  1. 10
      BuildServer/app.js
  2. 280
      BuildServer/lib/builder.js
  3. 314
      BuildServer/lib/commenter.js
  4. 114
      BuildServer/lib/git/copy.js
  5. 80
      BuildServer/lib/git/loader.js
  6. 12
      BuildServer/lib/mail-sender.js
  7. 144
      BuildServer/lib/status-processor.js
  8. 122
      BuildServer/lib/task-processor.js
  9. 48
      BuildServer/lib/tasks/cleanupafterdotnetbuild.js
  10. 10
      BuildServer/lib/tasks/conditional.js
  11. 26
      BuildServer/lib/tasks/copy.js
  12. 48
      BuildServer/lib/tasks/copyglob.js
  13. 46
      BuildServer/lib/tasks/cssnano.js
  14. 54
      BuildServer/lib/tasks/cssnanoall.js
  15. 28
      BuildServer/lib/tasks/deletefromcode.js
  16. 22
      BuildServer/lib/tasks/dotnetbuild.js
  17. 32
      BuildServer/lib/tasks/dotnetbuildandtest.js
  18. 80
      BuildServer/lib/tasks/dotnetbuilderwrapper.js
  19. 62
      BuildServer/lib/tasks/dotnetbuildwithoutcleanup.js
  20. 104
      BuildServer/lib/tasks/dotnetcheckstyle.js
  21. 52
      BuildServer/lib/tasks/dotnetcompile.js
  22. 44
      BuildServer/lib/tasks/dotnetnugetpack.js
  23. 48
      BuildServer/lib/tasks/dotnetnugetprocess.js
  24. 46
      BuildServer/lib/tasks/dotnetnugetpush.js
  25. 8
      BuildServer/lib/tasks/dotnetnugetpushonly.js
  26. 20
      BuildServer/lib/tasks/dotnetnugetrestore.js
  27. 6
      BuildServer/lib/tasks/dotnetnunit.js
  28. 62
      BuildServer/lib/tasks/dotnetnunitall.js
  29. 68
      BuildServer/lib/tasks/dotnetpackwebapp.js
  30. 98
      BuildServer/lib/tasks/dotnetrewrite.js
  31. 24
      BuildServer/lib/tasks/echo.js
  32. 34
      BuildServer/lib/tasks/eslintbrowser.js
  33. 56
      BuildServer/lib/tasks/eslintbrowserall.js
  34. 2
      BuildServer/lib/tasks/noop.js
  35. 56
      BuildServer/lib/tasks/packform.js
  36. 2
      BuildServer/lib/tasks/parallel.js
  37. 8
      BuildServer/lib/tasks/sequential.js
  38. 24
      BuildServer/lib/tasks/uglifyjs.js
  39. 54
      BuildServer/lib/tasks/uglifyjsall.js
  40. 24
      BuildServer/lib/tasks/writefile.js
  41. 24
      BuildServer/lib/tasks/zip.js
  42. 18
      BuildServer/routes/artifact.js
  43. 22
      BuildServer/routes/manual.js
  44. 190
      BuildServer/routes/postreceive.js
  45. 94
      BuildServer/routes/release.js
  46. 114
      BuildServer/routes/status.js

@ -34,17 +34,17 @@ app.use(serveStatic(path.join(__dirname, 'public')));
// development only
if ('development' === app.get('env')) {
app.use(errorhandler());
app.use(errorhandler());
}
app.route('/').get(routes.index);
app.route('/github/postreceive')
.post(routes.postreceive)
.get((req, res) => res.send("Only automated POST requests are allowed for postreceive route"));
.post(routes.postreceive)
.get((req, res) => res.send("Only automated POST requests are allowed for postreceive route"));
app.route('/manual')
.get(routes.manual.get)
.post(routes.manual.post);
.get(routes.manual.get)
.post(routes.manual.post);
app.route('/status/:owner/:reponame/:branch/:rev?').get(routes.status.page);
app.route('/pos-github.payonline.ru/*').get(routes.status.pageFromGithub);

@ -12,149 +12,149 @@ const settings = require('../settings');
const codePostfix = "";
const notifyStatus = (options, callback) => {
const status = {
owner: options.owner,
repo: options.reponame,
sha: options.hash,
state: options.state,
target_url: settings.siteRoot + "status/" + options.owner + "/" + options.reponame + "/" + options.hash,
description: ((options.description || "") + "").substr(0, 140)
};
settings.createGithub(options.owner).repos.createStatus(status, (err, result) => {
if (err) {
console.log("Error while creating status: " + err);
console.log(status);
return callback(err);
}
return callback();
});
const status = {
owner: options.owner,
repo: options.reponame,
sha: options.hash,
state: options.state,
target_url: settings.siteRoot + "status/" + options.owner + "/" + options.reponame + "/" + options.hash,
description: ((options.description || "") + "").substr(0, 140)
};
settings.createGithub(options.owner).repos.createStatus(status, (err, result) => {
if (err) {
console.log("Error while creating status: " + err);
console.log(status);
return callback(err);
}
return callback();
});
};
const build = (options, callback) => {
const url = options.url;
const owner = options.owner;
const reponame = options.reponame;
const rev = options.rev;
const branch = options.branch;
const skipGitLoader = options.skipGitLoader;
const local = options.app.get('gitpath') + "/r/";
const tmp = options.app.get('tmpcodepath') + "/" + rev.substr(0, 15);
const exported = tmp + codePostfix;
const release = options.app.get('releasepath') + "/" + owner + "/" + reponame + "/" + branch + "/" + rev;
const statusQueue = async.queue((task, callback) => task(callback), 1);
const actualGitLoader = skipGitLoader ? (options, callback) => process.nextTick(callback) : gitLoader;
const date = new Date();
const versionInfo = date.getFullYear() + "." +
(date.getMonth() + 1) + "." +
date.getDate() + "." +
(date.getHours() * 100 + date.getMinutes()) + "; " +
"built from " + rev + "; " +
"repository: " + owner + "/" + reponame + "; " +
"branch: " + branch;
statusQueue.push((callback) => notifyStatus({
state: "pending",
description: "Preparing to build...",
owner: owner,
reponame: reponame,
hash: rev
}, callback));
fse.mkdirsSync(release);
fs.writeFileSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/" + branch + "/latest.id", rev);
fse.mkdirsSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/$revs");
fs.writeFileSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/$revs/" + rev + ".branch", branch);
const done = (err, result) => {
const errorMessage = result && result.errors ? ((result.errors.$allMessages || [])[0] || {}).message : err;
const warnMessage = result && result.warns ? ((result.warns.$allMessages || [])[0] || {}).message : err;
const infoMessage = result && result.infos ? ((result.infos.$allMessages || []).slice(-1)[0] || {}).message : err;
fs.writeFile(release + "/report.json", JSON.stringify({date: Date.now(), err: err, result: result}), (writeErr) => {
statusQueue.push((callback) => async.parallel([
(callback) => notifyStatus({
state: err ? "error" : "success",
description: errorMessage || warnMessage || infoMessage || "Success",
owner: owner,
reponame: reponame,
hash: rev
}, callback),
(callback) => mailSender.send({
from: settings.smtp.sender,
to: settings.smtp.receiver,
subject: (err ? "Build failed for " : "Successfully built ") + owner + "/" + reponame + "/" + branch,
headers: {
'X-Laziness-level': 1000
},
text: ("Build status URL: " + settings.siteRoot + "status/" + owner + "/" + reponame + "/" + rev + "\r\n\r\n") +
(err ? ("Error message: " + err + "\r\n\r\n") : "") +
((!result || !result.messages || !result.messages.$allMessages) ? JSON.stringify(result, null, 4) : result.messages.$allMessages.map(msg => msg.prefix + "\t" + msg.message).join("\r\n"))
}, callback),
(callback) => {
if (err) {
return process.nextTick(callback);
}
return fse.remove(tmp, callback);
}
], callback));
if (writeErr) {
return callback(writeErr);
}
return callback(err, result);
});
};
actualGitLoader({
remote: url + ".git",
local: local,
branch: branch,
hash: rev,
exported: tmp + codePostfix
}, (err) => {
if (err) {
console.log(err);
return done("Git fetch error: " + err);
}
console.log("Done loading from git");
fs.exists(exported + "/mbs.json", (exists) => {
if (!exists) {
return done(null, "MBSNotFound");
}
fs.readFile(exported + "/mbs.json", (err, data) => {
if (err) {
return done(err, "MBSUnableToRead");
}
let task;
try {
task = JSON.parse(data);
} catch(ex) {
console.log("Malformed data: " + data);
return done(ex, "MBSMalformed");
}
processor.processTask(task, {
owner: owner,
reponame: reponame,
branch: branch,
rev: rev,
tmp: tmp,
exported: exported,
release: release,
versionInfo: versionInfo
}, (err, result) => {
if (err) {
return done(err, result);
}
return done(err, result);
});
});
});
});
const url = options.url;
const owner = options.owner;
const reponame = options.reponame;
const rev = options.rev;
const branch = options.branch;
const skipGitLoader = options.skipGitLoader;
const local = options.app.get('gitpath') + "/r/";
const tmp = options.app.get('tmpcodepath') + "/" + rev.substr(0, 15);
const exported = tmp + codePostfix;
const release = options.app.get('releasepath') + "/" + owner + "/" + reponame + "/" + branch + "/" + rev;
const statusQueue = async.queue((task, callback) => task(callback), 1);
const actualGitLoader = skipGitLoader ? (options, callback) => process.nextTick(callback) : gitLoader;
const date = new Date();
const versionInfo = date.getFullYear() + "." +
(date.getMonth() + 1) + "." +
date.getDate() + "." +
(date.getHours() * 100 + date.getMinutes()) + "; " +
"built from " + rev + "; " +
"repository: " + owner + "/" + reponame + "; " +
"branch: " + branch;
statusQueue.push((callback) => notifyStatus({
state: "pending",
description: "Preparing to build...",
owner: owner,
reponame: reponame,
hash: rev
}, callback));
fse.mkdirsSync(release);
fs.writeFileSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/" + branch + "/latest.id", rev);
fse.mkdirsSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/$revs");
fs.writeFileSync(options.app.get('releasepath') + "/" + owner + "/" + reponame + "/$revs/" + rev + ".branch", branch);
const done = (err, result) => {
const errorMessage = result && result.errors ? ((result.errors.$allMessages || [])[0] || {}).message : err;
const warnMessage = result && result.warns ? ((result.warns.$allMessages || [])[0] || {}).message : err;
const infoMessage = result && result.infos ? ((result.infos.$allMessages || []).slice(-1)[0] || {}).message : err;
fs.writeFile(release + "/report.json", JSON.stringify({date: Date.now(), err: err, result: result}), (writeErr) => {
statusQueue.push((callback) => async.parallel([
(callback) => notifyStatus({
state: err ? "error" : "success",
description: errorMessage || warnMessage || infoMessage || "Success",
owner: owner,
reponame: reponame,
hash: rev
}, callback),
(callback) => mailSender.send({
from: settings.smtp.sender,
to: settings.smtp.receiver,
subject: (err ? "Build failed for " : "Successfully built ") + owner + "/" + reponame + "/" + branch,
headers: {
'X-Laziness-level': 1000
},
text: ("Build status URL: " + settings.siteRoot + "status/" + owner + "/" + reponame + "/" + rev + "\r\n\r\n") +
(err ? ("Error message: " + err + "\r\n\r\n") : "") +
((!result || !result.messages || !result.messages.$allMessages) ? JSON.stringify(result, null, 4) : result.messages.$allMessages.map(msg => msg.prefix + "\t" + msg.message).join("\r\n"))
}, callback),
(callback) => {
if (err) {
return process.nextTick(callback);
}
return fse.remove(tmp, callback);
}
], callback));
if (writeErr) {
return callback(writeErr);
}
return callback(err, result);
});
};
actualGitLoader({
remote: url + ".git",
local: local,
branch: branch,
hash: rev,
exported: tmp + codePostfix
}, (err) => {
if (err) {
console.log(err);
return done("Git fetch error: " + err);
}
console.log("Done loading from git");
fs.exists(exported + "/mbs.json", (exists) => {
if (!exists) {
return done(null, "MBSNotFound");
}
fs.readFile(exported + "/mbs.json", (err, data) => {
if (err) {
return done(err, "MBSUnableToRead");
}
let task;
try {
task = JSON.parse(data);
} catch(ex) {
console.log("Malformed data: " + data);
return done(ex, "MBSMalformed");
}
processor.processTask(task, {
owner: owner,
reponame: reponame,
branch: branch,
rev: rev,
tmp: tmp,
exported: exported,
release: release,
versionInfo: versionInfo
}, (err, result) => {
if (err) {
return done(err, result);
}
return done(err, result);
});
});
});
});
};
exports.build = build;

@ -9,185 +9,185 @@ const versionNamePattern = /^v\d+(\.\d+)*$/;
const masterNamePattern = /^master$/;
const writeComment = (options, message, callback) => options.github.issues.createComment({
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
number: options.number,
body: message
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
number: options.number,
body: message
}, callback);
const closePullRequest = (options, message, callback) => writeComment(options, message, (err) => {
if (err) {
return callback(err);
}
return options.github.issues.edit({
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
number: options.number,
state: "closed"
}, callback);
if (err) {
return callback(err);
}
return options.github.issues.edit({
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
number: options.number,
state: "closed"
}, callback);
});
const checkHasIssue = (options, issueNumber, callback) => options.github.issues.get({
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
number: issueNumber
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
number: issueNumber
}, (err, result) => {
if (err && err.code !== 404) {
return callback(err);
}
if (err && err.code !== 404) {
return callback(err);
}
if (err || result.number.toString() !== issueNumber) {
return callback(undefined, false);
}
if (err || result.number.toString() !== issueNumber) {
return callback(undefined, false);
}
if (result.pull_request && result.pull_request.url) {
return callback(undefined, false);
}
if (result.pull_request && result.pull_request.url) {
return callback(undefined, false);
}
return callback(undefined, true, result.title);
return callback(undefined, true, result.title);
});
const checkHasReleases = (options, callback) => options.github.repos.getReleases({
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
per_page: 1
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
per_page: 1
}, (err, result) => {
if (err) {
return callback(err);
}
if (err) {
return callback(err);
}
return callback(undefined, result && result.length);
return callback(undefined, result && result.length);
});
const checkPullRequest = (options, callback) => {
const head = options.headRepoOptions;
const base = options.baseRepoOptions;
if (head.reponame !== base.reponame) {
return closePullRequest(options, "Base and head repository names should match", callback);
}
if (head.owner === base.owner) {
if (!versionNamePattern.test(head.branchname) || !masterNamePattern.test(base.branchname)) {
return closePullRequest(options, "Only merging from version to master is allowed", callback);
}
return checkHasReleases(options, (err, hasReleases) => {
if (err) {
return writeComment(options, "Unable to check for releases", callback);
}
if (!hasReleases) {
return closePullRequest(options, "Merging from version to master is only allowed for repositories with releases", callback);
}
if (options.action === "opened") {
return writeComment(options, "Switching master branch to " + head.branchname + " release", callback);
}
return process.nextTick(callback);
});
}
if (!featureNamePattern.test(head.branchname)) {
return closePullRequest(options, "Only merging from feature branch is allowed (pattern: `" + featureNamePattern.toString() + "`)", callback);
}
if (!versionNamePattern.test(base.branchname) && !masterNamePattern.test(base.branchname)) {
return closePullRequest(options, "Only merging to master or version branch is allowed; merging to '" + base.branchname + "' is not supported", callback);
}
const issueNumber = featureNamePattern.exec(head.branchname)[1];
return checkHasIssue(options, issueNumber, (err, hasIssue, issueTitle) => {
if (err) {
return writeComment(options, "Unable to check for issue:\r\n\r\n" + err.message, callback);
}
if (!hasIssue) {
return closePullRequest(options, "Unable to find issue #" + issueNumber, callback);
}
const shouldHaveReleases = versionNamePattern.test(base.branchname);
return checkHasReleases(options, (err, hasReleases) => {
if (err) {
return writeComment(options, "Unable to check for releases", callback);
}
if (shouldHaveReleases && !hasReleases) {
return closePullRequest(options, "Merging from feature to version is only allowed for repositories with releases", callback);
}
if (!shouldHaveReleases && hasReleases) {
return closePullRequest(options, "Merging from feature to master is only allowed for repositories without releases", callback);
}
if (options.action === "opened") {
return writeComment(options, "Merging feature #" + issueNumber + " (" + issueTitle + ") to " + base.branchname + (shouldHaveReleases ? " release" : ""), callback);
}
return process.nextTick(callback);
});
});
const head = options.headRepoOptions;
const base = options.baseRepoOptions;
if (head.reponame !== base.reponame) {
return closePullRequest(options, "Base and head repository names should match", callback);
}
if (head.owner === base.owner) {
if (!versionNamePattern.test(head.branchname) || !masterNamePattern.test(base.branchname)) {
return closePullRequest(options, "Only merging from version to master is allowed", callback);
}
return checkHasReleases(options, (err, hasReleases) => {
if (err) {
return writeComment(options, "Unable to check for releases", callback);
}
if (!hasReleases) {
return closePullRequest(options, "Merging from version to master is only allowed for repositories with releases", callback);
}
if (options.action === "opened") {
return writeComment(options, "Switching master branch to " + head.branchname + " release", callback);
}
return process.nextTick(callback);
});
}
if (!featureNamePattern.test(head.branchname)) {
return closePullRequest(options, "Only merging from feature branch is allowed (pattern: `" + featureNamePattern.toString() + "`)", callback);
}
if (!versionNamePattern.test(base.branchname) && !masterNamePattern.test(base.branchname)) {
return closePullRequest(options, "Only merging to master or version branch is allowed; merging to '" + base.branchname + "' is not supported", callback);
}
const issueNumber = featureNamePattern.exec(head.branchname)[1];
return checkHasIssue(options, issueNumber, (err, hasIssue, issueTitle) => {
if (err) {
return writeComment(options, "Unable to check for issue:\r\n\r\n" + err.message, callback);
}
if (!hasIssue) {
return closePullRequest(options, "Unable to find issue #" + issueNumber, callback);
}
const shouldHaveReleases = versionNamePattern.test(base.branchname);
return checkHasReleases(options, (err, hasReleases) => {
if (err) {
return writeComment(options, "Unable to check for releases", callback);
}
if (shouldHaveReleases && !hasReleases) {
return closePullRequest(options, "Merging from feature to version is only allowed for repositories with releases", callback);
}
if (!shouldHaveReleases && hasReleases) {
return closePullRequest(options, "Merging from feature to master is only allowed for repositories without releases", callback);
}
if (options.action === "opened") {
return writeComment(options, "Merging feature #" + issueNumber + " (" + issueTitle + ") to " + base.branchname + (shouldHaveReleases ? " release" : ""), callback);
}
return process.nextTick(callback);
});
});
};
const getStatusMessageFromRelease = (app, options, callback) => {
const releaseDir = app.get("releasepath") + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev;
const reportFile = releaseDir + "/report.json";
options.attemptsGetReport = (options.attemptsGetReport || 0) + 1;
fs.exists(reportFile, (exists) => {
if (!exists) {
return setTimeout(() => fs.exists(releaseDir, (dirExists) => {
if (!dirExists) {
return callback("Release directory not found. Probably repository hooks are not configured");
}
if (options.attemptsGetReport > 100) {
return callback("Report file not found");
}
//maybe it is building right now
return setTimeout(() => getStatusMessageFromRelease(app, options, callback), 10000);
}), 2000);
}
return setTimeout(() => fs.readFile(reportFile, (err, dataBuffer) => {
if (err) {
return callback(err);
}
const data = dataBuffer.toString();
if (!data) {
return callback("Report file not found");
}
const report = JSON.parse(data);
if (report.result === "MBSNotFound") {
return callback("mbs.json is not found");
}
if (report.result && ((report.result.errors || {}).$allMessages || []).length + ((report.result.warns || {}).$allMessages || []).length > 0) {
return callback(_.map(
(report.result.errors || {}).$allMessages || [], (message) => "ERR: " + message.message
).concat(_.map(
(report.result.warns || {}).$allMessages || [], (message) => "WARN: " + message.message
)).join("\r\n"));
}
if (!report.result || report.err) {
return callback("CRITICAL ERROR: " + report.err);
}
if ((report.result.infos.$allMessages || []).length > 0) {
return callback(undefined, report.result.infos.$allMessages[report.result.infos.$allMessages.length-1].message);
}
return callback(undefined, "OK");
}), 1000);
});
const releaseDir = app.get("releasepath") + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev;
const reportFile = releaseDir + "/report.json";
options.attemptsGetReport = (options.attemptsGetReport || 0) + 1;
fs.exists(reportFile, (exists) => {
if (!exists) {
return setTimeout(() => fs.exists(releaseDir, (dirExists) => {
if (!dirExists) {
return callback("Release directory not found. Probably repository hooks are not configured");
}
if (options.attemptsGetReport > 100) {
return callback("Report file not found");
}
//maybe it is building right now
return setTimeout(() => getStatusMessageFromRelease(app, options, callback), 10000);
}), 2000);
}
return setTimeout(() => fs.readFile(reportFile, (err, dataBuffer) => {
if (err) {
return callback(err);
}
const data = dataBuffer.toString();
if (!data) {
return callback("Report file not found");
}
const report = JSON.parse(data);
if (report.result === "MBSNotFound") {
return callback("mbs.json is not found");
}
if (report.result && ((report.result.errors || {}).$allMessages || []).length + ((report.result.warns || {}).$allMessages || []).length > 0) {
return callback(_.map(
(report.result.errors || {}).$allMessages || [], (message) => "ERR: " + message.message
).concat(_.map(
(report.result.warns || {}).$allMessages || [], (message) => "WARN: " + message.message
)).join("\r\n"));
}
if (!report.result || report.err) {
return callback("CRITICAL ERROR: " + report.err);
}
if ((report.result.infos.$allMessages || []).length > 0) {
return callback(undefined, report.result.infos.$allMessages[report.result.infos.$allMessages.length-1].message);
}
return callback(undefined, "OK");
}), 1000);
});
};
exports.commentOnPullRequest = (options, callback) => {
options.github = settings.createGithub(options.baseRepoOptions.owner);
return checkPullRequest(options, (err, successMessage) => getStatusMessageFromRelease(options.app, options.headRepoOptions, (err, successMessage) => {
const message = err ? ("Was not built:\r\n\r\n```\r\n" + err.substring(0, 64000).replace(/```/g, "` ` `") + "\r\n```\r\n\r\nDO NOT MERGE!") : ("Build OK\r\n\r\n" + successMessage);
const statusUrlMessage = "Build status URL: " + settings.siteRoot + "status/" + options.headRepoOptions.owner + "/" + options.headRepoOptions.reponame + "/" + options.headRepoOptions.rev + "\r\n\r\n";
return writeComment(options, message + "\r\n\r\n" + statusUrlMessage, callback);
}));
options.github = settings.createGithub(options.baseRepoOptions.owner);
return checkPullRequest(options, (err, successMessage) => getStatusMessageFromRelease(options.app, options.headRepoOptions, (err, successMessage) => {
const message = err ? ("Was not built:\r\n\r\n```\r\n" + err.substring(0, 64000).replace(/```/g, "` ` `") + "\r\n```\r\n\r\nDO NOT MERGE!") : ("Build OK\r\n\r\n" + successMessage);
const statusUrlMessage = "Build status URL: " + settings.siteRoot + "status/" + options.headRepoOptions.owner + "/" + options.headRepoOptions.reponame + "/" + options.headRepoOptions.rev + "\r\n\r\n";
return writeComment(options, message + "\r\n\r\n" + statusUrlMessage, callback);
}));
};

@ -7,70 +7,70 @@ const async = require('async');
const Copier = require('recursive-tree-copy').Copier;
const gitToFsCopier = new Copier({
concurrency: 4,
walkSourceTree: (tree) => {
const emitter = new EventEmitter();
process.nextTick(() => {
let entries;
try {
entries = tree.gitTree.entries();
} catch(err) {
return emitter.emit('error', err);
}
concurrency: 4,
walkSourceTree: (tree) => {
const emitter = new EventEmitter();
process.nextTick(() => {
let entries;
try {
entries = tree.gitTree.entries();
} catch(err) {
return emitter.emit('error', err);
}
async.parallel(entries.map((entry) => (callback) => {
if (entry.isTree()) {
entry.getTree((err, subTree) => {
if (err) {
return callback(err);
}
async.parallel(entries.map((entry) => (callback) => {
if (entry.isTree()) {
entry.getTree((err, subTree) => {
if (err) {
return callback(err);
}
emitter.emit('tree', { gitTree: subTree, name: entry.name() });
callback();
});
} else if (entry.isFile()) {
emitter.emit('leaf', entry);
callback();
} else {
callback();
}
}), (err) => {
if (err) {
return emitter.emit('error', err);
}
emitter.emit('tree', { gitTree: subTree, name: entry.name() });
callback();
});
} else if (entry.isFile()) {
emitter.emit('leaf', entry);
callback();
} else {
callback();
}
}), (err) => {
if (err) {
return emitter.emit('error', err);
}
return emitter.emit('done');
});
});
return emitter;
},
createTargetTree: (tree, targetDir, callback) => {
const targetSubdir = path.join(targetDir, tree.name);
fs.mkdir(targetSubdir, (err) => {
if (err && err.code !== 'EEXIST' /* workaround for broken trees */) {
return callback(err);
}
return emitter.emit('done');
});
});
return emitter;
},
createTargetTree: (tree, targetDir, callback) => {
const targetSubdir = path.join(targetDir, tree.name);
fs.mkdir(targetSubdir, (err) => {
if (err && err.code !== 'EEXIST' /* workaround for broken trees */) {
return callback(err);
}
callback(undefined, targetSubdir);
});
},
finalizeTargetTree: (targetSubdir, callback) => callback(),
copyLeaf: (entry, targetDir, callback) => {
const targetPath = path.join(targetDir, entry.name());
entry.getBlob((err, blob) => {
if (err) {
return callback(err);
}
callback(undefined, targetSubdir);
});
},
finalizeTargetTree: (targetSubdir, callback) => callback(),
copyLeaf: (entry, targetDir, callback) => {
const targetPath = path.join(targetDir, entry.name());
entry.getBlob((err, blob) => {
if (err) {
return callback(err);
}
fs.writeFile(targetPath, blob.content(), callback);
});
}
fs.writeFile(targetPath, blob.content(), callback);
});
}
});
exports.gitToFs = (commit, exportDir, callback) => commit.getTree((err, tree) => {
if (err) {
return callback(err);
}
if (err) {
return callback(err);
}
gitToFsCopier.copy({ gitTree: tree, name: "." }, exportDir, callback);
gitToFsCopier.copy({ gitTree: tree, name: "." }, exportDir, callback);
});

@ -4,61 +4,61 @@ const nodegit = require('nodegit');
const fse = require('fs-extra');
const gitToFs = require('./copy').gitToFs;
const mkdirs = (path) => {
/*jslint stupid: true */
fse.mkdirsSync(path);
/*jslint stupid: true */
fse.mkdirsSync(path);
};
const removedirs = (path) => {
/*jslint stupid: true */
fse.removeSync(path);
/*jslint stupid: true */
fse.removeSync(path);
};
/*
options = {
"remote": "https://github.com/visionmedia/express.git",
"local": "D:\\data\\repositories\\visionmedia\\express.git\\",
"branch": "1.x",
"hash": "82e15cf321fccf3215068814d1ea1aeb3581ddb3",
"exported": "D:\\data\\exportedsource\\visionmedia\\express\\82e15cf321fccf3215068814d1ea1aeb3581ddb3\\",
"remote": "https://github.com/visionmedia/express.git",
"local": "D:\\data\\repositories\\visionmedia\\express.git\\",
"branch": "1.x",
"hash": "82e15cf321fccf3215068814d1ea1aeb3581ddb3",
"exported": "D:\\data\\exportedsource\\visionmedia\\express\\82e15cf321fccf3215068814d1ea1aeb3581ddb3\\",
}
*/
module.exports = (options, globalCallback) => {
let url = options.remote;
const path = options.local + "/" + options.hash;
const exported = options.exported;
let url = options.remote;
const path = options.local + "/" + options.hash;
const exported = options.exported;
removedirs(path);
mkdirs(path);
removedirs(path);
mkdirs(path);
if (url.substr(0, 8) === "https://") {
url = "git://" + url.substr(8);
}
if (url.substr(0, 8) === "https://") {
url = "git://" + url.substr(8);
}
console.log("Cloning %s to %s", url, path);
console.log("Cloning %s to %s", url, path);
nodegit.Repository.init(path, 1)
.catch(globalCallback)
.then((repo) => nodegit.Remote.create(repo, "origin", url)
.catch(globalCallback)
.then((remote) => remote.fetch([options.branch])
.catch(globalCallback)
.then((number) => {
if (number) {
return globalCallback("Failed to fetch commit: error number " + number);
}
nodegit.Repository.init(path, 1)
.catch(globalCallback)
.then((repo) => nodegit.Remote.create(repo, "origin", url)
.catch(globalCallback)
.then((remote) => remote.fetch([options.branch])
.catch(globalCallback)
.then((number) => {
if (number) {
return globalCallback("Failed to fetch commit: error number " + number);
}
console.log("Cloned %s to %s", url, path);
console.log("Cloned %s to %s", url, path);
repo.getCommit(options.hash)
.catch(globalCallback)
.then((commit) => {
removedirs(exported);
mkdirs(exported);
repo.getCommit(options.hash)
.catch(globalCallback)
.then((commit) => {
removedirs(exported);
mkdirs(exported);
gitToFs(commit, exported, (err, result) => {
repo.free();
return globalCallback(err, result);
});
});
})));
gitToFs(commit, exported, (err, result) => {
repo.free();
return globalCallback(err, result);
});
});
})));
};

@ -4,12 +4,12 @@ const nodemailer = require('nodemailer');
const settings = require('../settings');
exports.send = (message, callback) => {
return process.nextTick(callback);
return process.nextTick(callback);
/*
var transport = nodemailer.createTransport("SMTP", settings.smtp);
transport.sendMail(message, (err, result) => {
transport.close();
callback(err, result);
});
var transport = nodemailer.createTransport("SMTP", settings.smtp);
transport.sendMail(message, (err, result) => {
transport.close();
callback(err, result);
});
*/
};

@ -4,95 +4,95 @@ const fs = require('fs');
const glob = require('glob');
const addBranchInfo = (app, options, callback) => {
const branchFile = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/$revs/" + options.rev + ".branch";
fs.exists(branchFile, (exists) => {
if (!exists) {
return callback("BranchFileNotFound", options);
}
fs.readFile(branchFile, (err, data) => {
if (err) {
return callback(err, options);
}
options.branch = data.toString();
options.branchName = options.branch.split("/").pop();
return callback(null, options);
});
});
const branchFile = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/$revs/" + options.rev + ".branch";
fs.exists(branchFile, (exists) => {
if (!exists) {
return callback("BranchFileNotFound", options);
}
fs.readFile(branchFile, (err, data) => {
if (err) {
return callback(err, options);
}
options.branch = data.toString();
options.branchName = options.branch.split("/").pop();
return callback(null, options);
});
});
};
const addRevInfo = (app, options, callback) => {
const revFile = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/latest.id";
fs.exists(revFile, (exists) => {
if (!exists) {
return callback("RevFileNotFound", options);
}
fs.readFile(revFile, (err, data) => {
if (err) {
return callback(err, options);
}
options.rev = data.toString();
return callback(null, options);
});
});
const revFile = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/latest.id";
fs.exists(revFile, (exists) => {
if (!exists) {
return callback("RevFileNotFound", options);
}
fs.readFile(revFile, (err, data) => {
if (err) {
return callback(err, options);
}
options.rev = data.toString();
return callback(null, options);
});
});
};
// Normalizes request options into { owner, reponame, rev, branch, branchName }.
// Accepts either an explicit 40-hex rev, a branchName that is itself a 40-hex
// rev, or a plain branch name (defaulting to "master"), and delegates to
// addBranchInfo / addRevInfo to fill in the missing half.
// NOTE: the diff-merge duplication of this body has been removed.
const parseOptions = (app, options, callback) => {
    const result = {};
    result.owner = options.owner;
    result.reponame = options.reponame;
    // A rev, when given, must be a full 40-character hex SHA.
    if (options.rev && !(/^[\da-f]{40}$/i).test(options.rev)) {
        return callback("Wrong rev format: " + options.rev, options);
    }
    if (options.rev) {
        result.rev = options.rev;
        return addBranchInfo(app, result, callback);
    } else if (/^[\da-f]{40}$/i.test(options.branchName)) {
        // The "branch name" is actually a commit SHA; treat it as a rev.
        result.rev = options.branchName;
        return addBranchInfo(app, result, callback);
    } else {
        result.branchName = options.branchName || "master";
        result.branch = "refs/heads/" + result.branchName;
        return addRevInfo(app, result, callback);
    }
};
// Loads the build report for a resolved (owner, reponame, branch, rev) tuple:
// lists all released files under the release directory (options.files) and
// parses "<releaseDir>/report.json" into options.report.
// Calls callback("ReportFileNotFound", options) when the report is missing or empty.
// NOTE: the diff-merge duplication of this body has been removed.
const loadReport = (app, options, callback) => {
    const releaseDir = app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev;
    // mark:true appends "/" to directories so callers can distinguish them.
    glob("**", {cwd: releaseDir, mark: true}, (err, files) => {
        if (err) {
            return callback(err, options);
        }
        const reportFile = releaseDir + "/report.json";
        options.files = files;
        fs.exists(reportFile, (exists) => {
            if (!exists) {
                return callback("ReportFileNotFound", options);
            }
            fs.readFile(reportFile, (err, dataBuffer) => {
                if (err) {
                    return callback(err, options);
                }
                const data = dataBuffer.toString();
                // An empty report file is treated the same as a missing one.
                if (!data) {
                    return callback("ReportFileNotFound", options);
                }
                options.report = JSON.parse(data);
                return callback(null, options);
            });
        });
    });
};
exports.getReport = (app, options, callback) => parseOptions(app, options, (err, result) => {
if (err) {
return callback(err, {});
}
if (err) {
return callback(err, {});
}
return loadReport(app, result, callback);
return loadReport(app, result, callback);
});

@ -2,76 +2,76 @@
//TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
// TaskProcessor runs one task, routing its error/warn/info messages up to the
// outer processor with a "taskname/prefix" path, and collecting errors so that
// done() can report them all to the callback at once.
// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
const TaskProcessor = function (task, outerProcessor, callback) {
    if (!this) {
        // Called without `new`: forward ALL arguments. The previous code only
        // forwarded `task`, silently dropping outerProcessor and callback.
        return new TaskProcessor(task, outerProcessor, callback);
    }
    const self = this;
    let taskWorker = undefined;
    const errors = [];
    const process = () => taskWorker.process();
    // Builds the message path: "name/prefix", or whichever of the two exists.
    const getOuterPrefix = (prefix) => (task.name && prefix) ? (task.name + "/" + prefix) : (task.name || "") + (prefix || "");
    const onError = (message, prefix) => {
        errors.push(message);
        outerProcessor.onError(message, getOuterPrefix(prefix));
    };
    const onWarn = (message, prefix) => outerProcessor.onWarn(message, getOuterPrefix(prefix));
    const onInfo = (message, prefix) => outerProcessor.onInfo(message, getOuterPrefix(prefix));
    // Runs a nested task with this processor as its parent, so its messages
    // are prefixed with this task's name.
    const processTask = (innerTask, innerCallback) => {
        const innerProcessor = new TaskProcessor(innerTask, self, innerCallback);
        innerProcessor.process();
    };
    const done = () => callback(errors.join("\r\n"));
    self.process = process;
    self.onError = onError;
    self.onWarn = onWarn;
    self.onInfo = onInfo;
    self.processTask = processTask;
    self.done = done;
    self.context = outerProcessor.context;
    // Resolve the task implementation by type; stripping everything outside
    // [\w-] prevents path traversal through the require() below.
    const taskImpl = require('./tasks/' + task.type.match(/[\w\-]/g).join(""));
    taskWorker = taskImpl(task.params || {}, self);
};
exports.processTask = (task, context, callback) => {
const errors = {};
const warns = {};
const infos = {};
const messages = {};
const messageProcessor = (list) => {
const f = (list, message, prefix) => {
const parts = prefix.split("/");
let innerList = list;
const errors = {};
const warns = {};
const infos = {};
const messages = {};
const messageProcessor = (list) => {
const f = (list, message, prefix) => {
const parts = prefix.split("/");
let innerList = list;
parts.forEach((part) => {
innerList = (innerList[part] = innerList[part] || {});
});
parts.forEach((part) => {
innerList = (innerList[part] = innerList[part] || {});
});
innerList.$messages = innerList.$messages || [];
innerList.$messages.push(message);
innerList.$messages = innerList.$messages || [];
innerList.$messages.push(message);
list.$allMessages = list.$allMessages || [];
list.$allMessages.push({ prefix: prefix, message: message });
};
list.$allMessages = list.$allMessages || [];
list.$allMessages.push({ prefix: prefix, message: message });
};
return (message, prefix) => {
f(list, message, prefix);
f(messages, message, prefix);
};
};
const processor = new TaskProcessor(task, {
onError: messageProcessor(errors),
onWarn: messageProcessor(warns),
onInfo: messageProcessor(infos),
context: context
}, (err) => callback(err, {
errors: errors,
warns: warns,
infos: infos,
messages: messages
}));
return (message, prefix) => {
f(list, message, prefix);
f(messages, message, prefix);
};
};
const processor = new TaskProcessor(task, {
onError: messageProcessor(errors),
onWarn: messageProcessor(warns),
onInfo: messageProcessor(infos),
context: context
}, (err) => callback(err, {
errors: errors,
warns: warns,
infos: infos,
messages: messages
}));
processor.process();
processor.process();
};

@ -3,30 +3,30 @@
const glob = require('glob');
module.exports = (params, processor) => ({
process: () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
process: () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
if (!files || !files.length) {
return processor.done();
}
if (!files || !files.length) {
return processor.done();
}
return processor.processTask({
type: "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "deletefromcode",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
})
return processor.processTask({
type: "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "deletefromcode",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
})
});

@ -1,10 +1,10 @@
"use strict";
module.exports = (params, processor) => {
const condition = (!params.owner || params.owner === processor.context.owner) && (!params.branch || params.branch === processor.context.branch || "refs/heads/" + params.branch === processor.context.branch);
const task = condition ? params.task : params.otherwise;
const condition = (!params.owner || params.owner === processor.context.owner) && (!params.branch || params.branch === processor.context.branch || "refs/heads/" + params.branch === processor.context.branch);
const task = condition ? params.task : params.otherwise;
return {
process: () => processor.processTask(task || {type: "noop"}, processor.done.bind(processor))
};
return {
process: () => processor.processTask(task || {type: "noop"}, processor.done.bind(processor))
};
};

@ -3,19 +3,19 @@
const fse = require('fs-extra');
module.exports = (params, processor) => ({
process: () => {
const sourceFilePath = processor.context.exported + "/" + params.filename;
const targetFilePath = processor.context.release + "/" + params.filename;
process: () => {
const sourceFilePath = processor.context.exported + "/" + params.filename;
const targetFilePath = processor.context.release + "/" + params.filename;
processor.onInfo("Copying " + sourceFilePath + " to " + targetFilePath);
processor.onInfo("Copying " + sourceFilePath + " to " + targetFilePath);
fse.copy(sourceFilePath, targetFilePath, (err) => {
if (err) {
processor.onError("Unable to copy file: " + err);
} else {
processor.onInfo("Copied file");
}
return processor.done();
});
}
fse.copy(sourceFilePath, targetFilePath, (err) => {
if (err) {
processor.onError("Unable to copy file: " + err);
} else {
processor.onInfo("Copied file");
}
return processor.done();
});
}
});

@ -3,30 +3,30 @@
const glob = require('glob');
module.exports = (params, processor) => ({
process: () => glob(params.mask, {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
process: () => glob(params.mask, {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
if (!files || !files.length) {
return processor.done();
}
if (!files || !files.length) {
return processor.done();
}
return processor.processTask({
type: "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "copy",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
})
return processor.processTask({
type: "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "copy",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
})
});

@ -5,31 +5,31 @@ const path = require('path');
const cssnano = require("cssnano");
module.exports = (params, processor) => ({
process: () => {
const filePath = path.normalize(processor.context.exported + "/" + params.filename);
fs.readFile(filePath, (err, css) => {
if (err) {
processor.onError("Unable to read stylesheet " + params.filename + ": " + err);
return processor.done();
}
process: () => {
const filePath = path.normalize(processor.context.exported + "/" + params.filename);
fs.readFile(filePath, (err, css) => {
if (err) {
processor.onError("Unable to read stylesheet " + params.filename + ": " + err);
return processor.done();
}
cssnano.process(css)
.catch((err) => {
processor.onError("Unable to uglify stylesheet: " + err);
processor.done();
})
.then((result) => {
fs.writeFile(filePath, result.css, (err) => {
if (err) {
processor.onError("Unable to write uglified stylesheet for " + params.filename + ": " + err);
} else {
processor.onInfo("Saved uglified stylesheet for " + params.filename + "; uglified length: " + result.css.length);
}
cssnano.process(css)
.catch((err) => {
processor.onError("Unable to uglify stylesheet: " + err);
processor.done();
})
.then((result) => {
fs.writeFile(filePath, result.css, (err) => {
if (err) {
processor.onError("Unable to write uglified stylesheet for " + params.filename + ": " + err);
} else {
processor.onInfo("Saved uglified stylesheet for " + params.filename + "; uglified length: " + result.css.length);
}
processor.done();
});
});
processor.done();
});
});
}
});
}
});

@ -3,34 +3,34 @@
const glob = require('glob');
module.exports = (params, processor) => ({
process: () => {
if (processor.context.cssnanoallDone) {
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json");
}
process: () => {
if (processor.context.cssnanoallDone) {
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.cssnanoallDone = true;
processor.context.cssnanoallDone = true;
glob("**/*.css", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
glob("**/*.css", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "cssnano",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
});
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "cssnano",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
});
}
});

@ -3,20 +3,20 @@
const fse = require('fs-extra');
module.exports = function (params, processor) {
return {
process: () => {
var sourceFilePath = processor.context.exported + "/" + params.filename;
return {
process: () => {
var sourceFilePath = processor.context.exported + "/" + params.filename;
processor.onInfo("Deleting " + sourceFilePath);
processor.onInfo("Deleting " + sourceFilePath);
fse.remove(sourceFilePath, function(err) {
if (err) {
processor.onError("Unable to delete file: " + err);
} else {
processor.onInfo("Deleted file");
}
return processor.done();
});
}
};
fse.remove(sourceFilePath, function(err) {
if (err) {
processor.onError("Unable to delete file: " + err);
} else {
processor.onInfo("Deleted file");
}
return processor.done();
});
}
};
};

@ -3,15 +3,15 @@
const sequential = require('./sequential');
module.exports = (params, processor) => sequential({
tasks: [
{
type: "dotnetbuildwithoutcleanup",
name: "build",
params: params
},
{
type: "cleanupafterdotnetbuild",
name: "cleanup"
}
]
tasks: [
{
type: "dotnetbuildwithoutcleanup",
name: "build",
params: params
},
{
type: "cleanupafterdotnetbuild",
name: "cleanup"
}
]
}, processor);

@ -3,20 +3,20 @@
const sequential = require("./sequential");
module.exports = (params, processor) => sequential({
tasks: [
{
type: "dotnetbuildwithoutcleanup",
name: "build",
params: params
},
{
type: "dotnetnunitall",
name: "test",
params: params
},
{
type: "cleanupafterdotnetbuild",
name: "cleanup"
}
]
tasks: [
{
type: "dotnetbuildwithoutcleanup",
name: "build",
params: params
},
{
type: "dotnetnunitall",
name: "test",
params: params
},
{
type: "cleanupafterdotnetbuild",
name: "cleanup"
}
]
}, processor);

@ -4,44 +4,44 @@ const spawn = require('child_process').spawn;
const settings = require("../../settings");
module.exports = (params, processor) => ({
process: () => {
let result = "";
let error = "";
const builder = spawn(settings.builderExecutable, [params.command]);
processor.onInfo("DotNetBuilderWrapper processing (at " + (new Date().toISOString()) + "): " + JSON.stringify(params, null, 4));
builder.stdout.on('data', (data) => result += data);
builder.stderr.on('data', (data) => error += data);
builder.on('exit', (code) => {
if (code !== 0) {
error = "Return code is " + code + "\r\n" + error;
processor.onError(error);
return processor.done();
}
const report = JSON.parse(result);
const messages = report.Messages;
messages.forEach((message) => {
if (!message) {
return processor.onError("Message is null");
}
switch(message.Type) {
case "info":
return processor.onInfo(message.Body);
case "warn":
return processor.onWarn(message.Body);
default:
return processor.onError(message.Body);
}
});
processor.onInfo("Done DotNetBuilderWrapper processing (at " + (new Date().toISOString()) + ")");
return processor.done();
});
builder.stdin.write(JSON.stringify(params));
builder.stdin.end();
}
process: () => {
let result = "";
let error = "";
const builder = spawn(settings.builderExecutable, [params.command]);
processor.onInfo("DotNetBuilderWrapper processing (at " + (new Date().toISOString()) + "): " + JSON.stringify(params, null, 4));
builder.stdout.on('data', (data) => result += data);
builder.stderr.on('data', (data) => error += data);
builder.on('exit', (code) => {
if (code !== 0) {
error = "Return code is " + code + "\r\n" + error;
processor.onError(error);
return processor.done();
}
const report = JSON.parse(result);
const messages = report.Messages;
messages.forEach((message) => {
if (!message) {
return processor.onError("Message is null");
}
switch(message.Type) {
case "info":
return processor.onInfo(message.Body);
case "warn":
return processor.onWarn(message.Body);
default:
return processor.onError(message.Body);
}
});
processor.onInfo("Done DotNetBuilderWrapper processing (at " + (new Date().toISOString()) + ")");
return processor.done();
});
builder.stdin.write(JSON.stringify(params));
builder.stdin.end();
}
});

@ -3,40 +3,40 @@
const sequential = require('./sequential');
module.exports = (params, processor) => {
let tasks = [];
let tasks = [];
if (!params.skipMbsCheckStyle) {
tasks.push({
type: "dotnetcheckstyle",
params: params
});
}
if (!params.skipMbsCheckStyle) {
tasks.push({
type: "dotnetcheckstyle",
params: params
});
}
tasks.push({
type: "dotnetrewrite",
params: params
});
tasks.push({
type: "dotnetrewrite",
params: params
});
if (!params.skipNugetRestore) {
tasks.push({
type: "dotnetnugetrestore",
params: params
});
}
if (!params.skipNugetRestore) {
tasks.push({
type: "dotnetnugetrestore",
params: params
});
}
tasks.push({
type: "dotnetcompile",
params: {
solution: params.solution,
skipCodeSigning: params.skipCodeSigning,
forceCodeAnalysis: params.forceCodeAnalysis,
ignoreCodeAnalysis: params.ignoreCodeAnalysis,
configuration: params.configuration,
target: "Rebuild"
}
});
tasks.push({
type: "dotnetcompile",
params: {
solution: params.solution,
skipCodeSigning: params.skipCodeSigning,
forceCodeAnalysis: params.forceCodeAnalysis,
ignoreCodeAnalysis: params.ignoreCodeAnalysis,
configuration: params.configuration,
target: "Rebuild"
}
});
return sequential({
tasks: tasks
}, processor);
return sequential({
tasks: tasks
}, processor);
};

@ -5,58 +5,58 @@ const async = require('async');
const glob = require('glob');
// Header prefix emitted by .NET code generators; files starting with it are
// skipped by the style checker below.
// NOTE: the diff-merge duplication of the string continuation has been removed.
const autoGeneratedMarker =
    "//------------------------------------------------------------------------------" + "\n" +
    "// <auto-generated>";
module.exports = (params, processor) => ({
process: () => {
if (processor.context.dotnetcheckerDone) {
return processor.done();
}
processor.context.dotnetcheckerDone = true;
glob("**/*.cs", {cwd: processor.context.exported}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
processor.onInfo("Found " + files.length + " .cs files");
if (!files || !files.length) {
processor.onWarn("No .cs files found");
return processor.done();
}
return async.parallel(files.map((file) => (callback) => fs.readFile(processor.context.exported + "/" + file, { encoding: "utf8" }, (err, data) => {
if (err) {
processor.onError("Unable to check file " + file + ": " + err);
return callback(err);
}
if (data.indexOf("\r\n") >= 0) {
processor.onError("Windows-style EOL (0D0A) found in file " + file);
return callback();
}
if (params.ignoreCodeStyle) {
return callback();
}
if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.substr(0, autoGeneratedMarker.length) === autoGeneratedMarker) {
processor.onInfo("Skipping auto-generated file " + file);
return callback();
}
if (data.indexOf("\t") >= 0 && data.indexOf(" ") >= 0) {
processor.onError("Both tabs and spaces found in file " + file);
}
if (data.indexOf("\t") >= 0) {
processor.onError("Tabs found in file " + file);
}
processor.onInfo("Checked file " + file);
callback();
})), processor.done.bind(processor));
});
}
process: () => {
if (processor.context.dotnetcheckerDone) {
return processor.done();
}
processor.context.dotnetcheckerDone = true;
glob("**/*.cs", {cwd: processor.context.exported}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
processor.onInfo("Found " + files.length + " .cs files");
if (!files || !files.length) {
processor.onWarn("No .cs files found");
return processor.done();
}
return async.parallel(files.map((file) => (callback) => fs.readFile(processor.context.exported + "/" + file, { encoding: "utf8" }, (err, data) => {
if (err) {
processor.onError("Unable to check file " + file + ": " + err);
return callback(err);
}
if (data.indexOf("\r\n") >= 0) {
processor.onError("Windows-style EOL (0D0A) found in file " + file);
return callback();
}
if (params.ignoreCodeStyle) {
return callback();
}
if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.substr(0, autoGeneratedMarker.length) === autoGeneratedMarker) {
processor.onInfo("Skipping auto-generated file " + file);
return callback();
}
if (data.indexOf("\t") >= 0 && data.indexOf(" ") >= 0) {
processor.onError("Both tabs and spaces found in file " + file);
}
if (data.indexOf("\t") >= 0) {
processor.onError("Tabs found in file " + file);
}
processor.onInfo("Checked file " + file);
callback();
})), processor.done.bind(processor));
});
}
});

@ -4,30 +4,30 @@ const settings = require('../../settings');
const dotnetbuilderwrapper = require('./dotnetbuilderwrapper');
module.exports = (params, processor) => {
const compileParams = {
command: "compile",
SolutionPath: processor.context.exported + "/" + params.solution,
Configuration: params.configuration,
Target: params.target,
OutputDirectory: params.overrideOutputDirectory
};
if (!settings.skipCodeSigning && !params.skipCodeSigning) {
compileParams.SigningKey = settings.codeSigningKeyFile;
}
if (settings.isCodeAnalysisUnsupported) {
if (params.forceCodeAnalysis) {
processor.onError("Code analysis is not supported");
processor.done();
return;
}
compileParams.SkipCodeAnalysis = true;
} else {
if (settings.ignoreCodeAnalysisByDefault && !params.forceCodeAnalysis) {
compileParams.SkipCodeAnalysis = true;
}
if (params.ignoreCodeAnalysis) {
compileParams.SkipCodeAnalysis = true;
}
}
return dotnetbuilderwrapper(compileParams, processor);
const compileParams = {
command: "compile",
SolutionPath: processor.context.exported + "/" + params.solution,
Configuration: params.configuration,
Target: params.target,
OutputDirectory: params.overrideOutputDirectory
};
if (!settings.skipCodeSigning && !params.skipCodeSigning) {
compileParams.SigningKey = settings.codeSigningKeyFile;
}
if (settings.isCodeAnalysisUnsupported) {
if (params.forceCodeAnalysis) {
processor.onError("Code analysis is not supported");
processor.done();
return;
}
compileParams.SkipCodeAnalysis = true;
} else {
if (settings.ignoreCodeAnalysisByDefault && !params.forceCodeAnalysis) {
compileParams.SkipCodeAnalysis = true;
}
if (params.ignoreCodeAnalysis) {
compileParams.SkipCodeAnalysis = true;
}
}
return dotnetbuilderwrapper(compileParams, processor);
};

@ -3,27 +3,27 @@
const sequential = require('./sequential');
module.exports = (params, processor) => {
const date = new Date();
const version = (params.version || ((params.major || "0") + "." + (date.getFullYear() * 10000 + (date.getMonth() + 1) * 100 + date.getDate()) + "." + ((date.getHours() * 100 + date.getMinutes()) * 100 + date.getSeconds()))) + (params.withoutCommitSha ? "" : ("-r" + processor.context.rev.substr(0, 16)));
const date = new Date();
const version = (params.version || ((params.major || "0") + "." + (date.getFullYear() * 10000 + (date.getMonth() + 1) * 100 + date.getDate()) + "." + ((date.getHours() * 100 + date.getMinutes()) * 100 + date.getSeconds()))) + (params.withoutCommitSha ? "" : ("-r" + processor.context.rev.substr(0, 16)));
return sequential({
tasks: [
{
type: "dotnetbuilderwrapper",
params: {
command: "nugetpack",
BaseDirectory: processor.context.exported,
SpecPath: processor.context.exported + "/" + params.nuspec,
OutputDirectory: processor.context.exported,
Version: version
}
},
{
type: "copy",
params: {
filename: params.name + "." + version + ".nupkg"
}
}
]
}, processor);
return sequential({
tasks: [
{
type: "dotnetbuilderwrapper",
params: {
command: "nugetpack",
BaseDirectory: processor.context.exported,
SpecPath: processor.context.exported + "/" + params.nuspec,
OutputDirectory: processor.context.exported,
Version: version
}
},
{
type: "copy",
params: {
filename: params.name + "." + version + ".nupkg"
}
}
]
}, processor);
};

@ -3,28 +3,28 @@
const conditional = require('./conditional');
module.exports = (params, processor) => conditional({
owner: params.masterRepoOwner,
branch: "master",
task: {
name: "nuget-push",
type: "dotnetnugetpush",
params: {
nuspec: params.nuspecName + ".nuspec",
name: params.nuspecName,
withoutCommitSha: params.withoutCommitSha,
version: params.version,
major: params.major
}
},
otherwise: {
name: "nuget-pack",
type: "dotnetnugetpack",
params: {
nuspec: params.nuspecName + ".nuspec",
name: params.nuspecName,
withoutCommitSha: params.withoutCommitSha,
version: params.version,
major: params.major
}
}
owner: params.masterRepoOwner,
branch: "master",
task: {
name: "nuget-push",
type: "dotnetnugetpush",
params: {
nuspec: params.nuspecName + ".nuspec",
name: params.nuspecName,
withoutCommitSha: params.withoutCommitSha,
version: params.version,
major: params.major
}
},
otherwise: {
name: "nuget-pack",
type: "dotnetnugetpack",
params: {
nuspec: params.nuspecName + ".nuspec",
name: params.nuspecName,
withoutCommitSha: params.withoutCommitSha,
version: params.version,
major: params.major
}
}
}, processor);

@ -3,28 +3,28 @@
const sequential = require("./sequential");
module.exports = (params, processor) => {
const date = new Date();
const version = (params.version || ((params.major || "0") + "." + (date.getFullYear() * 10000 + (date.getMonth() + 1) * 100 + date.getDate()) + "." + ((date.getHours() * 100 + date.getMinutes()) * 100 + date.getSeconds()))) + (params.withoutCommitSha ? "" : ("-r" + processor.context.rev.substr(0, 16)));
const nupkg = params.name + "." + version + ".nupkg";
const date = new Date();
const version = (params.version || ((params.major || "0") + "." + (date.getFullYear() * 10000 + (date.getMonth() + 1) * 100 + date.getDate()) + "." + ((date.getHours() * 100 + date.getMinutes()) * 100 + date.getSeconds()))) + (params.withoutCommitSha ? "" : ("-r" + processor.context.rev.substr(0, 16)));
const nupkg = params.name + "." + version + ".nupkg";
return sequential({
tasks: [
{
type: "dotnetbuilderwrapper",
params: {
command: "nugetpack",
BaseDirectory: processor.context.exported,
SpecPath: processor.context.exported + "/" + params.nuspec,
OutputDirectory: processor.context.exported,
Version: version
}
},
{
type: "dotnetnugetpushonly",
params: {
Package: nupkg
}
}
]
}, processor);
return sequential({
tasks: [
{
type: "dotnetbuilderwrapper",
params: {
command: "nugetpack",
BaseDirectory: processor.context.exported,
SpecPath: processor.context.exported + "/" + params.nuspec,
OutputDirectory: processor.context.exported,
Version: version
}
},
{
type: "dotnetnugetpushonly",
params: {
Package: nupkg
}
}
]
}, processor);
};

@ -4,8 +4,8 @@ const dotnetbuilderwrapper = require('./dotnetbuilderwrapper');
const settings = require("../../settings");
module.exports = (params, processor) => dotnetbuilderwrapper({
command: "nugetpush",
Package: processor.context.exported + "/" + params.Package,
NugetHost: settings.nugetHost,
ApiKey: settings.nugetApiKey
command: "nugetpush",
Package: processor.context.exported + "/" + params.Package,
NugetHost: settings.nugetHost,
ApiKey: settings.nugetApiKey
}, processor);

@ -3,14 +3,14 @@
const sequential = require('./sequential');
module.exports = (params, processor) => sequential({
tasks: [
{
type: "dotnetbuilderwrapper",
params: {
command: "nugetrestore",
BaseDirectory: processor.context.exported,
SolutionPath: processor.context.exported + "/" + params.solution
}
}
]
tasks: [
{
type: "dotnetbuilderwrapper",
params: {
command: "nugetrestore",
BaseDirectory: processor.context.exported,
SolutionPath: processor.context.exported + "/" + params.solution
}
}
]
}, processor);

@ -3,7 +3,7 @@
const dotNetBuilderWrapper = require('./dotnetbuilderwrapper');
module.exports = (params, processor) => dotNetBuilderWrapper({
command: "nunit",
TestLibraryPath: processor.context.exported + "/" + params.assembly//,
// OutputPath: processor.context.release + "/" + params.solution + "/"
command: "nunit",
TestLibraryPath: processor.context.exported + "/" + params.assembly//,
// OutputPath: processor.context.release + "/" + params.solution + "/"
}, processor);

@ -3,39 +3,39 @@
const glob = require('glob');
module.exports = (params, processor) => ({
process: () => {
if (processor.context.dotnetnunitallDone) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
}
process: () => {
if (processor.context.dotnetnunitallDone) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.dotnetnunitallDone = true;
processor.context.dotnetnunitallDone = true;
glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
if (!files || !files.length) {
processor.onError("No test assemblies found in " + processor.context.exported);
return processor.done();
}
if (!files || !files.length) {
processor.onError("No test assemblies found in " + processor.context.exported);
return processor.done();
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "dotnetnunit",
params: {
assembly: file
}
}))
}
}, processor.done.bind(processor));
});
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "dotnetnunit",
params: {
assembly: file
}
}))
}
}, processor.done.bind(processor));
});
}
});

@ -10,38 +10,38 @@ const deployTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.b
const versionTemplate = fs.readFileSync(__dirname + "/dotnetpackwebapp.template.version.aspx", {encoding: "utf8"});
module.exports = (params, processor) => sequential({
tasks: [
{
type: "writefile",
params: {
filename: "MakePackage.msbuild",
data: Mustache.render(msbuildTemplate, params)
}
},
{
type: "writefile",
params: {
filename: "Deploy.bat",
data: Mustache.render(deployTemplate, params)
}
},
{
type: "writefile",
params: {
filename: "version.aspx",
data: Mustache.render(versionTemplate, params)
}
},
{
type: "dotnetcompile",
params: {
solution: "MakePackage.msbuild",
skipCodeSigning: params.skipCodeSigning,
isCodeAnalysisUnsupported: params.isCodeAnalysisUnsupported,
configuration: params.configuration,
target: "Package",
overrideOutputDirectory: processor.context.release
}
}
]
tasks: [
{
type: "writefile",
params: {
filename: "MakePackage.msbuild",
data: Mustache.render(msbuildTemplate, params)
}
},
{
type: "writefile",
params: {
filename: "Deploy.bat",
data: Mustache.render(deployTemplate, params)
}
},
{
type: "writefile",
params: {
filename: "version.aspx",
data: Mustache.render(versionTemplate, params)
}
},
{
type: "dotnetcompile",
params: {
solution: "MakePackage.msbuild",
skipCodeSigning: params.skipCodeSigning,
isCodeAnalysisUnsupported: params.isCodeAnalysisUnsupported,
configuration: params.configuration,
target: "Package",
overrideOutputDirectory: processor.context.release
}
}
]
}, processor);

@ -8,53 +8,53 @@ const settings = require('../../settings');
const addAssemblyAttribute = (content, attribute) => content + "\n" + attribute + "\n";
module.exports = (params, processor) => ({
process: () => {
if (processor.context.dotnetrewriterDone) {
return processor.done();
}
processor.context.dotnetrewriterDone = true;
const processAssemblyInfo = (appendInformationalVersion) => (content, cb) => {
if (!params.skipCodeSigning && !settings.skipCodeSigning) {
content = content.replace(
/InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
(match, p1) => "InternalsVisibleTo(\"" + p1 + ",PublicKey=" + settings.codeSigningPublicKey + "\")"
);
}
if (appendInformationalVersion) {
content = addAssemblyAttribute(content, "[assembly: System.Reflection.AssemblyInformationalVersion(\"" + processor.context.versionInfo + "\")]");
}
return cb(null, content);
};
glob("**/{InternalsVisible,AssemblyInfo}*.cs", {cwd: processor.context.exported}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
processor.onInfo("Found " + files.length + " AssemblyInfo.cs files");
if (!files || !files.length) {
processor.onWarn("No AssemblyInfo.cs found");
return processor.done();
}
return async.parallel(files.map((file) => (callback) => async.waterfall([
fs.readFile.bind(null, processor.context.exported + "/" + file, { encoding: "utf8" }),
processAssemblyInfo(file.toLowerCase().indexOf("assemblyinfo.cs") >= 0),
fs.writeFile.bind(null, processor.context.exported + "/" + file)
], (err) => {
if (err) {
processor.onError("Unable to rewrite file " + file + ": " + err);
} else {
processor.onInfo("Rewritten file " + file);
}
callback(err);
})), processor.done.bind(processor));
});
}
process: () => {
if (processor.context.dotnetrewriterDone) {
return processor.done();
}
processor.context.dotnetrewriterDone = true;
const processAssemblyInfo = (appendInformationalVersion) => (content, cb) => {
if (!params.skipCodeSigning && !settings.skipCodeSigning) {
content = content.replace(
/InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
(match, p1) => "InternalsVisibleTo(\"" + p1 + ",PublicKey=" + settings.codeSigningPublicKey + "\")"
);
}
if (appendInformationalVersion) {
content = addAssemblyAttribute(content, "[assembly: System.Reflection.AssemblyInformationalVersion(\"" + processor.context.versionInfo + "\")]");
}
return cb(null, content);
};
glob("**/{InternalsVisible,AssemblyInfo}*.cs", {cwd: processor.context.exported}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
processor.onInfo("Found " + files.length + " AssemblyInfo.cs files");
if (!files || !files.length) {
processor.onWarn("No AssemblyInfo.cs found");
return processor.done();
}
return async.parallel(files.map((file) => (callback) => async.waterfall([
fs.readFile.bind(null, processor.context.exported + "/" + file, { encoding: "utf8" }),
processAssemblyInfo(file.toLowerCase().indexOf("assemblyinfo.cs") >= 0),
fs.writeFile.bind(null, processor.context.exported + "/" + file)
], (err) => {
if (err) {
processor.onError("Unable to rewrite file " + file + ": " + err);
} else {
processor.onInfo("Rewritten file " + file);
}
callback(err);
})), processor.done.bind(processor));
});
}
});

@ -1,19 +1,19 @@
"use strict";
module.exports = (params, processor) => ({
process: () => {
if (params.error) {
processor.onError(params.error);
}
process: () => {
if (params.error) {
processor.onError(params.error);
}
if (params.warn) {
processor.onWarn(params.warn);
}
if (params.warn) {
processor.onWarn(params.warn);
}
if (params.info) {
processor.onInfo(params.info);
}
if (params.info) {
processor.onInfo(params.info);
}
processor.done();
}
processor.done();
}
});

@ -5,27 +5,27 @@ const path = require('path');
const CLIEngine = require("eslint").CLIEngine;
const settings = require("../../settings");
const cli = new CLIEngine({
configFile: settings.eslintBrowserConfig
configFile: settings.eslintBrowserConfig
});
module.exports = (params, processor) => ({
process: () => {
const filePath = path.normalize(processor.context.exported + "/" + params.filename);
const result = cli.executeOnFiles([filePath]);
processor.onInfo("ESLinted " + params.filename);
process: () => {
const filePath = path.normalize(processor.context.exported + "/" + params.filename);
const result = cli.executeOnFiles([filePath]);
processor.onInfo("ESLinted " + params.filename);
result.results.forEach((subresult) => {
subresult.messages.forEach((message) => {
const messageText = params.filename + ":" + message.line + "," + message.column + " (" + message.ruleId + ") " + message.message;
if (message.fatal || message.severity === 2) {
processor.onError(messageText);
} else {
processor.onWarn(messageText);
}
});
});
result.results.forEach((subresult) => {
subresult.messages.forEach((message) => {
const messageText = params.filename + ":" + message.line + "," + message.column + " (" + message.ruleId + ") " + message.message;
if (message.fatal || message.severity === 2) {
processor.onError(messageText);
} else {
processor.onWarn(messageText);
}
});
});
processor.done();
}
processor.done();
}
});

@ -3,36 +3,36 @@
const glob = require('glob');
module.exports = (params, processor) => ({
process: () => {
if (processor.context.eslintbrowserallDone) {
processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");
}
process: () => {
if (processor.context.eslintbrowserallDone) {
processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.eslintbrowserallDone = true;
processor.context.eslintbrowserallDone = true;
const excludeFiles = params.excludeFiles || [];
const excludeFiles = params.excludeFiles || [];
glob("**/*.js", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
glob("**/*.js", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.filter(file => !excludeFiles.includes(file)).map((file) => ({
name: file,
type: "eslintbrowser",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
});
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.filter(file => !excludeFiles.includes(file)).map((file) => ({
name: file,
type: "eslintbrowser",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
});
}
});

@ -1,5 +1,5 @@
"use strict";
module.exports = (params, processor) => ({
process: () => processor.done()
process: () => processor.done()
});

@ -3,32 +3,32 @@
const sequential = require('./sequential');
module.exports = (params, processor) => sequential({
tasks: [
{
type: "eslintbrowserall",
params: {
excludeFiles: params.eslintExcludeFiles
}
},
{
type: "uglifyjsall"
},
{
type: "cssnanoall"
},
{
type: "writefile",
params: {
filename: "version.txt",
data: processor.context.versionInfo
}
},
{
type: "zip",
params: {
directory: "",
archive: processor.context.reponame + ".zip"
}
}
]
tasks: [
{
type: "eslintbrowserall",
params: {
excludeFiles: params.eslintExcludeFiles
}
},
{
type: "uglifyjsall"
},
{
type: "cssnanoall"
},
{
type: "writefile",
params: {
filename: "version.txt",
data: processor.context.versionInfo
}
},
{
type: "zip",
params: {
directory: "",
archive: processor.context.reponame + ".zip"
}
}
]
}, processor);

@ -3,5 +3,5 @@
const async = require("async");
module.exports = (params, processor) => ({
process: () => async.parallel(params.tasks.map((task) => (callback) => processor.processTask(task, (err) => callback())), processor.done.bind(processor))
process: () => async.parallel(params.tasks.map((task) => (callback) => processor.processTask(task, (err) => callback())), processor.done.bind(processor))
});

@ -3,8 +3,8 @@
const async = require("async");
module.exports = (params, processor) => {
const mapper = Function.bind.bind(processor.processTask, processor);
return {
process: () => async.series(params.tasks.map((element) => mapper(element)), processor.done.bind(processor))
};
const mapper = Function.bind.bind(processor.processTask, processor);
return {
process: () => async.series(params.tasks.map((element) => mapper(element)), processor.done.bind(processor))
};
};

@ -5,18 +5,18 @@ const path = require('path');
const UglifyJS = require("uglify-js");
module.exports = (params, processor) => ({
process: () => {
const filePath = path.normalize(processor.context.exported + "/" + params.filename);
const result = UglifyJS.minify(filePath);
fs.writeFile(filePath, result.code, (err) => {
if (err) {
processor.onError("Unable to write uglified script for " + params.filename + ": " + err);
} else {
processor.onInfo("Saved uglified script for " + params.filename + "; uglified length: " + result.code.length);
}
process: () => {
const filePath = path.normalize(processor.context.exported + "/" + params.filename);
const result = UglifyJS.minify(filePath);
fs.writeFile(filePath, result.code, (err) => {
if (err) {
processor.onError("Unable to write uglified script for " + params.filename + ": " + err);
} else {
processor.onInfo("Saved uglified script for " + params.filename + "; uglified length: " + result.code.length);
}
processor.done();
});
}
processor.done();
});
}
});

@ -3,34 +3,34 @@
const glob = require('glob');
module.exports = (params, processor) => ({
process: () => {
if (processor.context.uglifyjsallDone) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
}
process: () => {
if (processor.context.uglifyjsallDone) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.uglifyjsallDone = true;
processor.context.uglifyjsallDone = true;
glob("**/*.js", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
glob("**/*.js", {
dot: true,
cwd: processor.context.exported
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "uglifyjs",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
});
}
return processor.processTask({
type: params.preventParallelTests ? "sequential" : "parallel",
params: {
tasks: files.map((file) => ({
name: file,
type: "uglifyjs",
params: {
filename: file
}
}))
}
}, processor.done.bind(processor));
});
}
});

@ -3,17 +3,17 @@
const fs = require('fs');
module.exports = (params, processor) => ({
process: () => {
const filePath = processor.context.exported + "/" + params.filename;
processor.onInfo("Writing to " + filePath);
process: () => {
const filePath = processor.context.exported + "/" + params.filename;
processor.onInfo("Writing to " + filePath);
fs.writeFile(filePath, params.data, (err) => {
if (err) {
processor.onError("Unable to write file: " + err);
} else {
processor.onInfo("Written file");
}
return processor.done();
});
}
fs.writeFile(filePath, params.data, (err) => {
if (err) {
processor.onError("Unable to write file: " + err);
} else {
processor.onInfo("Written file");
}
return processor.done();
});
}
});

@ -5,20 +5,20 @@ const path = require('path');
const Archiver = require('archiver');
module.exports = (params, processor) => ({
process: () => {
const sourceDirectoryPath = path.normalize(processor.context.exported + "/" + (params.directory || ""));
const targetArchivePath = path.normalize(processor.context.release + "/" + params.archive);
process: () => {
const sourceDirectoryPath = path.normalize(processor.context.exported + "/" + (params.directory || ""));
const targetArchivePath = path.normalize(processor.context.release + "/" + params.archive);
processor.onInfo("Compressing '" + params.directory + "' to " + params.archive);
processor.onInfo("Compressing '" + params.directory + "' to " + params.archive);
const output = fs.createWriteStream(targetArchivePath);
const archive = new Archiver("zip");
const output = fs.createWriteStream(targetArchivePath);
const archive = new Archiver("zip");
output.on("close", () => processor.done());
output.on("close", () => processor.done());
archive.on("error", (err) => processor.onError("Error while compressing: " + err));
archive.pipe(output);
archive.directory(sourceDirectoryPath, false);
archive.finalize();
}
archive.on("error", (err) => processor.onError("Error while compressing: " + err));
archive.pipe(output);
archive.directory(sourceDirectoryPath, false);
archive.finalize();
}
});

@ -1,14 +1,14 @@
"use strict";
module.exports = (req, res) => {
const options = {
owner: req.params.owner,
reponame: req.params.reponame,
branchName: req.params.branch,
branch: "/refs/heads/" + req.params.branch,
rev: req.params.rev,
file: req.params[0]
};
const options = {
owner: req.params.owner,
reponame: req.params.reponame,
branchName: req.params.branch,
branch: "/refs/heads/" + req.params.branch,
rev: req.params.rev,
file: req.params[0]
};
res.sendfile(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/" + options.file);
res.sendfile(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/" + options.file);
};

@ -5,16 +5,16 @@ const builder = require('../lib/builder');
exports.get = (req, res) => res.render('manual');
exports.post = (req, res) => {
const options = req.body;
options.url = "https://pos-github.payonline.ru/" + options.owner + "/" + options.reponame;
options.app = req.app;
const options = req.body;
options.url = "https://pos-github.payonline.ru/" + options.owner + "/" + options.reponame;
options.app = req.app;
builder.build(options, (err, result) => {
console.log("Done processing manual request");
console.log("Error: " + err);
//console.log("Result:");
//console.log(result);
res.render('manual-done', {err: err, result: result});
//res.render("manual-done", { err: err, result: result });
});
builder.build(options, (err, result) => {
console.log("Done processing manual request");
console.log("Error: " + err);
//console.log("Result:");
//console.log(result);
res.render('manual-done', {err: err, result: result});
//res.render("manual-done", { err: err, result: result });
});
};

@ -4,109 +4,109 @@ const builder = require('../lib/builder');
const commenter = require('../lib/commenter');
// Handles a GitHub "push" webhook: queues a build for the pushed revision
// and answers with a plain-text summary once the build completes.
const processPush = (req, res, payload) => {
    const repository = payload.repository;
    const options = {
        app: req.app,
        url: repository.url,
        owner: repository.owner.name,
        reponame: repository.name,
        rev: payload.after,
        branch: payload.ref
    };

    console.log("Got push event for " + options.owner + "/" + options.reponame + ":" + options.branch);

    builder.build(options, (err, result) => {
        console.log("Done processing request from GitHub");
        console.log("Error: " + err);
        res.send("Done processing request from GitHub\r\n" + "Error: " + err + "\r\n" + "Result: " + result);
    });
};
// Handles a GitHub "pull_request" webhook event.
// Only opened/reopened/synchronize/closed actions are processed; a close
// without a merge is acknowledged but ignored, and a merged close gets an
// empty response. For the remaining actions a status comment is posted on
// the pull request.
const processPullRequest = (req, res, payload) => {
    const action = payload.action;
    const number = payload.number;
    const pullRequest = payload.pull_request;

    const head = pullRequest.head;
    const headRepo = head.repo;
    const headRepoOptions = {
        url: headRepo.url,
        owner: headRepo.owner.name || headRepo.owner.login,
        reponame: headRepo.name,
        rev: head.sha,
        branchname: head.ref,
        branch: "refs/heads/" + head.ref
    };

    const base = pullRequest.base;
    const baseRepo = base.repo;
    const baseRepoOptions = {
        owner: baseRepo.owner.name || baseRepo.owner.login,
        reponame: baseRepo.name,
        branchname: base.ref
    };

    const options = {
        app: req.app,
        action: action,
        number: number,
        headRepoOptions: headRepoOptions,
        baseRepoOptions: baseRepoOptions
    };
    // Variant used for a merged PR: comment against the base repo itself.
    const masterOptions = {
        app: req.app,
        action: action,
        number: number,
        headRepoOptions: baseRepoOptions,
        baseRepoOptions: baseRepoOptions
    };

    console.log("Got pull request " + action + " event, from " + headRepoOptions.owner + "/" + headRepoOptions.reponame + ":" + headRepoOptions.branchname + " (" + headRepoOptions.rev + ") to " + baseRepoOptions.owner + "/" + baseRepoOptions.reponame + ":" + baseRepoOptions.branchname);

    if (action !== "opened" && action !== "reopened" && action !== "synchronize" && action !== "closed") {
        return res.send("Only opened/reopened/synchronize/closed actions are supported");
    }
    if (action === "closed" && !pullRequest.merged) {
        console.log("Pull request closed without merging");
        return res.send("Pull request closed without merging");
    }
    if (action === "closed") {
        return res.send("");
    }

    // NOTE(review): by this point action can no longer be "closed" (handled
    // above), so the ternary always selects `options`; kept as in original.
    commenter.commentOnPullRequest(
        action === "closed" ? masterOptions : options,
        (err, data) => {
            if (err) {
                console.log("Unable to post comment: " + err);
            }
            res.send(err || data);
        }
    );
};
module.exports = (req, res) => {
if (!req.body || (!req.body.payload && !req.body.repository)) {
return res.end();
}
if (!req.body || (!req.body.payload && !req.body.repository)) {
return res.end();
}
const eventType = req.header("x-github-event");
const payload = req.body.payload ? JSON.parse(req.body.payload || "{}") : req.body;
const eventType = req.header("x-github-event");
const payload = req.body.payload ? JSON.parse(req.body.payload || "{}") : req.body;
if (eventType === "push") {
return processPush(req, res, payload);
}
if (eventType === "push") {
return processPush(req, res, payload);
}
if (eventType === "pull_request") {
return processPullRequest(req, res, payload);
}
if (eventType === "pull_request") {
return processPullRequest(req, res, payload);
}
console.log("Got '" + eventType + "' event:");
//console.log(req.body);
return res.send("Only push/pull_request events are supported");
console.log("Got '" + eventType + "' event:");
//console.log(req.body);
return res.send("Only push/pull_request events are supported");
};

@ -5,64 +5,64 @@ const fs = require('fs');
const Archiver = require('archiver');
// Reads and parses <releasePath>report.json.
// Calls back with (null, report) on success; with "ReportFileNotFound: <path>"
// when the file is missing; with ("ReportFileNotFound", <path>) when it is
// empty; or with the raw fs error. Note: releasePath is concatenated as-is,
// so it must already end with a path separator.
const getReport = (releasePath, callback) => {
    const reportFile = releasePath + "report.json";

    // NOTE(review): fs.exists is deprecated (TOCTOU-prone); kept for exact
    // behavioral compatibility with the rest of the build server.
    fs.exists(reportFile, (exists) => {
        if (!exists) {
            return callback("ReportFileNotFound: " + reportFile);
        }

        return fs.readFile(reportFile, (err, dataBuffer) => {
            if (err) {
                return callback(err, reportFile);
            }
            const data = dataBuffer.toString();
            if (!data) {
                return callback("ReportFileNotFound", reportFile);
            }
            const report = JSON.parse(data);
            return callback(null, report);
        });
    });
};
const getDatePart = (report) => {
if (!report.date) {
return "unknowndate";
}
if (!report.date) {
return "unknowndate";
}
const date = new Date(report.date);
const paddingLeft = (str, paddingValue) => String(paddingValue + str).slice(-paddingValue.length);
const date = new Date(report.date);
const paddingLeft = (str, paddingValue) => String(paddingValue + str).slice(-paddingValue.length);
return date.getFullYear() + "." +
paddingLeft(date.getMonth() + 1, "00") + "." +
paddingLeft(date.getDate(), "00") + "." +
paddingLeft(date.getHours(), "00") + "." +
paddingLeft(date.getMinutes(), "00") + "." +
paddingLeft(date.getSeconds(), "00");
return date.getFullYear() + "." +
paddingLeft(date.getMonth() + 1, "00") + "." +
paddingLeft(date.getDate(), "00") + "." +
paddingLeft(date.getHours(), "00") + "." +
paddingLeft(date.getMinutes(), "00") + "." +
paddingLeft(date.getSeconds(), "00");
};
module.exports = (req, res, next) => {
const options = {
owner: req.params.owner,
reponame: req.params.reponame,
branchName: req.params.branch,
branch: "/refs/heads/" + req.params.branch,
rev: req.params.rev
};
const options = {
owner: req.params.owner,
reponame: req.params.reponame,
branchName: req.params.branch,
branch: "/refs/heads/" + req.params.branch,
rev: req.params.rev
};
const releasePath = path.normalize(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/");
const releasePath = path.normalize(req.app.get('releasepath') + "/" + options.owner + "/" + options.reponame + "/" + options.branch + "/" + options.rev + "/");
getReport(releasePath, (err, report) => {
if (err) {
return next(err);
}
getReport(releasePath, (err, report) => {
if (err) {
return next(err);
}
const archive = new Archiver("zip");
archive.on("error", next);
res.attachment(options.reponame + '.' + getDatePart(report) + '.' + options.rev + '.zip', '.');
archive.pipe(res);
archive.directory(releasePath, false);
archive.finalize();
});
const archive = new Archiver("zip");
archive.on("error", next);
res.attachment(options.reponame + '.' + getDatePart(report) + '.' + options.rev + '.zip', '.');
archive.pipe(res);
archive.directory(releasePath, false);
archive.finalize();
});
};

@ -4,80 +4,80 @@ const url = require('url');
const statusProcessor = require('../lib/status-processor');
const parseOptionsFromReferer = (path, callback) => {
const pathParts = path.split("/").filter((value) => value);
const result = {};
const pathParts = path.split("/").filter((value) => value);
const result = {};
if (pathParts.length < 2) {
return callback("BadRequest", result);
}
if (pathParts.length < 2) {
return callback("BadRequest", result);
}
if (pathParts[2] === "tree") {
pathParts.splice(2, 1);
}
if (pathParts[2] === "tree") {
pathParts.splice(2, 1);
}
result.owner = pathParts[0];
result.reponame = pathParts[1];
result.branchName = pathParts[2];
result.rev = pathParts[3];
return callback(null, result);
result.owner = pathParts[0];
result.reponame = pathParts[1];
result.branchName = pathParts[2];
result.rev = pathParts[3];
return callback(null, result);
};
const createShowReport = (res) => (err, options) => {
options = options || {};
options.err = err;
res.render('status', options);
options = options || {};
options.err = err;
res.render('status', options);
};
exports.image = (req, res) => {
const handle = (err, options) => {
if (err === "ReportFileNotFound") {
options.status = "Building";
} else if (err) {
options.status = "StatusError";
options.message = err;
} else if (options.report.result === "MBSNotFound") {
options.status = "MBSNotUsed";
} else if (options.report.err) {
options.status = "Error";
options.message = options.report.err;
} else if ((options.report.result.warns.$allMessages || []).length > 0) {
options.status = "Warning";
options.message = options.report.result.warns.$allMessages[0].message;
} else {
options.status = "OK";
if ((options.report.result.infos.$allMessages || []).length > 0) {
options.message = options.report.result.infos.$allMessages[options.report.result.infos.$allMessages.length-1].message;
}
}
res.setHeader('Content-Type', 'image/svg+xml');
res.render('status-image', options);
};
const handle = (err, options) => {
if (err === "ReportFileNotFound") {
options.status = "Building";
} else if (err) {
options.status = "StatusError";
options.message = err;
} else if (options.report.result === "MBSNotFound") {
options.status = "MBSNotUsed";
} else if (options.report.err) {
options.status = "Error";
options.message = options.report.err;
} else if ((options.report.result.warns.$allMessages || []).length > 0) {
options.status = "Warning";
options.message = options.report.result.warns.$allMessages[0].message;
} else {
options.status = "OK";
if ((options.report.result.infos.$allMessages || []).length > 0) {
options.message = options.report.result.infos.$allMessages[options.report.result.infos.$allMessages.length-1].message;
}
}
res.setHeader('Content-Type', 'image/svg+xml');
res.render('status-image', options);
};
parseOptionsFromReferer(url.parse(req.headers.referer || "").pathname || "", (err, options) => {
if (err) {
return handle(err, options);
}
parseOptionsFromReferer(url.parse(req.headers.referer || "").pathname || "", (err, options) => {
if (err) {
return handle(err, options);
}
statusProcessor.getReport(req.app, options, (err, options) => handle(err, options));
});
statusProcessor.getReport(req.app, options, (err, options) => handle(err, options));
});
};
exports.page = (req, res) => {
const options = {
owner: req.params.owner,
reponame: req.params.reponame,
branchName: req.params.branch,
branch: "/refs/heads/" + req.params.branch,
rev: req.params.rev
};
const options = {
owner: req.params.owner,
reponame: req.params.reponame,
branchName: req.params.branch,
branch: "/refs/heads/" + req.params.branch,
rev: req.params.rev
};
statusProcessor.getReport(req.app, options, createShowReport(res));
statusProcessor.getReport(req.app, options, createShowReport(res));
};
exports.pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0], (err, options) => {
if (err) {
return createShowReport(err, options);
}
if (err) {
return createShowReport(err, options);
}
return statusProcessor.getReport(req.app, options, createShowReport(res));
return statusProcessor.getReport(req.app, options, createShowReport(res));
});

Loading…
Cancel
Save