Merge branch 'migration-to-typescript'

dependabot/npm_and_yarn/BuildServer/eslint-7.2.0
Inga 🏳‍🌈 7 years ago
commit 17d326866e
  1. 1
      BuildServer/.gitignore
  2. 55
      BuildServer/app.js
  3. 55
      BuildServer/app.ts
  4. 36
      BuildServer/compress-old-reports.js
  5. 64
      BuildServer/global.d.ts
  6. 128
      BuildServer/lib/builder.ts
  7. 46
      BuildServer/lib/commenter.ts
  8. 58
      BuildServer/lib/git/copy.ts
  9. 33
      BuildServer/lib/git/loader.ts
  10. 3
      BuildServer/lib/mail-sender.js
  11. 3
      BuildServer/lib/mail-sender.ts
  12. 99
      BuildServer/lib/report-processor.ts
  13. 39
      BuildServer/lib/status-processor.ts
  14. 96
      BuildServer/lib/task-processor.js
  15. 94
      BuildServer/lib/task-processor.ts
  16. 31
      BuildServer/lib/tasks/cleanupafterdotnetbuild.js
  17. 29
      BuildServer/lib/tasks/cleanupafterdotnetbuild.ts
  18. 6
      BuildServer/lib/tasks/conditional.ts
  19. 23
      BuildServer/lib/tasks/copy.js
  20. 21
      BuildServer/lib/tasks/copy.ts
  21. 31
      BuildServer/lib/tasks/copyglob.js
  22. 29
      BuildServer/lib/tasks/copyglob.ts
  23. 37
      BuildServer/lib/tasks/cssnano.js
  24. 34
      BuildServer/lib/tasks/cssnano.ts
  25. 36
      BuildServer/lib/tasks/cssnanoall.js
  26. 34
      BuildServer/lib/tasks/cssnanoall.ts
  27. 22
      BuildServer/lib/tasks/deletefromcode.js
  28. 20
      BuildServer/lib/tasks/deletefromcode.ts
  29. 17
      BuildServer/lib/tasks/dotnetbuild.js
  30. 17
      BuildServer/lib/tasks/dotnetbuild.ts
  31. 22
      BuildServer/lib/tasks/dotnetbuildandtest.js
  32. 22
      BuildServer/lib/tasks/dotnetbuildandtest.ts
  33. 82
      BuildServer/lib/tasks/dotnetbuilderwrapper.js
  34. 89
      BuildServer/lib/tasks/dotnetbuilderwrapper.ts
  35. 42
      BuildServer/lib/tasks/dotnetbuildwithoutcleanup.js
  36. 42
      BuildServer/lib/tasks/dotnetbuildwithoutcleanup.ts
  37. 76
      BuildServer/lib/tasks/dotnetcheckstyle.js
  38. 74
      BuildServer/lib/tasks/dotnetcheckstyle.ts
  39. 26
      BuildServer/lib/tasks/dotnetcompile.ts
  40. 11
      BuildServer/lib/tasks/dotnetnugetpack.js
  41. 11
      BuildServer/lib/tasks/dotnetnugetpack.ts
  42. 30
      BuildServer/lib/tasks/dotnetnugetprocess.js
  43. 30
      BuildServer/lib/tasks/dotnetnugetprocess.ts
  44. 28
      BuildServer/lib/tasks/dotnetnugetprocessinternal.ts
  45. 11
      BuildServer/lib/tasks/dotnetnugetpush.js
  46. 11
      BuildServer/lib/tasks/dotnetnugetpush.ts
  47. 12
      BuildServer/lib/tasks/dotnetnugetpushonly.js
  48. 13
      BuildServer/lib/tasks/dotnetnugetpushonly.ts
  49. 17
      BuildServer/lib/tasks/dotnetnugetrestore.js
  50. 17
      BuildServer/lib/tasks/dotnetnugetrestore.ts
  51. 9
      BuildServer/lib/tasks/dotnetnunit.js
  52. 9
      BuildServer/lib/tasks/dotnetnunit.ts
  53. 42
      BuildServer/lib/tasks/dotnetnunitall.js
  54. 40
      BuildServer/lib/tasks/dotnetnunitall.ts
  55. 51
      BuildServer/lib/tasks/dotnetpackwebapp.js
  56. 47
      BuildServer/lib/tasks/dotnetpackwebapp.ts
  57. 71
      BuildServer/lib/tasks/dotnetrewrite.js
  58. 69
      BuildServer/lib/tasks/dotnetrewrite.ts
  59. 19
      BuildServer/lib/tasks/echo.js
  60. 17
      BuildServer/lib/tasks/echo.ts
  61. 32
      BuildServer/lib/tasks/eslintbrowser.js
  62. 30
      BuildServer/lib/tasks/eslintbrowser.ts
  63. 38
      BuildServer/lib/tasks/eslintbrowserall.js
  64. 36
      BuildServer/lib/tasks/eslintbrowserall.ts
  65. 13
      BuildServer/lib/tasks/index.js
  66. 16
      BuildServer/lib/tasks/index.ts
  67. 3
      BuildServer/lib/tasks/noop.js
  68. 3
      BuildServer/lib/tasks/noop.ts
  69. 28
      BuildServer/lib/tasks/packform.js
  70. 28
      BuildServer/lib/tasks/packform.ts
  71. 7
      BuildServer/lib/tasks/parallel.js
  72. 7
      BuildServer/lib/tasks/parallel.ts
  73. 7
      BuildServer/lib/tasks/sequential.js
  74. 7
      BuildServer/lib/tasks/sequential.ts
  75. 23
      BuildServer/lib/tasks/uglifyjs.js
  76. 20
      BuildServer/lib/tasks/uglifyjs.ts
  77. 37
      BuildServer/lib/tasks/uglifyjsall.js
  78. 35
      BuildServer/lib/tasks/uglifyjsall.ts
  79. 22
      BuildServer/lib/tasks/writefile.js
  80. 20
      BuildServer/lib/tasks/writefile.ts
  81. 24
      BuildServer/lib/tasks/zip.js
  82. 22
      BuildServer/lib/tasks/zip.ts
  83. 128
      BuildServer/package.json
  84. 16
      BuildServer/routes/artifact.js
  85. 16
      BuildServer/routes/artifact.ts
  86. 9
      BuildServer/routes/index.js
  87. 11
      BuildServer/routes/index.ts
  88. 22
      BuildServer/routes/manual.js
  89. 22
      BuildServer/routes/manual.ts
  90. 76
      BuildServer/routes/postreceive.ts
  91. 24
      BuildServer/routes/release.ts
  92. 59
      BuildServer/routes/status.ts
  93. 10
      BuildServer/settings.ts.example
  94. 19
      BuildServer/tsconfig.json
  95. 10
      BuildServer/tslint.json
  96. 5
      BuildServer/typings.json

@ -17,5 +17,6 @@ node_modules
data
*.crt
settings.js
settings.ts
iisnode

@ -1,55 +0,0 @@
"use strict";
// Express application entry point (pre-TypeScript version; superseded by app.ts in this commit).
const realFs = require("fs");
const fs = require("graceful-fs");
// Patch the core fs module in place so every consumer gets graceful-fs behaviour.
fs.gracefulify(realFs);
const express = require("express");
const routes = require("./routes");
const http = require("http");
const path = require("path");
const serveFavicon = require("serve-favicon");
const morgan = require("morgan");
const bodyParser = require("body-parser");
const methodOverride = require("method-override");
const serveStatic = require("serve-static");
const errorhandler = require("errorhandler");
const settings = require("./settings");
const app = express();
// PORT environment variable wins over the configured port.
app.set("port", process.env.PORT || settings.port); // eslint-disable-line no-process-env
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "jade");
// Paths used by the build pipeline: git repositories, temporary checkouts, build outputs.
app.set("gitpath", settings.gitpath);
app.set("tmpcodepath", settings.tmpcodepath);
app.set("releasepath", settings.releasepath);
app.use(serveFavicon(path.join(__dirname, "public/images/favicon.png")));
app.use(morgan("dev"));
// Webhook payloads can be large; the JSON body limit is raised accordingly.
app.use(bodyParser.json({ "limit": "10mb" }));
app.use(bodyParser.urlencoded({ "extended": false }));
app.use(methodOverride());
app.use(serveStatic(path.join(__dirname, "public")));
// Verbose error pages only in development.
if (app.get("env") === "development") {
app.use(errorhandler());
}
// Route table: status pages, GitHub webhook, manual build trigger, release artifacts.
app.route("/").get(routes.index);
app.route("/github/postreceive")
.post(routes.postreceive)
.get((req, res) => res.send("Only automated POST requests are allowed for postreceive route"));
app.route("/manual")
.get(routes.manual.get)
.post(routes.manual.post);
app.route("/status/:owner/:reponame/:branch/:rev?").get(routes.status.page);
app.route("/pos-github.payonline.ru/*").get(routes.status.pageFromGithub);
app.route("/status.svg").get(routes.status.image);
app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release);
app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact);
http.createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));

@ -0,0 +1,55 @@
"use strict";
// Express application entry point (TypeScript successor of app.js).
import * as fs from "fs";
import { gracefulify } from "graceful-fs";
// Patch the core fs module in place so every consumer gets graceful-fs behaviour.
gracefulify(fs);
import { json as bodyJson, urlencoded as bodyUrlencoded } from "body-parser";
import * as errorhandler from "errorhandler";
import * as express from "express";
import { createServer } from "http";
import * as methodOverride from "method-override";
import * as morgan from "morgan";
import { join } from "path";
import * as serveFavicon from "serve-favicon";
import * as serveStatic from "serve-static";
import * as routes from "./routes";
import settings from "./settings";
const app = express();
// NOTE(review): unlike the removed app.js, the PORT environment variable is no
// longer consulted here — confirm this behavior change is intentional.
app.set("port", settings.port);
app.set("views", join(__dirname, "views"));
app.set("view engine", "jade");
// Paths used by the build pipeline: git repositories, temporary checkouts, build outputs.
app.set("gitpath", settings.gitpath);
app.set("tmpcodepath", settings.tmpcodepath);
app.set("releasepath", settings.releasepath);
app.use(serveFavicon(join(__dirname, "public/images/favicon.png")));
app.use(morgan("dev"));
// Webhook payloads can be large; the JSON body limit is raised accordingly.
app.use(bodyJson({ limit: "10mb" }));
app.use(bodyUrlencoded({ extended: false }));
app.use(methodOverride());
app.use(serveStatic(join(__dirname, "public")));
// Verbose error pages only in development.
if (app.get("env") === "development") {
app.use(errorhandler());
}
// Route table: status pages, GitHub webhook, manual build trigger, release artifacts.
app.route("/").get(routes.index);
app.route("/github/postreceive")
.post(routes.postreceive)
.get((req, res) => res.send("Only automated POST requests are allowed for postreceive route"));
app.route("/manual")
.get(routes.manual.get)
.post(routes.manual.post);
app.route("/status/:owner/:reponame/:branch/:rev?").get(routes.status.page);
app.route("/pos-github.payonline.ru/*").get(routes.status.pageFromGithub);
app.route("/status.svg").get(routes.status.image);
app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release);
app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact);
createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));

@ -1,36 +0,0 @@
"use strict";
const fs = require("fs");
const path = require("path");
const zlib = require("zlib");
const glob = require("glob");
const async = require("async");
const settings = require("./settings");
const streamsNumber = 100;
glob("**\\report.json", { "cwd": settings.releasepath }, (globErr, files) => {
if (globErr) {
return console.log(globErr);
}
return async.parallelLimit(files.map((file) => (callback) => {
const originalPath = path.join(settings.releasepath, file);
const newPath = `${originalPath}.gz`;
console.log(file);
fs.createReadStream(originalPath)
.pipe(zlib.createGzip())
.pipe(fs.createWriteStream(newPath))
.on("error", callback)
.on("finish", () => {
fs.unlink(originalPath, callback);
});
}), streamsNumber, (err) => {
if (err) {
console.log(err);
}
console.log("Done");
});
});

@ -0,0 +1,64 @@
// Ambient types shared across the BuildServer modules.

// A single build diagnostic plus the task-path prefix it originated from.
interface Message {
readonly message: string;
readonly prefix: string;
}
// Leaf node of the message tree: an optional list of plain message strings.
interface PartialMessagesLeaf {
readonly $messages?: string[];
}
// Recursive node: property names map to nested message subtrees or message lists.
interface PartialMessagesRecursive {
readonly [propName: string]: Messages | string[] | Message[]; // workaround for compatibility with PartialMessagesLeaf and PartialMessagesRoot
}
// Root node carries the flattened list of every message in the tree.
interface PartialMessagesRoot {
readonly $allMessages: Message[];
}
type Messages = PartialMessagesLeaf & PartialMessagesRecursive;
type MessagesRoot = PartialMessagesLeaf & PartialMessagesRecursive & PartialMessagesRoot;
// Aggregated outcome of a build, bucketed by severity.
interface ReportResult {
readonly errors: MessagesRoot;
readonly warns: MessagesRoot;
readonly infos: MessagesRoot;
readonly messages: MessagesRoot;
}
// Persisted build report (serialized by report-processor into report.json.gz).
interface Report {
readonly date: number;
readonly err?: string;
readonly result?: ReportResult;
}
// Completion callback for a task; receives an error string.
interface TaskProcessorCallback {
(err: string): void;
}
// Minimal logging/context surface available to tasks.
interface TaskProcessorCore {
readonly onError: (message: string | Error, prefix?: string) => void;
readonly onWarn: (message: string, prefix?: string) => void;
readonly onInfo: (message: string, prefix?: string) => void;
readonly context?: any;
}
// Full processor: runs the current task, recurses into subtasks, and finishes.
interface TaskProcessor extends TaskProcessorCore {
readonly process: () => void;
readonly processTask: (task: TaskInfo, innerCallback: TaskProcessorCallback) => void;
readonly done: () => void;
}
// Declarative description of a task (parsed from a repository's mbs.json).
interface TaskInfo {
name?: string;
type: string;
params: any;
}
// Task factory: given params and a processor, returns the runnable task body.
interface Task {
(params: any, processor: TaskProcessor): () => void;
}
// Registry of available tasks keyed by type name.
interface Tasks {
readonly [taskName: string]: Task;
}

@ -1,14 +1,16 @@
"use strict";
const path = require("path");
const fs = require("fs");
const fse = require("fs-extra");
const async = require("async");
const gitLoader = require("./git/loader");
const processor = require("./task-processor");
const reportProcessor = require("./report-processor");
const mailSender = require("./mail-sender");
const settings = require("../settings");
import { parallel, queue } from "async";
import { exists, readFile, writeFileSync } from "fs";
import { mkdirsSync, remove } from "fs-extra";
import * as JSONParse from "json-parse-safe";
import { join } from "path";
import settings from "../settings";
import { gitLoader } from "./git/loader";
import { send as sendMail } from "./mail-sender";
import { writeReport } from "./report-processor";
import { processTask } from "./task-processor";
const codePostfix = "";
const mailLazinessLevel = 1000;
@ -34,12 +36,12 @@ const createBuildDoneMessage = (isSuccess, name) => {
const notifyStatus = (options, notifyStatusCallback) => {
const status = {
"description": String(options.description || "").substr(0, maxDescriptionLength),
"owner": options.owner,
"repo": options.reponame,
"sha": options.hash,
"state": options.state,
"target_url": `${settings.siteRoot}status/${options.owner}/${options.reponame}/${options.hash}`
description: String(options.description || "").substr(0, maxDescriptionLength),
owner: options.owner,
repo: options.reponame,
sha: options.hash,
state: options.state,
target_url: `${settings.siteRoot}status/${options.owner}/${options.reponame}/${options.hash}`,
};
settings.createGithub(options.owner).repos.createStatus(status, (createStatusErr) => {
@ -62,26 +64,18 @@ const wrapGitLoader = (skipGitLoader) => {
return (gitLoaderOptions, gitLoaderCallback) => process.nextTick(gitLoaderCallback);
};
const safeParseJson = (data) => {
try {
return { "parsed": JSON.parse(data) };
} catch (err) {
return { err };
}
};
const build = (options, buildCallback) => {
export const build = (options, buildCallback) => {
const url = options.url;
const owner = options.owner;
const reponame = options.reponame;
const rev = options.rev;
const branch = options.branch;
const skipGitLoader = options.skipGitLoader;
const local = path.join(options.app.get("gitpath"), "r");
const tmp = path.join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength));
const local = join(options.app.get("gitpath"), "r");
const tmp = join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength));
const exported = tmp + codePostfix;
const release = path.join(options.app.get("releasepath"), owner, reponame, branch, rev);
const statusQueue = async.queue((task, queueCallback) => task(queueCallback), 1);
const release = join(options.app.get("releasepath"), owner, reponame, branch, rev);
const statusQueue = queue((task: (callback: any) => void, queueCallback) => task(queueCallback), 1);
const actualGitLoader = wrapGitLoader(skipGitLoader);
const date = new Date();
const versionMajor = date.getFullYear();
@ -92,18 +86,18 @@ const build = (options, buildCallback) => {
const versionInfo = `${version}; built from ${rev}; repository: ${owner}/${reponame}; branch: ${branch}`;
statusQueue.push((queueCallback) => notifyStatus({
"description": "Preparing to build...",
"hash": rev,
description: "Preparing to build...",
hash: rev,
owner,
reponame,
"state": "pending"
state: "pending",
}, queueCallback));
fse.mkdirsSync(release);
mkdirsSync(release);
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev);
fse.mkdirsSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs"));
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch);
writeFileSync(join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev);
mkdirsSync(join(options.app.get("releasepath"), owner, reponame, "$revs"));
writeFileSync(join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch);
const createErrorMessageForMail = (doneErr) => {
if (!doneErr) {
@ -121,37 +115,37 @@ const build = (options, buildCallback) => {
return result.messages.$allMessages.map((msg) => `${msg.prefix}\t${msg.message}`).join("\r\n");
};
const done = (doneErr, result) => {
const allErrors = ((result || {}).errors || {}).$allMessages || [];
const allWarns = ((result || {}).warns || {}).$allMessages || [];
const allInfos = ((result || {}).infos || {}).$allMessages || [];
const errorMessage = (allErrors[0] || {}).message || doneErr;
const warnMessage = (allWarns[0] || {}).message;
const infoMessage = (allInfos[allInfos.length - 1] || {}).message;
const done = (doneErr, result?: ReportResult) => {
// Pull the flattened per-severity message lists off the build result.
// Fixed copy-paste bug: warns/infos previously read result.errors.$allMessages
// instead of their own buckets.
const allErrors = (result && result.errors && result.errors.$allMessages) || [];
const allWarns = (result && result.warns && result.warns.$allMessages) || [];
const allInfos = (result && result.infos && result.infos.$allMessages) || [];
const errorMessage = (allErrors[0] && allErrors[0].message) || doneErr;
const warnMessage = allWarns[0] && allWarns[0].message;
const infoMessage = allInfos[allInfos.length - 1] && allInfos[allInfos.length - 1].message;
reportProcessor.writeReport(release, doneErr, result, (writeErr) => {
statusQueue.push((queueCallback) => async.parallel([
writeReport(release, doneErr, result, (writeErr) => {
statusQueue.push((queueCallback) => parallel([
(parallelCallback) => notifyStatus({
"description": errorMessage || warnMessage || infoMessage || "Success",
"hash": rev,
description: errorMessage || warnMessage || infoMessage || "Success",
hash: rev,
owner,
reponame,
"state": createFinalState(!doneErr)
state: createFinalState(!doneErr),
}, parallelCallback),
(parallelCallback) => mailSender.send({
"from": settings.smtp.sender,
"headers": { "X-Laziness-level": mailLazinessLevel },
"subject": createBuildDoneMessage(doneErr, `${owner}/${reponame}/${branch}`),
"text": `Build status URL: ${settings.siteRoot}status/${owner}/${reponame}/${rev}\r\n\r\n${createErrorMessageForMail(doneErr)}${createResultMessageForMail(result)}`,
"to": settings.smtp.receiver
(parallelCallback) => sendMail({
from: settings.smtp.sender,
headers: { "X-Laziness-level": mailLazinessLevel },
subject: createBuildDoneMessage(doneErr, `${owner}/${reponame}/${branch}`),
text: `Build status URL: ${settings.siteRoot}status/${owner}/${reponame}/${rev}\r\n\r\n${createErrorMessageForMail(doneErr)}${createResultMessageForMail(result)}`,
to: settings.smtp.receiver,
}, parallelCallback),
(parallelCallback) => {
if (doneErr) {
return process.nextTick(parallelCallback);
}
return fse.remove(tmp, parallelCallback);
}
return remove(tmp, parallelCallback);
},
], queueCallback));
if (writeErr) {
@ -165,9 +159,9 @@ const build = (options, buildCallback) => {
actualGitLoader({
branch,
exported,
"hash": rev,
hash: rev,
local,
"remote": `${url}.git`
remote: `${url}.git`,
}, (gitLoaderErr) => {
if (gitLoaderErr) {
console.log(gitLoaderErr);
@ -177,25 +171,25 @@ const build = (options, buildCallback) => {
console.log("Done loading from git");
return fs.exists(path.join(exported, "mbs.json"), (exists) => {
return exists(join(exported, "mbs.json"), (exists) => {
if (!exists) {
return done(null, "MBSNotFound");
return done("MBSNotFound");
}
return fs.readFile(path.join(exported, "mbs.json"), (readErr, data) => {
return readFile(join(exported, "mbs.json"), (readErr, data) => {
if (readErr) {
return done(readErr, "MBSUnableToRead");
return done(`MBSUnableToRead: ${readErr}`);
}
const { parsed, err } = safeParseJson(data);
const { value, error } = JSONParse(data);
if (err) {
if (error) {
console.log(`Malformed data: ${data}`);
return done(err, "MBSMalformed");
return done("MBSMalformed");
}
return processor.processTask(parsed, {
return processTask(value, {
branch,
exported,
owner,
@ -203,7 +197,7 @@ const build = (options, buildCallback) => {
reponame,
rev,
tmp,
versionInfo
versionInfo,
}, (processErr, result) => {
if (processErr) {
return done(processErr, result);
@ -215,5 +209,3 @@ const build = (options, buildCallback) => {
});
});
};
exports.build = build;

@ -1,8 +1,9 @@
"use strict";
const _ = require("underscore");
const reportProcessor = require("./report-processor");
const settings = require("../settings");
import * as _ from "underscore";
import settings from "../settings";
import { getStatusMessageFromRelease } from "./report-processor";
const featureNamePattern = /^feature-(\d+)(?:-[a-zA-Z0-9]+)+$/;
const versionNamePattern = /^v\d+(\.\d+)*$/;
@ -12,10 +13,10 @@ const httpNotFound = 404;
const maxCommentLength = 64000;
const writeComment = (options, message, callback) => options.github.issues.createComment({
"body": message,
"number": options.number,
"owner": options.baseRepoOptions.owner,
"repo": options.baseRepoOptions.reponame
body: message,
number: options.pullRequestNumber,
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
}, callback);
const closePullRequest = (options, message, callback) => writeComment(options, message, (err) => {
@ -24,17 +25,17 @@ const closePullRequest = (options, message, callback) => writeComment(options, m
}
return options.github.issues.edit({
"number": options.number,
"owner": options.baseRepoOptions.owner,
"repo": options.baseRepoOptions.reponame,
"state": "closed"
number: options.pullRequestNumber,
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
state: "closed",
}, callback);
});
const checkHasIssue = (options, issueNumber, callback) => options.github.issues.get({
"number": issueNumber,
"owner": options.baseRepoOptions.owner,
"repo": options.baseRepoOptions.reponame
number: issueNumber,
owner: options.baseRepoOptions.owner,
repo: options.baseRepoOptions.reponame,
}, (getIssueErr, result) => {
if (getIssueErr && getIssueErr.code !== httpNotFound) {
return callback(getIssueErr);
@ -52,9 +53,9 @@ const checkHasIssue = (options, issueNumber, callback) => options.github.issues.
});
const checkHasReleases = (options, callback) => options.github.repos.getReleases({
"owner": options.baseRepoOptions.owner,
"per_page": 1,
"repo": options.baseRepoOptions.reponame
owner: options.baseRepoOptions.owner,
per_page: 1,
repo: options.baseRepoOptions.reponame,
}, (getReleasesErr, result) => {
if (getReleasesErr) {
return callback(getReleasesErr);
@ -101,7 +102,8 @@ const checkPullRequest = (options, callback) => {
return closePullRequest(options, `Only merging to master or version branch is allowed; merging to '${base.branchname}' is not supported`, callback);
}
const issueNumber = featureNamePattern.exec(head.branchname)[1];
const execResult = featureNamePattern.exec(head.branchname);
const issueNumber = execResult && execResult[1];
return checkHasIssue(options, issueNumber, (hasIssueErr, hasIssue, issueTitle) => {
if (hasIssueErr) {
@ -136,11 +138,11 @@ const checkPullRequest = (options, callback) => {
});
};
exports.commentOnPullRequest = (originalOptions, callback) => {
const optionsGithub = _.extend(originalOptions, { "github": settings.createGithub(originalOptions.baseRepoOptions.owner) });
const options = _.extend(optionsGithub, { "onTenthAttempt": () => writeComment(optionsGithub, "Waiting for build to finish...") });
export const commentOnPullRequest = (originalOptions, callback) => {
const optionsGithub = _.extend(originalOptions, { github: settings.createGithub(originalOptions.baseRepoOptions.owner) });
const options = _.extend(optionsGithub, { onTenthAttempt: () => writeComment(optionsGithub, "Waiting for build to finish...", _.noop) });
return checkPullRequest(options, () => reportProcessor.getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => {
return checkPullRequest(options, () => getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => {
const escapedErr = String(statusMessageErr || "").substring(0, maxCommentLength)
.replace(/`/g, "` ");
const message = statusMessageErr

@ -1,36 +1,36 @@
"use strict";
const EventEmitter = require("events").EventEmitter; // eslint-disable-line fp/no-events
const path = require("path");
const fs = require("fs");
const async = require("async");
const Copier = require("recursive-tree-copy").Copier;
import { parallel } from "async";
import { EventEmitter } from "events";
import { mkdir, writeFile } from "fs";
import { join } from "path";
import { Copier } from "recursive-tree-copy";
const safeGetEntries = (tree) => {
const safeGetEntries = (tree, callback) => {
try {
return { "entries": tree.gitTree.entries() };
return callback(null, tree.gitTree.entries());
} catch (err) {
return { err };
return callback(err);
}
};
const gitToFsCopier = new Copier({
"concurrency": 4,
"copyLeaf": (entry, targetDir, callback) => {
const targetPath = path.join(targetDir, entry.name());
concurrency: 4,
copyLeaf: (entry, targetDir, callback) => {
const targetPath = join(targetDir, entry.name());
entry.getBlob((err, blob) => {
if (err) {
return callback(err);
}
return fs.writeFile(targetPath, blob.content(), callback);
return writeFile(targetPath, blob.content(), callback);
});
},
"createTargetTree": (tree, targetDir, callback) => {
const targetSubdir = path.join(targetDir, tree.name);
createTargetTree: (tree, targetDir, callback) => {
const targetSubdir = join(targetDir, tree.name);
fs.mkdir(targetSubdir, (err) => {
mkdir(targetSubdir, (err) => {
// Workaround for broken trees
if (err && err.code !== "EEXIST") {
return callback(err);
@ -39,18 +39,16 @@ const gitToFsCopier = new Copier({
return callback(null, targetSubdir);
});
},
"finalizeTargetTree": (targetSubdir, callback) => callback(),
"walkSourceTree": (tree) => {
finalizeTargetTree: (targetSubdir, callback) => callback(),
walkSourceTree: (tree) => {
const emitter = new EventEmitter();
process.nextTick(() => {
const { entries, err } = safeGetEntries(tree);
if (err) {
return emitter.emit("error", err);
process.nextTick(() => safeGetEntries(tree, (getEntriesErr, entries) => {
if (getEntriesErr) {
return emitter.emit("error", getEntriesErr);
}
return async.parallel(entries.map((entry) => (callback) => {
return parallel(entries.map((entry) => (callback) => {
if (entry.isTree()) {
return entry.getTree((getTreeErr, subTree) => {
if (getTreeErr) {
@ -58,8 +56,8 @@ const gitToFsCopier = new Copier({
}
emitter.emit("tree", {
"gitTree": subTree,
"name": entry.name()
gitTree: subTree,
name: entry.name(),
});
return callback();
@ -80,19 +78,19 @@ const gitToFsCopier = new Copier({
return emitter.emit("done");
});
});
}));
return emitter;
}
},
});
exports.gitToFs = (commit, exportDir, callback) => commit.getTree((err, tree) => {
export const gitToFs = (commit, exportDir, callback) => commit.getTree((err, tree) => {
if (err) {
return callback(err);
}
return gitToFsCopier.copy({
"gitTree": tree,
"name": "."
gitTree: tree,
name: ".",
}, exportDir, callback);
});

@ -1,16 +1,9 @@
"use strict";
const nodegit = require("nodegit");
const fse = require("fs-extra");
const gitToFs = require("./copy").gitToFs;
import { mkdirsSync, removeSync } from "fs-extra";
import { Remote, Repository } from "nodegit";
const mkdirs = (path) => {
fse.mkdirsSync(path); // eslint-disable-line no-sync
};
const removedirs = (path) => {
fse.removeSync(path); // eslint-disable-line no-sync
};
import { gitToFs } from "./copy";
const fixUrl = (url) => {
if (!url.startsWith("https://")) {
@ -30,25 +23,25 @@ options = {
}
*/
module.exports = (options, globalCallback) => {
export const gitLoader = (options, globalCallback) => {
const url = fixUrl(options.remote);
const path = `${options.local}/${options.hash}`;
const exported = options.exported;
removedirs(path);
mkdirs(path);
removeSync(path);
mkdirsSync(path);
console.log(`Cloning ${url} to ${path}`);
nodegit.Repository.init(path, 1)
Repository.init(path, 1)
.catch(globalCallback)
.then((repo) => nodegit.Remote.create(repo, "origin", url)
.then((repo) => Remote.create(repo, "origin", url)
.catch(globalCallback)
.then((remote) => remote.fetch([options.branch])
.catch(globalCallback)
.then((number) => {
if (number) {
return globalCallback(`Failed to fetch commit: error number ${number}`);
.then((errorNumber) => {
if (errorNumber) {
return globalCallback(`Failed to fetch commit: error number ${errorNumber}`);
}
console.log(`Cloned ${url} to ${path}`);
@ -56,8 +49,8 @@ module.exports = (options, globalCallback) => {
return repo.getCommit(options.hash)
.catch(globalCallback)
.then((commit) => {
removedirs(exported);
mkdirs(exported);
removeSync(exported);
mkdirsSync(exported);
gitToFs(commit, exported, (err, result) => {
repo.free();

@ -1,3 +0,0 @@
"use strict";
exports.send = (message, callback) => process.nextTick(callback);

@ -0,0 +1,3 @@
"use strict";
// Stub mail sender: discards the message and signals completion on the next tick.
export const send = (message, callback) => {
    process.nextTick(callback);
};

@ -1,11 +1,12 @@
"use strict";
const path = require("path");
const fs = require("fs");
const zlib = require("zlib");
const glob = require("glob");
const streamBuffers = require("stream-buffers");
const _ = require("underscore");
import { createReadStream, createWriteStream, exists } from "fs";
import * as glob from "glob";
import * as JSONParse from "json-parse-safe";
import { join } from "path";
import { ReadableStreamBuffer, WritableStreamBuffer } from "stream-buffers";
import * as _ from "underscore";
import { createGunzip, createGzip } from "zlib";
const reportFilename = "report.json.gz";
const maxAttemptsNumber = 100;
@ -15,27 +16,27 @@ const directoryCheckTimeout = 2000;
const attemptsDebugFrequency = 10;
const readableStreamBufferOptions = {
"chunkSize": 262144,
"frequency": 1
chunkSize: 262144,
frequency: 1,
};
const getAllErrors = (report) => ((report.result || {}).errors || {}).$allMessages || [];
const getAllWarns = (report) => ((report.result || {}).warns || {}).$allMessages || [];
const getAllInfos = (report) => ((report.result || {}).infos || {}).$allMessages || [];
// Flattened message lists per severity; [] when the report lacks a result or bucket.
const getAllErrors = (report: Report): Message[] => (report.result && report.result.errors && report.result.errors.$allMessages) || [];
// Fixed copy-paste bug: warns/infos previously read result.errors.$allMessages.
const getAllWarns = (report: Report): Message[] => (report.result && report.result.warns && report.result.warns.$allMessages) || [];
const getAllInfos = (report: Report): Message[] => (report.result && report.result.infos && report.result.infos.$allMessages) || [];
const writeReport = (releaseDir, err, result, callback) => {
export const writeReport = (releaseDir, err, result: ReportResult | undefined, callback) => {
const data = JSON.stringify({
"date": Date.now(),
date: Date.now(),
err,
result
});
result,
} as Report);
const readable = new streamBuffers.ReadableStreamBuffer(readableStreamBufferOptions);
const writeStream = fs.createWriteStream(path.join(releaseDir, reportFilename));
const readable = new ReadableStreamBuffer(readableStreamBufferOptions);
const writeStream = createWriteStream(join(releaseDir, reportFilename));
readable
.on("error", callback)
.pipe(zlib.createGzip())
.pipe(createGzip())
.on("error", callback)
.pipe(writeStream)
.on("error", callback)
@ -48,13 +49,13 @@ const writeReport = (releaseDir, err, result, callback) => {
readable.stop();
};
const readReport = (releaseDir, callback) => {
const readStream = fs.createReadStream(path.join(releaseDir, reportFilename));
const writable = new streamBuffers.WritableStreamBuffer();
export const readReport = (releaseDir, callback) => {
const readStream = createReadStream(join(releaseDir, reportFilename));
const writable = new WritableStreamBuffer();
readStream
.on("error", callback)
.pipe(zlib.createGunzip())
.pipe(createGunzip())
.on("error", callback)
.pipe(writable)
.on("error", callback)
@ -62,34 +63,34 @@ const readReport = (releaseDir, callback) => {
readStream.destroy();
const data = writable.getContentsAsString();
if (!data) {
return callback("ReportFileNotFound");
}
return callback(null, JSON.parse(data));
const { error, value }: { error: any, value?: Report } = JSONParse(data);
if (error) {
return callback("ReportFileMalformed");
}
return callback(null, value);
});
};
exports.readReport = readReport;
exports.writeReport = writeReport;
exports.loadReport = (app, options, callback) => {
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
export const loadReport = (app, options, callback) => {
const releaseDir = join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
glob("**", {
"cwd": releaseDir,
"mark": true
cwd: releaseDir,
mark: true,
}, (err, files) => {
if (err) {
return callback(err, options);
}
const reportFile = path.join(releaseDir, reportFilename);
const reportFile = join(releaseDir, reportFilename);
return fs.exists(reportFile, (exists) => {
if (!exists) {
return exists(reportFile, (reportFileExists) => {
if (!reportFileExists) {
return callback("ReportFileNotFound", options);
}
@ -100,21 +101,21 @@ exports.loadReport = (app, options, callback) => {
return callback(null, _.extend(options, {
files,
report
report,
}));
});
});
});
};
exports.getStatusMessageFromRelease = (app, originalOptions, callback) => {
const options = _.extend(originalOptions, { "attemptsGetReport": (Number(originalOptions.attemptsGetReport) || Number()) + 1 });
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const reportFile = path.join(releaseDir, reportFilename);
export const getStatusMessageFromRelease = (app, originalOptions, callback) => {
const options = _.extend(originalOptions, { attemptsGetReport: (Number(originalOptions.attemptsGetReport) || Number()) + 1 });
const releaseDir = join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const reportFile = join(releaseDir, reportFilename);
fs.exists(reportFile, (exists) => {
if (!exists) {
return setTimeout(() => fs.exists(releaseDir, (dirExists) => {
exists(reportFile, (reportFileExists) => {
if (!reportFileExists) {
return setTimeout(() => exists(releaseDir, (dirExists) => {
if (!dirExists) {
return callback("Release directory not found. Probably repository hooks are not configured");
}
@ -146,19 +147,17 @@ exports.getStatusMessageFromRelease = (app, originalOptions, callback) => {
const infos = getAllInfos(report);
if (errors.length + warns.length) {
return callback(_.map(
errors, (message) => `ERR: ${message.message}`
).concat(_.map(
warns, (message) => `WARN: ${message.message}`
))
.join("\r\n"));
const formattedErrors = _.map(errors, (message) => `ERR: ${message.message}`);
const formattedWarns = _.map(warns, (message) => `WARN: ${message.message}`);
return callback(formattedErrors.concat(formattedWarns).join("\r\n"));
}
if (!report.result || report.err) {
return callback(`CRITICAL ERROR: ${report.err}`);
}
return callback(null, (infos[infos.length - 1] || { "message": "OK" }).message);
return callback(null, (infos[infos.length - 1] || { message: "OK" }).message);
}), reportReadTimeout);
});
};

@ -1,19 +1,20 @@
"use strict";
const path = require("path");
const fs = require("fs");
const _ = require("underscore");
const reportProcessor = require("./report-processor");
import { exists, readFile } from "fs";
import { join } from "path";
import * as _ from "underscore";
import { loadReport } from "./report-processor";
const addBranchInfo = (app, options, callback) => {
const branchFile = path.join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`);
const branchFile = join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`);
fs.exists(branchFile, (exists) => {
exists(branchFile, (exists) => {
if (!exists) {
return callback("BranchFileNotFound", options);
}
return fs.readFile(branchFile, (err, data) => {
return readFile(branchFile, (err, data) => {
if (err) {
return callback(err, options);
}
@ -24,21 +25,21 @@ const addBranchInfo = (app, options, callback) => {
return callback(null, _.extend(options, {
branch,
branchName
branchName,
}));
});
});
};
const addRevInfo = (app, options, callback) => {
const revFile = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id");
const revFile = join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id");
fs.exists(revFile, (exists) => {
exists(revFile, (exists) => {
if (!exists) {
return callback("RevFileNotFound", options);
}
return fs.readFile(revFile, (err, data) => {
return readFile(revFile, (err, data) => {
if (err) {
return callback(err, options);
}
@ -56,30 +57,30 @@ const parseOptions = (app, options, callback) => {
}
const result = {
"owner": options.owner,
"reponame": options.reponame
owner: options.owner,
reponame: options.reponame,
};
if (options.rev) {
return addBranchInfo(app, _.extend(result, { "rev": options.rev }), callback);
return addBranchInfo(app, _.extend(result, { rev: options.rev }), callback);
}
if (/^[\da-f]{40}$/i.test(options.branchName)) {
return addBranchInfo(app, _.extend(result, { "rev": options.branchName }), callback);
return addBranchInfo(app, _.extend(result, { rev: options.branchName }), callback);
}
const branchName = options.branchName || "master";
return addRevInfo(app, _.extend(result, {
"branch": `refs/heads/${branchName}`,
branchName
branch: `refs/heads/${branchName}`,
branchName,
}), callback);
};
exports.getReport = (app, options, callback) => parseOptions(app, options, (err, result) => {
export const getReport = (app, options, callback) => parseOptions(app, options, (err, result) => {
if (err) {
return callback(err, {});
}
return reportProcessor.loadReport(app, result, callback);
return loadReport(app, result, callback);
});

@ -1,96 +0,0 @@
"use strict";
const _ = require("underscore");
const tasks = require("./tasks");
// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
const TaskProcessor = function (task, outerProcessor, callback) {
if (!this) {
return new TaskProcessor(task);
}
const that = this;
const createTaskWorker = () => tasks[task.type](task.params || {}, that);
const errors = [];
const process = () => createTaskWorker().process();
const getOuterPrefix = (prefix) => {
if (task.name && prefix) {
return `${task.name}/${prefix}`;
}
return String(task.name || "") + String(prefix || "");
};
const onError = (message, prefix) => {
errors.push(message);
outerProcessor.onError(message, getOuterPrefix(prefix));
};
const onWarn = (message, prefix) => outerProcessor.onWarn(message, getOuterPrefix(prefix));
const onInfo = (message, prefix) => outerProcessor.onInfo(message, getOuterPrefix(prefix));
const processTask = (innerTask, innerCallback) => {
const innerProcessor = new TaskProcessor(innerTask, that, innerCallback);
innerProcessor.process();
};
const done = () => callback(errors.join("\r\n"));
that.process = process;
that.onError = onError;
that.onWarn = onWarn;
that.onInfo = onInfo;
that.processTask = processTask;
that.done = done;
that.context = outerProcessor.context;
};
const pushMessage = (list, message, parts, index) => {
if (!index) {
list.$allMessages = list.$allMessages || []; // eslint-disable-line fp/no-mutation
list.$allMessages.push({ // eslint-disable-line fp/no-mutating-methods
message,
"prefix": parts.join("/")
});
}
list.$messages = list.$messages || []; // eslint-disable-line fp/no-mutation
if (index === parts.length) {
return list.$messages.push(message); // eslint-disable-line fp/no-mutating-methods
}
return pushMessage(list, message, parts, index + 1);
};
const addFlag = (flags) => (flagName) => {
flags[flagName] = true; // eslint-disable-line fp/no-mutation
};
const containsFlag = (flags) => (flagName) => flags[flagName];
exports.processTask = (task, context, callback) => {
const errors = {};
const warns = {};
const infos = {};
const messages = {};
const messageProcessor = (list) => (message, prefix) => {
const parts = prefix.split("/");
pushMessage(list, message, parts, 0);
pushMessage(messages, message, parts, 0);
};
const flags = {};
const processor = new TaskProcessor(task, {
"context": _.extend(context, {
"addFlag": addFlag(flags),
"containsFlag": containsFlag(flags)
}),
"onError": messageProcessor(errors),
"onInfo": messageProcessor(infos),
"onWarn": messageProcessor(warns)
}, (err) => callback(err, {
errors,
infos,
messages,
warns
}));
processor.process();
};

@ -0,0 +1,94 @@
"use strict";
import * as _ from "underscore";
import tasks from "./tasks";
// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
const createTaskProcessor = (task: TaskInfo, outerProcessor: TaskProcessorCore, callback: TaskProcessorCallback) => {
    // Errors reported by this task (and, via bubbling, by its subtasks).
    const collectedErrors: string[] = [];

    // Prefix a child's message path with this task's name, e.g. "build/step".
    const getOuterPrefix = (prefix) => (task.name && prefix)
        ? `${task.name}/${prefix}`
        : String(task.name || "") + String(prefix || "");

    // `result` is referenced from its own closures (process/processTask),
    // so it is declared before the literal is assigned.
    let result: TaskProcessor;
    result = {
        context: outerProcessor.context,
        // Completion: callback receives all collected errors joined by CRLF ("" when none).
        done: () => callback(collectedErrors.join("\r\n")),
        onError: (message, prefix) => {
            collectedErrors.push(message);
            outerProcessor.onError(message, getOuterPrefix(prefix));
        },
        onWarn: (message, prefix) => outerProcessor.onWarn(message, getOuterPrefix(prefix)),
        onInfo: (message, prefix) => outerProcessor.onInfo(message, getOuterPrefix(prefix)),
        // Instantiate the worker for this task type and run it.
        process: () => tasks[task.type](task.params || {}, result)(),
        // Subtasks get their own processor that reports back through this one.
        processTask: (innerTask, innerCallback) => createTaskProcessor(innerTask, result, innerCallback).process(),
    };
    return result;
};
// Records `message` in the nested message tree `list`, keyed by the prefix
// path `parts`. The root node additionally collects every message (with its
// full prefix) in $allMessages; the leaf node collects it in $messages.
// Returns the new length of the leaf's $messages array.
const pushMessage = (list, message, parts, index) => {
    if (index === 0) {
        list.$allMessages.push({
            message,
            prefix: parts.join("/"),
        });
    }
    // Walk (lazily creating) the nested node for each remaining prefix part.
    let node = list;
    for (let i = index; i < parts.length; i += 1) {
        const key = parts[i];
        if (!node[key]) {
            node[key] = {};
        }
        node = node[key];
    }
    if (!node.$messages) {
        node.$messages = [];
    }
    return node.$messages.push(message);
};
// Curried helper: addFlag(flags)(name) records that `name` was raised.
const addFlag = (flagStore) => (name) => {
    flagStore[name] = true;
};
// Curried helper: containsFlag(flags)(name) reports whether `name` was raised.
const containsFlag = (flagStore) => (name) => flagStore[name];
// Runs a task tree rooted at `task` and reports the outcome to `callback` as
// (err, { errors, infos, warns, messages }) where each value is a nested
// message tree (see pushMessage) and `messages` merges all severities.
export const processTask = (task, context, callback) => {
    const emptyRoot = (): MessagesRoot => ({ $allMessages: [] });
    const errors = emptyRoot();
    const warns = emptyRoot();
    const infos = emptyRoot();
    const messages = emptyRoot();
    // Build-wide one-shot flags shared by all tasks via the context.
    const flags = {};
    // Route each message into its severity tree and into the combined tree.
    const record = (list) => (message, prefix) => {
        const parts = prefix.split("/");
        pushMessage(list, message, parts, 0);
        pushMessage(messages, message, parts, 0);
    };
    const rootProcessor = createTaskProcessor(task, {
        context: _.extend(context, {
            addFlag: addFlag(flags),
            containsFlag: containsFlag(flags),
        }),
        onError: record(errors),
        onInfo: record(infos),
        onWarn: record(warns),
    }, (err) => callback(err, {
        errors,
        infos,
        messages,
        warns,
    }));
    rootProcessor.process();
};

@ -1,31 +0,0 @@
"use strict";
const glob = require("glob");
module.exports = (params, processor) => ({
"process": () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
"cwd": processor.context.exported,
"dot": true
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
if (!files || !files.length) {
return processor.done();
}
return processor.processTask({
"params": {
"tasks": files.map((file) => ({
"name": file,
"params": { "filename": file },
"type": "deletefromcode"
}))
},
"type": "parallel"
}, processor.done.bind(processor));
})
});

@ -0,0 +1,29 @@
"use strict";
import * as glob from "glob";
// Deletes .NET intermediate build artifacts (obj/Debug|Release dll/pdb/xml)
// from the exported sources after a build.
export default ((params, processor) => () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
    cwd: processor.context.exported,
    dot: true,
}, (err, files) => {
    if (err) {
        processor.onError(err);
        return processor.done();
    }
    if (!files || !files.length) {
        return processor.done();
    }
    // One "deletefromcode" subtask per artifact, executed in parallel.
    const deletionTasks = files.map((file) => ({
        name: file,
        params: { filename: file },
        type: "deletefromcode",
    }));
    return processor.processTask({
        params: { tasks: deletionTasks },
        type: "parallel",
    }, processor.done);
})) as Task;

@ -1,9 +1,9 @@
"use strict";
module.exports = (params, processor) => {
export default ((params, processor) => {
const condition = (!params.owner || params.owner === processor.context.owner)
&& (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch);
const task = (condition && params.task) || params.otherwise;
return { "process": () => processor.processTask(task || { "type": "noop" }, processor.done.bind(processor)) };
};
return () => processor.processTask(task || { type: "noop" }, processor.done);
}) as Task;

@ -1,23 +0,0 @@
"use strict";
const path = require("path");
const fse = require("fs-extra");
module.exports = (params, processor) => ({
"process": () => {
const sourceFilePath = path.join(processor.context.exported, params.filename);
const targetFilePath = path.join(processor.context.release, params.filename);
processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`);
fse.copy(sourceFilePath, targetFilePath, (err) => {
if (err) {
processor.onError(`Unable to copy file: ${err}`);
} else {
processor.onInfo("Copied file");
}
return processor.done();
});
}
});

@ -0,0 +1,21 @@
"use strict";
import { copy } from "fs-extra";
import { join } from "path";
// Copies a single file (params.filename) from the exported sources into the
// release directory, reporting success or failure before completing.
export default ((params, processor) => () => {
    const source = join(processor.context.exported, params.filename);
    const target = join(processor.context.release, params.filename);
    processor.onInfo(`Copying ${source} to ${target}`);
    copy(source, target, (err) => {
        if (err) {
            processor.onError(`Unable to copy file: ${err}`);
        } else {
            processor.onInfo("Copied file");
        }
        return processor.done();
    });
}) as Task;

@ -1,31 +0,0 @@
"use strict";
const glob = require("glob");
module.exports = (params, processor) => ({
"process": () => glob(params.mask, {
"cwd": processor.context.exported,
"dot": true
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
if (!files || !files.length) {
return processor.done();
}
return processor.processTask({
"params": {
"tasks": files.map((file) => ({
"name": file,
"params": { "filename": file },
"type": "copy"
}))
},
"type": "parallel"
}, processor.done.bind(processor));
})
});

@ -0,0 +1,29 @@
"use strict";
import * as glob from "glob";
// Copies every file matching params.mask from the exported sources into the
// release directory via one "copy" subtask per file.
export default ((params, processor) => () => glob(params.mask, {
    cwd: processor.context.exported,
    dot: true,
}, (err, files) => {
    if (err) {
        processor.onError(err);
        return processor.done();
    }
    if (!files || !files.length) {
        return processor.done();
    }
    // Subtasks run in parallel; each one copies a single matched file.
    const copyTasks = files.map((file) => ({
        name: file,
        params: { filename: file },
        type: "copy",
    }));
    return processor.processTask({
        params: { tasks: copyTasks },
        type: "parallel",
    }, processor.done);
})) as Task;

@ -1,37 +0,0 @@
"use strict";
const fs = require("fs");
const path = require("path");
const cssnano = require("cssnano");
module.exports = (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
fs.readFile(filePath, (readErr, css) => {
if (readErr) {
processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);
return processor.done();
}
return cssnano.process(css)
.catch((cssErr) => {
processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
processor.done();
})
.then((result) => {
fs.writeFile(filePath, result.css, (writeErr) => {
if (writeErr) {
processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
} else {
processor.onInfo(`Saved uglified stylesheet for ${params.filename}; uglified length: ${result.css.length}`);
}
processor.done();
});
});
});
}
});

@ -0,0 +1,34 @@
"use strict";
import { process as cssnanoProcess } from "cssnano";
import { readFile, writeFile } from "fs";
import { join } from "path";
// Minifies a single stylesheet (params.filename) in place with cssnano.
export default ((params, processor) => () => {
    const filePath = join(processor.context.exported, params.filename);
    readFile(filePath, (readErr, css) => {
        if (readErr) {
            processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);
            return processor.done();
        }
        // Two-callback `then` instead of the original `.catch().then()`: that
        // chain ran the success handler even after a cssnano failure, throwing
        // on `result.css` and signalling completion twice.
        return cssnanoProcess(css)
            .then((result) => {
                writeFile(filePath, result.css, (writeErr) => {
                    if (writeErr) {
                        processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
                    } else {
                        processor.onInfo(`Saved uglified stylesheet for ${params.filename}; uglified length: ${result.css.length}`);
                    }
                    processor.done();
                });
            }, (cssErr) => {
                processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
                processor.done();
            });
    });
}) as Task;

@ -1,36 +0,0 @@
"use strict";
const glob = require("glob");
const flagDoneName = "cssnanoallDone";
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.addFlag(flagDoneName);
glob("**/*.css", {
"cwd": processor.context.exported,
"dot": true
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
return processor.processTask({
"params": {
"tasks": files.map((file) => ({
"name": file,
"params": { "filename": file },
"type": "cssnano"
}))
},
"type": (params.preventParallelTests && "sequential") || "parallel"
}, processor.done.bind(processor));
});
}
});

@ -0,0 +1,34 @@
"use strict";
import * as glob from "glob";
const flagDoneName = "cssnanoallDone";
// Minifies every .css file in the exported sources; guarded by a build-wide
// flag because running it twice would re-minify already-minified output.
export default ((params, processor) => () => {
    if (processor.context.containsFlag(flagDoneName)) {
        processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json");
    }
    processor.context.addFlag(flagDoneName);
    glob("**/*.css", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);
            return processor.done();
        }
        // One "cssnano" subtask per stylesheet; sequential when parallel runs are disabled.
        const minifyTasks = files.map((file) => ({
            name: file,
            params: { filename: file },
            type: "cssnano",
        }));
        return processor.processTask({
            params: { tasks: minifyTasks },
            type: (params.preventParallelTests && "sequential") || "parallel",
        }, processor.done);
    });
}) as Task;

@ -1,22 +0,0 @@
"use strict";
const path = require("path");
const fse = require("fs-extra");
module.exports = (params, processor) => ({
"process": () => {
const sourceFilePath = path.join(processor.context.exported, params.filename);
processor.onInfo(`Deleting ${sourceFilePath}`);
fse.remove(sourceFilePath, (err) => {
if (err) {
processor.onError(`Unable to delete file: ${err}`);
} else {
processor.onInfo("Deleted file");
}
return processor.done();
});
}
});

@ -0,0 +1,20 @@
"use strict";
import { remove } from "fs-extra";
import { join } from "path";
// Removes a single path (params.filename) from the exported sources.
export default ((params, processor) => () => {
    const targetPath = join(processor.context.exported, params.filename);
    processor.onInfo(`Deleting ${targetPath}`);
    remove(targetPath, (err) => {
        if (err) {
            processor.onError(`Unable to delete file: ${err}`);
        } else {
            processor.onInfo("Deleted file");
        }
        return processor.done();
    });
}) as Task;

@ -1,17 +0,0 @@
"use strict";
const sequential = require("./sequential");
module.exports = (params, processor) => sequential({
"tasks": [
{
"name": "build",
params,
"type": "dotnetbuildwithoutcleanup"
},
{
"name": "cleanup",
"type": "cleanupafterdotnetbuild"
}
]
}, processor);

@ -0,0 +1,17 @@
"use strict";
import sequential from "./sequential";
// Full .NET build pipeline: compile, then clean up intermediate artifacts.
export default ((params, processor) => {
    const buildStep = {
        name: "build",
        params,
        type: "dotnetbuildwithoutcleanup",
    };
    const cleanupStep = {
        name: "cleanup",
        type: "cleanupafterdotnetbuild",
    };
    return sequential({ tasks: [buildStep, cleanupStep] }, processor);
}) as Task;

@ -1,22 +0,0 @@
"use strict";
const sequential = require("./sequential");
module.exports = (params, processor) => sequential({
"tasks": [
{
"name": "build",
params,
"type": "dotnetbuildwithoutcleanup"
},
{
"name": "test",
params,
"type": "dotnetnunitall"
},
{
"name": "cleanup",
"type": "cleanupafterdotnetbuild"
}
]
}, processor);

@ -0,0 +1,22 @@
"use strict";
import sequential from "./sequential";
// Sequential .NET pipeline: compile, run all NUnit tests, then clean up.
export default ((params, processor) => {
    const buildStep = {
        name: "build",
        params,
        type: "dotnetbuildwithoutcleanup",
    };
    const testStep = {
        name: "test",
        params,
        type: "dotnetnunitall",
    };
    const cleanupStep = {
        name: "cleanup",
        type: "cleanupafterdotnetbuild",
    };
    return sequential({ tasks: [buildStep, testStep, cleanupStep] }, processor);
}) as Task;

@ -1,82 +0,0 @@
"use strict";
const spawn = require("child_process").spawn;
const streamBuffers = require("stream-buffers");
const settings = require("../../settings");
const wrapBuilder = (builder, input, onExit) => {
const resultBuffer = new streamBuffers.WritableStreamBuffer();
const errorBuffer = new streamBuffers.WritableStreamBuffer();
builder.stdout.on("data", (data) => {
resultBuffer.write(data);
});
builder.stderr.on("data", (data) => {
errorBuffer.write(data);
});
builder.on("exit", (code) => {
resultBuffer.end();
errorBuffer.end();
onExit(code, resultBuffer.getContentsAsString(), errorBuffer.getContentsAsString());
});
builder.stdin.write(input);
builder.stdin.end();
};
const safeParseJson = (data) => {
try {
return { "parsed": JSON.parse(data) };
} catch (err) {
return { err };
}
};
module.exports = (params, processor) => ({
"process": () => {
const input = JSON.stringify(params);
const builder = spawn(settings.builderExecutable, [params.command]);
processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${input}`);
wrapBuilder(builder, input, (code, result, builderError) => {
if (code || builderError) {
processor.onError(`Return code is ${code}\r\n${builderError}`);
return processor.done();
}
const { parsed, err } = safeParseJson(result);
if (err || !parsed || !parsed.Messages) {
processor.onError(`Malformed JSON: ${err}`);
processor.onInfo(result);
return processor.done();
}
const messages = parsed.Messages;
messages.forEach((message) => {
if (!message) {
return processor.onError("Message is null");
}
switch (message.Type) {
case "info":
return processor.onInfo(message.Body);
case "warn":
return processor.onWarn(message.Body);
default:
return processor.onError(message.Body);
}
});
processor.onInfo(`Done DotNetBuilderWrapper processing (at ${new Date().toISOString()})`);
return processor.done();
});
}
});

@ -0,0 +1,89 @@
"use strict";
import { spawn } from "child_process";
import * as JSONParse from "json-parse-safe";
import { WritableStreamBuffer } from "stream-buffers";
import settings from "../../settings";
// Feeds `input` to the builder process's stdin, collects its stdout and
// stderr, and calls onExit(exitCode, stdoutText, stderrText) once all three
// are known. Any stream/process failure is routed to onExit as well.
const wrapBuilder = (builder, input, onExit) => {
    // Buffer an entire readable stream into a string.
    const collectStream = (stream) => new Promise((resolve, reject) => {
        const streamBuffer = new WritableStreamBuffer();
        stream
            .on("error", reject)
            .pipe(streamBuffer)
            .on("error", reject)
            .on("finish", () => {
                streamBuffer.end();
                resolve(streamBuffer.getContentsAsString());
            });
    });
    const stdoutPromise = collectStream(builder.stdout);
    const stderrPromise = collectStream(builder.stderr);
    const builderPromise = new Promise((resolve, reject) => {
        // Without an "error" handler a failed spawn would leave this promise
        // pending forever ("exit" may never fire); stdin errors (e.g. EPIPE
        // when the child dies early) would crash the process as unhandled
        // 'error' events.
        builder.on("error", reject);
        builder.stdin.on("error", reject);
        builder.stdin.write(input);
        builder.stdin.end();
        builder.on("exit", resolve);
    });
    Promise.all([stdoutPromise, stderrPromise, builderPromise]).then((values) => {
        const [result, builderError, code] = values;
        onExit(code, result, builderError);
    }).catch((err) => onExit(0, undefined, err));
};
// Runs the external .NET builder with the JSON-serialized params on stdin and
// relays its structured Messages into the build report at their severities.
export default ((params, processor) => () => {
    const input = JSON.stringify(params);
    const builder = spawn(settings.builderExecutable, [params.command]);
    processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${input}`);
    wrapBuilder(builder, input, (code, result, builderError) => {
        if (code || builderError) {
            processor.onError(`Return code is ${code}\r\n${builderError}`);
            return processor.done();
        }
        const { value, error } = JSONParse(result);
        if (error || !value || !value.Messages) {
            processor.onError(`Malformed JSON: ${error}`);
            processor.onInfo(result);
            return processor.done();
        }
        // Relay one builder message at its reported severity (default: error).
        const relay = (message) => {
            if (!message) {
                return processor.onError("Message is null");
            }
            switch (message.Type) {
            case "info":
                return processor.onInfo(message.Body);
            case "warn":
                return processor.onWarn(message.Body);
            default:
                return processor.onError(message.Body);
            }
        };
        value.Messages.forEach(relay);
        processor.onInfo(`Done DotNetBuilderWrapper processing (at ${new Date().toISOString()})`);
        return processor.done();
    });
}) as Task;

@ -1,42 +0,0 @@
"use strict";
const sequential = require("./sequential");
const createTasks = function *(params) {
if (!params.skipMbsCheckStyle) {
yield {
params,
"type": "dotnetcheckstyle"
};
}
yield {
params,
"type": "dotnetrewrite"
};
if (!params.skipNugetRestore) {
yield {
params,
"type": "dotnetnugetrestore"
};
}
yield {
"params": {
"configuration": params.configuration,
"forceCodeAnalysis": params.forceCodeAnalysis,
"ignoreCodeAnalysis": params.ignoreCodeAnalysis,
"skipCodeSigning": params.skipCodeSigning,
"solution": params.solution,
"target": "Rebuild"
},
"type": "dotnetcompile"
};
};
module.exports = (params, processor) => {
const tasks = Array.from(createTasks(params));
return sequential({ tasks }, processor);
};

@ -0,0 +1,42 @@
"use strict";
import sequential from "./sequential";
// Yields the sub-task descriptors for a .NET build in execution order,
// honouring the skip flags in `params`.
const createTasks = function *(params) {
    if (!params.skipMbsCheckStyle) {
        yield { params, type: "dotnetcheckstyle" };
    }
    yield { params, type: "dotnetrewrite" };
    if (!params.skipNugetRestore) {
        yield { params, type: "dotnetnugetrestore" };
    }
    // The compile step receives an explicit whitelist of settings rather than
    // the full params object.
    const compileParams = {
        configuration: params.configuration,
        forceCodeAnalysis: params.forceCodeAnalysis,
        ignoreCodeAnalysis: params.ignoreCodeAnalysis,
        skipCodeSigning: params.skipCodeSigning,
        solution: params.solution,
        target: "Rebuild",
    };
    yield { params: compileParams, type: "dotnetcompile" };
};
// Materializes the generated task list and runs it sequentially.
export default ((params, processor) => sequential({ tasks: [...createTasks(params)] }, processor)) as Task;

@ -1,76 +0,0 @@
"use strict";
const path = require("path");
const fs = require("fs");
const async = require("async");
const glob = require("glob");
const autoGeneratedMarker
= "//------------------------------------------------------------------------------\n"
+ "// <auto-generated>";
const flagDoneName = "dotnetcheckerDone";
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
return processor.done();
}
processor.context.addFlag(flagDoneName);
return glob("**/*.cs", { "cwd": processor.context.exported }, (globErr, files) => {
if (globErr) {
processor.onError(globErr);
return processor.done();
}
processor.onInfo(`Found ${files.length} .cs files`);
if (!files || !files.length) {
processor.onWarn("No .cs files found");
return processor.done();
}
const processFile = (data, file) => {
if (data.includes("\r\n")) {
return processor.onError(`Windows-style EOL (0D0A) found in file ${file}`);
}
if (!params.ignoreCodeStyle) {
if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.startsWith(autoGeneratedMarker)) {
return processor.onInfo(`Skipping auto-generated file ${file}`);
}
if (data.includes("\t") && data.includes(" ")) {
processor.onError(`Both tabs and spaces found in file ${file}`);
}
if (data.includes("\t")) {
processor.onError(`Tabs found in file ${file}`);
}
}
return processor.onInfo(`Checked file ${file}`);
};
return async.parallel(files.map((file) => (callback) => fs.readFile(
path.join(processor.context.exported, file),
{ "encoding": "utf8" },
(readErr, data) => {
if (readErr) {
processor.onError(`Unable to check file ${file}: ${readErr}`);
return callback(readErr);
}
processFile(data, file);
return callback();
}
)), processor.done.bind(processor));
});
}
});

@ -0,0 +1,74 @@
"use strict";
import { parallel } from "async";
import { readFile } from "fs";
import * as glob from "glob";
import { join } from "path";
const autoGeneratedMarker
= "//------------------------------------------------------------------------------\n"
+ "// <auto-generated>";
const flagDoneName = "dotnetcheckerDone";
// Style-checks every .cs file in the exported sources: rejects Windows line
// endings and (unless params.ignoreCodeStyle) tabs / mixed indentation.
// Runs at most once per build, guarded by a build-wide flag.
export default ((params, processor) => () => {
    if (processor.context.containsFlag(flagDoneName)) {
        return processor.done();
    }
    processor.context.addFlag(flagDoneName);
    return glob("**/*.cs", { cwd: processor.context.exported }, (globErr, files) => {
        if (globErr) {
            processor.onError(globErr);
            return processor.done();
        }
        // Guard before touching files.length: the original logged the count
        // first, which would throw if glob ever produced a falsy files value.
        if (!files || !files.length) {
            processor.onWarn("No .cs files found");
            return processor.done();
        }
        processor.onInfo(`Found ${files.length} .cs files`);
        // Check one file's contents; reports at most one error/info per rule.
        const processFile = (data, file) => {
            if (data.includes("\r\n")) {
                return processor.onError(`Windows-style EOL (0D0A) found in file ${file}`);
            }
            if (!params.ignoreCodeStyle) {
                // NOTE(review): substr(1, …) presumably allows for a BOM before
                // the auto-generated marker — confirm against real input files.
                if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.startsWith(autoGeneratedMarker)) {
                    return processor.onInfo(`Skipping auto-generated file ${file}`);
                }
                if (data.includes("\t") && data.includes(" ")) {
                    processor.onError(`Both tabs and spaces found in file ${file}`);
                }
                if (data.includes("\t")) {
                    processor.onError(`Tabs found in file ${file}`);
                }
            }
            return processor.onInfo(`Checked file ${file}`);
        };
        return parallel(files.map((file) => (callback) => readFile(
            join(processor.context.exported, file),
            { encoding: "utf8" },
            (readErr, data) => {
                if (readErr) {
                    processor.onError(`Unable to check file ${file}: ${readErr}`);
                    return callback(readErr);
                }
                processFile(data, file);
                return callback();
            },
        )), processor.done);
    });
}) as Task;

@ -1,11 +1,11 @@
"use strict";
const path = require("path");
const _ = require("underscore");
const settings = require("../../settings");
const dotnetbuilderwrapper = require("./dotnetbuilderwrapper");
import { join } from "path";
import * as _ from "underscore";
import settings from "../../settings";
import dotnetbuilderwrapper from "./dotnetbuilderwrapper";
module.exports = (params, processor) => {
export default ((params, processor) => {
if (settings.isCodeAnalysisUnsupported && params.forceCodeAnalysis) {
processor.onError("Code analysis is not supported");
@ -17,7 +17,7 @@ module.exports = (params, processor) => {
return {};
}
return { "SigningKey": settings.codeSigningKeyFile };
return { SigningKey: settings.codeSigningKeyFile };
};
const skipCodeAnalysis = settings.isCodeAnalysisUnsupported
@ -25,13 +25,13 @@ module.exports = (params, processor) => {
|| (settings.ignoreCodeAnalysisByDefault && !params.forceCodeAnalysis);
const compileParams = {
"Configuration": params.configuration,
"OutputDirectory": params.overrideOutputDirectory,
"SkipCodeAnalysis": skipCodeAnalysis,
"SolutionPath": path.join(processor.context.exported, params.solution),
"Target": params.target,
"command": "compile"
Configuration: params.configuration,
OutputDirectory: params.overrideOutputDirectory,
SkipCodeAnalysis: skipCodeAnalysis,
SolutionPath: join(processor.context.exported, params.solution),
Target: params.target,
command: "compile",
};
return dotnetbuilderwrapper(_.extend(compileParams, getAdditionalSigningParameters()), processor);
};
}) as Task;

@ -1,11 +0,0 @@
"use strict";
const _ = require("underscore");
const dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");
module.exports = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({
"params": { "filename": nupkg },
"type": "copy"
})
}), processor);

@ -0,0 +1,11 @@
"use strict";
import * as _ from "underscore";
import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";
// Packs a nuget package and copies the resulting .nupkg into the release.
export default ((params, processor) => {
    const getFinalTask = (nupkg) => ({
        params: { filename: nupkg },
        type: "copy",
    });
    return dotnetnugetprocessinternal(_.extendOwn(params, { getFinalTask }), processor);
}) as Task;

@ -1,30 +0,0 @@
"use strict";
const conditional = require("./conditional");
module.exports = (params, processor) => conditional({
"branch": "master",
"otherwise": {
"name": "nuget-pack",
"params": {
"major": params.major,
"name": params.nuspecName,
"nuspec": `${params.nuspecName}.nuspec`,
"version": params.version,
"withoutCommitSha": params.withoutCommitSha
},
"type": "dotnetnugetpack"
},
"owner": params.masterRepoOwner,
"task": {
"name": "nuget-push",
"params": {
"major": params.major,
"name": params.nuspecName,
"nuspec": `${params.nuspecName}.nuspec`,
"version": params.version,
"withoutCommitSha": params.withoutCommitSha
},
"type": "dotnetnugetpush"
}
}, processor);

@ -0,0 +1,30 @@
"use strict";
import conditional from "./conditional";
// On the master repo's "master" branch the package is pushed to the feed;
// on any other branch/owner it is only packed.
export default ((params, processor) => {
    // Fresh object per call: downstream tasks mutate their params via extendOwn.
    const nugetParams = () => ({
        major: params.major,
        name: params.nuspecName,
        nuspec: `${params.nuspecName}.nuspec`,
        version: params.version,
        withoutCommitSha: params.withoutCommitSha,
    });
    return conditional({
        branch: "master",
        otherwise: {
            name: "nuget-pack",
            params: nugetParams(),
            type: "dotnetnugetpack",
        },
        owner: params.masterRepoOwner,
        task: {
            name: "nuget-push",
            params: nugetParams(),
            type: "dotnetnugetpush",
        },
    }, processor);
}) as Task;

@ -1,7 +1,7 @@
"use strict";
const path = require("path");
const sequential = require("./sequential");
import { join } from "path";
import sequential from "./sequential";
const postfixLength = 16;
const fourDigits = 10000;
@ -15,7 +15,7 @@ const addPostfix = (version, params, processor) => {
return `${version}-r${processor.context.rev.substr(0, postfixLength)}`;
};
module.exports = (params, processor) => {
export default ((params, processor) => {
const date = new Date();
const major = params.major || "0";
const minor = (date.getFullYear() * fourDigits) + ((date.getMonth() + 1) * twoDigits) + date.getDate();
@ -24,18 +24,18 @@ module.exports = (params, processor) => {
const nupkg = `${params.name}.${version}.nupkg`;
return sequential({
"tasks": [
tasks: [
{
"params": {
"BaseDirectory": processor.context.exported,
"OutputDirectory": processor.context.exported,
"SpecPath": path.join(processor.context.exported, params.nuspec),
"Version": version,
"command": "nugetpack"
params: {
BaseDirectory: processor.context.exported,
OutputDirectory: processor.context.exported,
SpecPath: join(processor.context.exported, params.nuspec),
Version: version,
command: "nugetpack",
},
"type": "dotnetbuilderwrapper"
type: "dotnetbuilderwrapper",
},
params.getFinalTask(nupkg)
]
params.getFinalTask(nupkg),
],
}, processor);
};
}) as Task;

@ -1,11 +0,0 @@
"use strict";
const _ = require("underscore");
const dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");
module.exports = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({
"params": { "Package": nupkg },
"type": "dotnetnugetpushonly"
})
}), processor);

@ -0,0 +1,11 @@
"use strict";
import * as _ from "underscore";
import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";
export default ((params, processor) => {
    // Final step after packing: push the produced .nupkg to the NuGet feed.
    const getFinalTask = (nupkg) => ({
        params: { Package: nupkg },
        type: "dotnetnugetpushonly",
    });

    return dotnetnugetprocessinternal(_.extendOwn(params, { getFinalTask }), processor);
}) as Task;

@ -1,12 +0,0 @@
"use strict";
const path = require("path");
const dotnetbuilderwrapper = require("./dotnetbuilderwrapper");
const settings = require("../../settings");
module.exports = (params, processor) => dotnetbuilderwrapper({
"ApiKey": settings.nugetApiKey,
"NugetHost": settings.nugetHost,
"Package": path.join(processor.context.exported, params.Package),
"command": "nugetpush"
}, processor);

@ -0,0 +1,13 @@
"use strict";
import { join } from "path";
import settings from "../../settings";
import dotnetbuilderwrapper from "./dotnetbuilderwrapper";
export default ((params, processor) => {
    // Delegate the actual "nuget push" to the external .NET builder wrapper.
    const wrapperParams = {
        ApiKey: settings.nugetApiKey,
        NugetHost: settings.nugetHost,
        Package: join(processor.context.exported, params.Package),
        command: "nugetpush",
    };

    return dotnetbuilderwrapper(wrapperParams, processor);
}) as Task;

@ -1,17 +0,0 @@
"use strict";
const path = require("path");
const sequential = require("./sequential");
module.exports = (params, processor) => sequential({
"tasks": [
{
"params": {
"BaseDirectory": processor.context.exported,
"SolutionPath": path.join(processor.context.exported, params.solution),
"command": "nugetrestore"
},
"type": "dotnetbuilderwrapper"
}
]
}, processor);

@ -0,0 +1,17 @@
"use strict";
import { join } from "path";
import sequential from "./sequential";
export default ((params, processor) => {
    // Run "nuget restore" for the given solution via the external .NET builder wrapper.
    const restoreTask = {
        params: {
            BaseDirectory: processor.context.exported,
            SolutionPath: join(processor.context.exported, params.solution),
            command: "nugetrestore",
        },
        type: "dotnetbuilderwrapper",
    };

    return sequential({ tasks: [restoreTask] }, processor);
}) as Task;

@ -1,9 +0,0 @@
"use strict";
const path = require("path");
const dotNetBuilderWrapper = require("./dotnetbuilderwrapper");
module.exports = (params, processor) => dotNetBuilderWrapper({
"TestLibraryPath": path.join(processor.context.exported, params.assembly),
"command": "nunit"
}, processor);

@ -0,0 +1,9 @@
"use strict";
import { join } from "path";
import dotNetBuilderWrapper from "./dotnetbuilderwrapper";
export default ((params, processor) => {
    // Run NUnit over a single test assembly via the external .NET builder wrapper.
    const testLibraryPath = join(processor.context.exported, params.assembly);

    return dotNetBuilderWrapper({
        TestLibraryPath: testLibraryPath,
        command: "nunit",
    }, processor);
}) as Task;

@ -1,42 +0,0 @@
"use strict";
const glob = require("glob");
const flagDoneName = "dotnetnunitallDone";
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.addFlag(flagDoneName);
glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", {
"cwd": processor.context.exported,
"dot": true
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
if (!files || !files.length) {
processor.onError(`No test assemblies found in ${processor.context.exported}`);
return processor.done();
}
return processor.processTask({
"params": {
"tasks": files.map((file) => ({
"name": file,
"params": { "assembly": file },
"type": "dotnetnunit"
}))
},
"type": (params.preventParallelTests && "sequential") || "parallel"
}, processor.done.bind(processor));
});
}
});

@ -0,0 +1,40 @@
"use strict";
import * as glob from "glob";
const flagDoneName = "dotnetnunitallDone";
export default ((params, processor) => () => {
    // Guard against double execution: this task globs for *all* test assemblies,
    // so a second run would duplicate every NUnit invocation.
    if (processor.context.containsFlag(flagDoneName)) {
        processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
    }
    processor.context.addFlag(flagDoneName);
    glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);
            return processor.done();
        }
        if (!files || !files.length) {
            processor.onError(`No test assemblies found in ${processor.context.exported}`);
            return processor.done();
        }
        // One "dotnetnunit" sub-task per discovered assembly; run in parallel
        // unless the caller asked for sequential tests.
        return processor.processTask({
            params: {
                tasks: files.map((file) => ({
                    name: file,
                    params: { assembly: file },
                    type: "dotnetnunit",
                })),
            },
            type: (params.preventParallelTests && "sequential") || "parallel",
            // Bind restored from the pre-TypeScript version: "done" is an instance
            // method, and passing it unbound would lose its "this".
        }, processor.done.bind(processor));
    });
}) as Task;

@ -1,51 +0,0 @@
"use strict";
const path = require("path");
const fs = require("fs");
const Mustache = require("mustache");
const sequential = require("./sequential");
// eslint-disable-next-line no-sync
const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });
module.exports = (params, processor) => sequential({
"tasks": [
{
"params": {
"data": Mustache.render(msbuildTemplate, params),
"filename": "MakePackage.msbuild"
},
"type": "writefile"
},
{
"params": {
"data": Mustache.render(deployTemplate, params),
"filename": "Deploy.bat"
},
"type": "writefile"
},
{
"params": {
"data": Mustache.render(versionTemplate, params),
"filename": "version.aspx"
},
"type": "writefile"
},
{
"params": {
"configuration": params.configuration,
"isCodeAnalysisUnsupported": params.isCodeAnalysisUnsupported,
"overrideOutputDirectory": processor.context.release,
"skipCodeSigning": params.skipCodeSigning,
"solution": "MakePackage.msbuild",
"target": "Package"
},
"type": "dotnetcompile"
}
]
}, processor);

@ -0,0 +1,47 @@
"use strict";
import { readFileSync } from "fs";
import { render } from "mustache";
import { join } from "path";
import sequential from "./sequential";
// Templates are static build scaffolding, loaded once at module load.
const loadTemplate = (name) => readFileSync(join(__dirname, name), { encoding: "utf8" });
const msbuildTemplate = loadTemplate("/dotnetpackwebapp.template.msbuild");
const deployTemplate = loadTemplate("/dotnetpackwebapp.template.bat");
const versionTemplate = loadTemplate("/dotnetpackwebapp.template.version.aspx");
export default ((params, processor) => {
    // Produce a "writefile" sub-task that renders a mustache template with the
    // task params and writes it into the exported working copy.
    const renderToFile = (template, filename) => ({
        params: {
            data: render(template, params),
            filename,
        },
        type: "writefile",
    });

    // Write the packaging scaffolding, then run msbuild's Package target on it.
    return sequential({
        tasks: [
            renderToFile(msbuildTemplate, "MakePackage.msbuild"),
            renderToFile(deployTemplate, "Deploy.bat"),
            renderToFile(versionTemplate, "version.aspx"),
            {
                params: {
                    configuration: params.configuration,
                    isCodeAnalysisUnsupported: params.isCodeAnalysisUnsupported,
                    overrideOutputDirectory: processor.context.release,
                    skipCodeSigning: params.skipCodeSigning,
                    solution: "MakePackage.msbuild",
                    target: "Package",
                },
                type: "dotnetcompile",
            },
        ],
    }, processor);
}) as Task;

@ -1,71 +0,0 @@
"use strict";
const path = require("path");
const fs = require("fs");
const async = require("async");
const glob = require("glob");
const settings = require("../../settings");
const flagDoneName = "dotnetrewriterDone";
const processAssemblyInfo = (params, processor, appendInformationalVersion) => (originalContent, cb) => {
const processInternalsVisible = (content) => {
if (params.skipCodeSigning || settings.skipCodeSigning) {
return content;
}
return content.replace(
/InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
(match, p1) => `InternalsVisibleTo("${p1},PublicKey=${settings.codeSigningPublicKey}")`
);
};
const processInformationalVersion = (content) => {
if (!appendInformationalVersion) {
return content;
}
return `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]\n`;
};
return cb(null, processInformationalVersion(processInternalsVisible(originalContent)));
};
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
return processor.done();
}
processor.context.addFlag(flagDoneName);
return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { "cwd": processor.context.exported }, (globErr, files) => {
if (globErr) {
processor.onError(globErr);
return processor.done();
}
processor.onInfo(`Found ${files.length} AssemblyInfo.cs files`);
if (!files || !files.length) {
processor.onWarn("No AssemblyInfo.cs found");
return processor.done();
}
return async.parallel(files.map((file) => (callback) => async.waterfall([
fs.readFile.bind(null, path.join(processor.context.exported, file), { "encoding": "utf8" }),
processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
fs.writeFile.bind(null, path.join(processor.context.exported, file))
], (err) => {
if (err) {
processor.onError(`Unable to rewrite file ${file}: ${err}`);
} else {
processor.onInfo(`Rewritten file ${file}`);
}
callback(err);
})), processor.done.bind(processor));
});
}
});

@ -0,0 +1,69 @@
"use strict";
import { parallel, waterfall } from "async";
import { readFile, writeFile } from "fs";
import * as glob from "glob";
import { join } from "path";
import settings from "../../settings";
const flagDoneName = "dotnetrewriterDone";
// Async-waterfall step: transform one AssemblyInfo/InternalsVisible source file.
const processAssemblyInfo = (params, processor, appendInformationalVersion) => (originalContent, cb) => {
    // Re-sign InternalsVisibleTo attributes with the build server's public key,
    // unless code signing is disabled for this build or globally.
    const signInternalsVisible = (content) => {
        if (params.skipCodeSigning || settings.skipCodeSigning) {
            return content;
        }
        return content.replace(
            /InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
            (match, assemblyName) => `InternalsVisibleTo("${assemblyName},PublicKey=${settings.codeSigningPublicKey}")`,
        );
    };
    // Append AssemblyInformationalVersion, only for AssemblyInfo.cs files.
    const appendVersion = (content) => {
        if (!appendInformationalVersion) {
            return content;
        }
        return `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]\n`;
    };
    return cb(null, appendVersion(signInternalsVisible(originalContent)));
};
export default ((params, processor) => () => {
    // Rewriting is guarded: only the first dotnetrewriter task in a build runs.
    if (processor.context.containsFlag(flagDoneName)) {
        return processor.done();
    }
    processor.context.addFlag(flagDoneName);
    return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { cwd: processor.context.exported }, (globErr, files) => {
        if (globErr) {
            processor.onError(globErr);
            return processor.done();
        }
        processor.onInfo(`Found ${files.length} AssemblyInfo.cs files`);
        if (!files || !files.length) {
            processor.onWarn("No AssemblyInfo.cs found");
            return processor.done();
        }
        // read -> transform -> write each file; all files processed in parallel.
        return parallel(files.map((file) => (callback) => waterfall([
            readFile.bind(null, join(processor.context.exported, file), { encoding: "utf8" }),
            processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
            writeFile.bind(null, join(processor.context.exported, file)),
        ], (err) => {
            if (err) {
                processor.onError(`Unable to rewrite file ${file}: ${err}`);
            } else {
                processor.onInfo(`Rewritten file ${file}`);
            }
            callback(err);
            // Bind restored from the pre-TypeScript version: "done" is an instance
            // method; an unbound reference would lose its "this" when async calls it.
        })), processor.done.bind(processor));
    });
}) as Task;

@ -1,19 +0,0 @@
"use strict";
module.exports = (params, processor) => ({
"process": () => {
if (params.error) {
processor.onError(params.error);
}
if (params.warn) {
processor.onWarn(params.warn);
}
if (params.info) {
processor.onInfo(params.info);
}
processor.done();
}
});

@ -0,0 +1,17 @@
"use strict";
export default ((params, processor) => () => {
    // Relay whichever messages were supplied, each at its own severity,
    // then signal completion.
    const { error, warn, info } = params;
    if (error) {
        processor.onError(error);
    }
    if (warn) {
        processor.onWarn(warn);
    }
    if (info) {
        processor.onInfo(info);
    }
    processor.done();
}) as Task;

@ -1,32 +0,0 @@
"use strict";
const path = require("path");
const CLIEngine = require("eslint").CLIEngine;
const settings = require("../../settings");
const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig });
const errorSeverity = 2;
module.exports = (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
const result = cli.executeOnFiles([filePath]);
processor.onInfo(`ESLinted ${params.filename}`);
result.results.forEach((subresult) => {
subresult.messages.forEach((message) => {
const messageText = `${params.filename}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;
if (message.fatal || message.severity === errorSeverity) {
processor.onError(messageText);
} else {
processor.onWarn(messageText);
}
});
});
processor.done();
}
});

@ -0,0 +1,30 @@
"use strict";
import { CLIEngine } from "eslint";
import { join } from "path";
import settings from "../../settings";
// Single shared CLI engine, configured with the browser-targeted ESLint ruleset.
const cli = new CLIEngine({ configFile: settings.eslintBrowserConfig });
const errorSeverity = 2;
export default ((params, processor) => () => {
    const filePath = join(processor.context.exported, params.filename);
    const report = cli.executeOnFiles([filePath]);
    processor.onInfo(`ESLinted ${params.filename}`);
    for (const fileResult of report.results) {
        for (const message of fileResult.messages) {
            const messageText = `${params.filename}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;
            // Fatal parse errors and severity-2 rules fail the build; others only warn.
            if (message.fatal || message.severity === errorSeverity) {
                processor.onError(messageText);
            } else {
                processor.onWarn(messageText);
            }
        }
    }
    processor.done();
}) as Task;

@ -1,38 +0,0 @@
"use strict";
const glob = require("glob");
const flagDoneName = "eslintbrowserallDone";
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.addFlag(flagDoneName);
const excludeFiles = params.excludeFiles || [];
glob("**/*.js", {
"cwd": processor.context.exported,
"dot": true
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
return processor.processTask({
"params": {
"tasks": files.filter((file) => !excludeFiles.includes(file)).map((file) => ({
"name": file,
"params": { "filename": file },
"type": "eslintbrowser"
}))
},
"type": (params.preventParallelTests && "sequential") || "parallel"
}, processor.done.bind(processor));
});
}
});

@ -0,0 +1,36 @@
"use strict";
import * as glob from "glob";
const flagDoneName = "eslintbrowserallDone";
export default ((params, processor) => () => {
    // Guard against double execution: this task lints every .js file in the checkout.
    if (processor.context.containsFlag(flagDoneName)) {
        processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");
    }
    processor.context.addFlag(flagDoneName);
    const excludeFiles = params.excludeFiles || [];
    glob("**/*.js", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);
            return processor.done();
        }
        // One "eslintbrowser" sub-task per file (minus exclusions);
        // parallel unless the caller asked for sequential runs.
        return processor.processTask({
            params: {
                tasks: files.filter((file) => !excludeFiles.includes(file)).map((file) => ({
                    name: file,
                    params: { filename: file },
                    type: "eslintbrowser",
                })),
            },
            type: (params.preventParallelTests && "sequential") || "parallel",
            // Bind restored from the pre-TypeScript version: "done" is an instance
            // method, and passing it unbound would lose its "this".
        }, processor.done.bind(processor));
    });
}) as Task;

@ -1,13 +0,0 @@
"use strict";
// Code taken from http://stackoverflow.com/a/17204293
// eslint-disable-next-line no-sync
require("fs").readdirSync(__dirname)
.forEach((file) => {
if (file.match(/\.js$/) !== null && file !== "index.js") {
const name = file.replace(".js", "");
// eslint-disable-next-line global-require
exports[name] = require(`./${file}`);
}
});

@ -0,0 +1,16 @@
"use strict";
import { readdirSync } from "fs";
const tasks = {};
// Code taken from http://stackoverflow.com/a/17204293
// NOTE(review): tsc compiles this tree to .js, so at runtime the directory
// contains .js files and a filter on "*.ts" alone would register no tasks.
// Accept both extensions, and skip declaration files and this index module.
readdirSync(__dirname)
    .forEach((file) => {
        if (/\.[jt]s$/.test(file) && !/\.d\.ts$/.test(file) && !/^index\.[jt]s$/.test(file)) {
            const name = file.replace(/\.[jt]s$/, "");
            tasks[name] = require(`./${file}`).default;
        }
    });
export default tasks as Tasks;

@ -1,3 +0,0 @@
"use strict";
module.exports = (params, processor) => ({ "process": () => processor.done() });

@ -0,0 +1,3 @@
"use strict";
export default ((params, processor) => processor.done()) as Task;

@ -1,28 +0,0 @@
"use strict";
const sequential = require("./sequential");
module.exports = (params, processor) => sequential({
"tasks": [
{
"params": { "excludeFiles": params.eslintExcludeFiles },
"type": "eslintbrowserall"
},
{ "type": "uglifyjsall" },
{ "type": "cssnanoall" },
{
"params": {
"data": processor.context.versionInfo,
"filename": "version.txt"
},
"type": "writefile"
},
{
"params": {
"archive": `${processor.context.reponame}.zip`,
"directory": ""
},
"type": "zip"
}
]
}, processor);

@ -0,0 +1,28 @@
"use strict";
import sequential from "./sequential";
export default ((params, processor) => {
    // Standard packaging pipeline for front-end repos:
    // lint -> minify JS -> minify CSS -> stamp version -> zip the result.
    const lintTask = {
        params: { excludeFiles: params.eslintExcludeFiles },
        type: "eslintbrowserall",
    };
    const versionStampTask = {
        params: {
            data: processor.context.versionInfo,
            filename: "version.txt",
        },
        type: "writefile",
    };
    const zipTask = {
        params: {
            archive: `${processor.context.reponame}.zip`,
            directory: "",
        },
        type: "zip",
    };

    return sequential({
        tasks: [
            lintTask,
            { type: "uglifyjsall" },
            { type: "cssnanoall" },
            versionStampTask,
            zipTask,
        ],
    }, processor);
}) as Task;

@ -1,7 +0,0 @@
"use strict";
const async = require("async");
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
module.exports = (params, processor) => ({ "process": () => async.parallel(params.tasks.map(mapper(processor)), () => processor.done()) });

@ -0,0 +1,7 @@
"use strict";
import { parallel } from "async";
// Adapt a task descriptor into an async-style worker bound to this processor.
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
// Run all sub-tasks concurrently. The final callback deliberately discards
// async's (err, results) arguments and invokes "done" on its receiver — the
// pre-TypeScript behavior; a bare "processor.done" reference would lose its
// "this" and be handed the aggregate error.
export default ((params, processor) => () => parallel(params.tasks.map(mapper(processor)), () => processor.done())) as Task;

@ -1,7 +0,0 @@
"use strict";
const async = require("async");
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
module.exports = (params, processor) => ({ "process": () => async.series(params.tasks.map(mapper(processor)), () => processor.done()) });

@ -0,0 +1,7 @@
"use strict";
import { series } from "async";
// Adapt a task descriptor into an async-style worker bound to this processor.
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
// Run sub-tasks one after another. The final callback deliberately discards
// async's (err, results) arguments and invokes "done" on its receiver — the
// pre-TypeScript behavior; a bare "processor.done" reference would lose its
// "this" and be handed the aggregate error.
export default ((params, processor) => () => series(params.tasks.map(mapper(processor)), () => processor.done())) as Task;

@ -1,23 +0,0 @@
"use strict";
const fs = require("fs");
const path = require("path");
const UglifyJS = require("uglify-js");
module.exports = (params, processor) => ({
"process": () => {
const filePath = path.normalize(path.join(processor.context.exported, params.filename));
const result = UglifyJS.minify(filePath);
fs.writeFile(filePath, result.code, (err) => {
if (err) {
processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
} else {
processor.onInfo(`Saved uglified script for ${params.filename}; uglified length: ${result.code.length}`);
}
processor.done();
});
}
});

@ -0,0 +1,20 @@
"use strict";
import { writeFile } from "fs";
import { join, normalize } from "path";
import { minify } from "uglify-js";
export default ((params, processor) => () => {
    // uglify-js 2.x accepts a file path and reads the source itself;
    // the minified output overwrites the original file in place.
    const scriptPath = normalize(join(processor.context.exported, params.filename));
    const minified = minify(scriptPath);
    writeFile(scriptPath, minified.code, (err) => {
        if (err) {
            processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
        } else {
            processor.onInfo(`Saved uglified script for ${params.filename}; uglified length: ${minified.code.length}`);
        }
        processor.done();
    });
}) as Task;

@ -1,37 +0,0 @@
"use strict";
const glob = require("glob");
const doneFlagName = "uglifyjsallDone";
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(doneFlagName)) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
}
processor.context.addFlag(doneFlagName);
glob("**/*.js", {
"cwd": processor.context.exported,
"dot": true
}, (err, files) => {
if (err) {
processor.onError(err);
return processor.done();
}
return processor.processTask({
"params": {
"tasks": files.map((file) => ({
"name": file,
"params": { "filename": file },
"type": "uglifyjs"
}))
},
"type": (params.preventParallelTests && "sequential") || "parallel"
}, processor.done.bind(processor));
});
}
});

@ -0,0 +1,35 @@
"use strict";
import * as glob from "glob";
const doneFlagName = "uglifyjsallDone";
export default ((params, processor) => () => {
    // Guard against double execution: this task uglifies every .js file in the checkout.
    if (processor.context.containsFlag(doneFlagName)) {
        // Fixed copy-paste: the message previously blamed "dotnetnunitall".
        processor.onWarn("uglifyjsall task is executed more than once; this is probably a bug in your mbs.json");
    }
    processor.context.addFlag(doneFlagName);
    glob("**/*.js", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);
            return processor.done();
        }
        // One "uglifyjs" sub-task per file; parallel unless sequential was requested.
        return processor.processTask({
            params: {
                tasks: files.map((file) => ({
                    name: file,
                    params: { filename: file },
                    type: "uglifyjs",
                })),
            },
            type: (params.preventParallelTests && "sequential") || "parallel",
            // Bind restored from the pre-TypeScript version: "done" is an instance
            // method, and passing it unbound would lose its "this".
        }, processor.done.bind(processor));
    });
}) as Task;

@ -1,22 +0,0 @@
"use strict";
const fs = require("fs");
const path = require("path");
module.exports = (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
processor.onInfo(`Writing to ${filePath}`);
fs.writeFile(filePath, params.data, (err) => {
if (err) {
processor.onError(`Unable to write file: ${err}`);
} else {
processor.onInfo("Written file");
}
return processor.done();
});
}
});

@ -0,0 +1,20 @@
"use strict";
import { writeFile } from "fs";
import { join } from "path";
export default ((params, processor) => () => {
    // Write params.data into the exported working copy at params.filename.
    const targetPath = join(processor.context.exported, params.filename);
    processor.onInfo(`Writing to ${targetPath}`);
    writeFile(targetPath, params.data, (err) => {
        if (err) {
            processor.onError(`Unable to write file: ${err}`);
        } else {
            processor.onInfo("Written file");
        }
        return processor.done();
    });
}) as Task;

@ -1,24 +0,0 @@
"use strict";
const fs = require("fs");
const path = require("path");
const Archiver = require("archiver");
module.exports = (params, processor) => ({
"process": () => {
const sourceDirectoryPath = path.normalize(path.join(processor.context.exported, String(params.directory || "")));
const targetArchivePath = path.normalize(path.join(processor.context.release, params.archive));
processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`);
const output = fs.createWriteStream(targetArchivePath);
const archive = new Archiver("zip");
output.on("close", () => processor.done());
archive.on("error", (err) => processor.onError(`Error while compressing: ${err}`));
archive.pipe(output);
archive.directory(sourceDirectoryPath, false);
archive.finalize();
}
});

@ -0,0 +1,22 @@
"use strict";
import { create as createArchiver } from "archiver";
import { createWriteStream } from "fs";
import { join, normalize } from "path";
export default ((params, processor) => () => {
    // Zip a sub-directory of the exported checkout into the release directory.
    const sourceDirectoryPath = normalize(join(processor.context.exported, String(params.directory || "")));
    const targetArchivePath = normalize(join(processor.context.release, params.archive));
    processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`);
    const output = createWriteStream(targetArchivePath);
    const archive = createArchiver("zip");
    // Wrapper restored from the pre-TypeScript version: the stream would invoke
    // an unbound "done" with the stream itself as "this".
    output.on("close", () => processor.done());
    archive.on("error", (err) => processor.onError(`Error while compressing: ${err}`));
    archive.pipe(output);
    archive.directory(sourceDirectoryPath, false);
    archive.finalize();
}) as Task;

@ -4,7 +4,9 @@
"private": true,
"scripts": {
"start": "forever -c node app.js",
"test": "./node_modules/.bin/eslint ."
"build": "./node_modules/.bin/tsc -p . --noEmitOnError",
"pretest": "./node_modules/.bin/tsc -p . --noEmit",
"test": "./node_modules/.bin/tslint --config tslint.json --project tsconfig.json --type-check"
},
"dependencies": {
"archiver": "^1.3.0",
@ -15,15 +17,15 @@
"eslint": "^3.12.2",
"express": "4.14.0",
"fs-extra": "^1.0.0",
"github": "~7.1.0",
"github": "~9.0.0",
"glob": "~7.1.1",
"graceful-fs": "^4.1.11",
"jade": "*",
"json-parse-safe": "^1.0.3",
"method-override": "^2.3.7",
"morgan": "^1.7.0",
"mustache": "~2.3.0",
"nodegit": "~0.16.0",
"nodemailer": "~2.7.0",
"recursive-tree-copy": "0.0.1",
"serve-favicon": "^2.3.2",
"serve-static": "^1.11.1",
@ -31,106 +33,26 @@
"uglify-js": "^2.7.5",
"underscore": "^1.8.3"
},
"eslintConfig": {
"env": {
"node": true
},
"parserOptions": {
"ecmaVersion": 6
},
"plugins": [
"fp"
],
"extends": [
"eslint:all",
"plugin:fp/recommended"
],
"rules": {
"fp/no-unused-expression": "off",
"fp/no-nil": "off",
"fp/no-mutation": [
"error",
{
"commonjs": true
}
],
"prefer-destructuring": "off",
"quotes": [
"warn",
"double"
],
"require-jsdoc": "off",
"func-names": [
"warn",
"never"
],
"max-len": [
"warn",
{
"code": 140
}
],
"operator-linebreak": [
"warn",
"before"
],
"padded-blocks": [
"warn",
"never"
],
"dot-notation": [
"warn",
{
"allowPattern": "^[a-z]+(_[a-z]+)+$"
}
],
"linebreak-style": [
"warn",
"windows"
],
"no-console": "off",
"dot-location": [
"warn",
"property"
],
"object-curly-spacing": [
"warn",
"always"
],
"one-var": [
"warn",
{
"initialized": "never"
}
],
"no-magic-numbers": [
"warn",
{
"ignore": [
0,
1
]
}
],
"id-length": [
"warn",
{
"exceptions": [
"_"
]
}
],
"no-extra-parens": [
"warn",
"all",
{
"nestedBinaryExpressions": false
}
]
}
},
"devDependencies": {
"eslint": "^3.15.0",
"eslint-plugin-fp": "^2.3.0"
"@types/archiver": "^0.15.37",
"@types/async": "^2.0.38",
"@types/body-parser": "0.0.34",
"@types/errorhandler": "0.0.30",
"@types/express": "^4.0.35",
"@types/fs-extra": "0.0.37",
"@types/github": "0.0.0",
"@types/glob": "^5.0.30",
"@types/jade": "0.0.30",
"@types/method-override": "0.0.29",
"@types/morgan": "^1.7.32",
"@types/mustache": "^0.8.29",
"@types/node": "^7.0.5",
"@types/serve-favicon": "^2.2.28",
"@types/serve-static": "^1.7.31",
"@types/uglify-js": "^2.6.28",
"@types/underscore": "^1.7.36",
"tslint": "^4.4.2",
"tslint-eslint-rules": "^3.4.0",
"typescript": "^2.2.1"
}
}

@ -1,16 +0,0 @@
"use strict";
module.exports = (req, res) => {
const options = {
"branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch,
"file": req.params[0],
"owner": req.params.owner,
"reponame": req.params.reponame,
"rev": req.params.rev
};
const pathParts = [req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev, options.file];
res.sendfile(pathParts.join("/"));
};

@ -0,0 +1,16 @@
"use strict";
// Serve a build artifact for a specific owner/repo/branch/revision.
const artifact = (req, res) => {
    const options = {
        branch: `/refs/heads/${req.params.branch}`,
        branchName: req.params.branch,
        file: req.params[0],
        owner: req.params.owner,
        reponame: req.params.reponame,
        rev: req.params.rev,
    };
    const pathParts = [req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev, options.file];
    // NOTE(review): every path segment comes straight from the URL; confirm the
    // routing layer prevents ".." traversal. "res.sendfile" is the deprecated
    // Express alias of "res.sendFile" — verify before upgrading Express.
    res.sendfile(pathParts.join("/"));
};
export default artifact;

@ -1,9 +0,0 @@
"use strict";
exports.index = (req, res) => res.render("index", { "title": `Express<br/>\r\n${req}` });
exports.postreceive = require("./postreceive");
exports.manual = require("./manual");
exports.status = require("./status");
exports.artifact = require("./artifact");
exports.release = require("./release");

@ -0,0 +1,11 @@
"use strict";
import artifact from "./artifact";
import * as manual from "./manual";
import postreceive from "./postreceive";
import release from "./release";
import * as status from "./status";
// Landing page; the raw request is interpolated into the title (legacy debug aid).
function index(req, res) {
    return res.render("index", { title: `Express<br/>\r\n${req}` });
}
export { index, postreceive, manual, status, artifact, release };

@ -1,22 +0,0 @@
"use strict";
const _ = require("underscore");
const builder = require("../lib/builder");
exports.get = (req, res) => res.render("manual");
exports.post = (req, res) => {
const options = _.extend(req.body, {
"app": req.app,
"url": `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}`
});
builder.build(options, (err, result) => {
console.log("Done processing manual request");
console.log(`Error: ${err}`);
res.render("manual-done", {
err,
result
});
});
};

@ -0,0 +1,22 @@
"use strict";
import * as _ from "underscore";
import { build } from "../lib/builder";
// GET: show the manual-build form.
export const get = (req, res) => res.render("manual");
// POST: kick off a build from the submitted form fields.
export const post = (req, res) => {
    const options = _.extend(req.body, {
        app: req.app,
        url: `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}`,
    });
    const onBuilt = (err, result) => {
        console.log("Done processing manual request");
        console.log(`Error: ${err}`);
        res.render("manual-done", {
            err,
            result,
        });
    };
    build(options, onBuilt);
};

@ -1,24 +1,25 @@
"use strict";
const builder = require("../lib/builder");
const commenter = require("../lib/commenter");
import * as JSONParse from "json-parse-safe";
import { build } from "../lib/builder";
import { commentOnPullRequest } from "../lib/commenter";
const getBranchDescription = (options) => `${options.owner}/${options.reponame}:${options.branchname || options.branch}`;
const processPush = (req, res, payload) => {
const repository = payload.repository;
const options = {
"app": req.app,
"branch": payload.ref,
"owner": repository.owner.name,
"reponame": repository.name,
"rev": payload.after,
"url": repository.url
app: req.app,
branch: payload.ref,
owner: repository.owner.name,
reponame: repository.name,
rev: payload.after,
url: repository.url,
};
console.log(`Got push event for ${getBranchDescription(options)}`);
builder.build(options, (err, result) => {
build(options, (err, result) => {
console.log("Done processing request from GitHub");
console.log(`Error: ${err}`);
res.send(`Done processing request from GitHub\r\nError: ${err}\r\nResult: ${result}`);
@ -27,38 +28,38 @@ const processPush = (req, res, payload) => {
const processPullRequest = (req, res, payload) => {
const action = payload.action;
const number = payload.number;
const pullRequestNumber = payload.number;
const pullRequest = payload.pull_request;
const head = pullRequest.head;
const headRepo = head.repo;
const headRepoOptions = {
"branch": `refs/heads/${head.ref}`,
"branchname": head.ref,
"owner": headRepo.owner.name || headRepo.owner.login,
"reponame": headRepo.name,
"rev": head.sha,
"url": headRepo.url
branch: `refs/heads/${head.ref}`,
branchname: head.ref,
owner: headRepo.owner.name || headRepo.owner.login,
reponame: headRepo.name,
rev: head.sha,
url: headRepo.url,
};
const base = pullRequest.base;
const baseRepo = base.repo;
const baseRepoOptions = {
"branchname": base.ref,
"owner": baseRepo.owner.name || baseRepo.owner.login,
"reponame": baseRepo.name
branchname: base.ref,
owner: baseRepo.owner.name || baseRepo.owner.login,
reponame: baseRepo.name,
};
const options = {
action,
"app": req.app,
app: req.app,
baseRepoOptions,
headRepoOptions,
number
pullRequestNumber,
};
const masterOptions = {
action,
"app": req.app,
app: req.app,
baseRepoOptions,
"headRepoOptions": baseRepoOptions,
number
headRepoOptions: baseRepoOptions,
pullRequestNumber,
};
console.log(`Got pull request ${action} event, `
@ -78,25 +79,30 @@ const processPullRequest = (req, res, payload) => {
return res.send("");
}
return commenter.commentOnPullRequest(
(action === "closed" && masterOptions) || options,
(err, data) => {
if (err) {
console.log(`Unable to post comment: ${err}`);
}
res.send(err || data);
return commentOnPullRequest((action === "closed" && masterOptions) || options, (err, data) => {
if (err) {
console.log(`Unable to post comment: ${err}`);
}
);
res.send(err || data);
});
};
const getPayload = (body) => {
if (!body.payload) {
return body;
}
return JSONParse(body.payload).value;
};
module.exports = (req, res) => {
export default (req, res) => {
if (!req.body || (!req.body.payload && !req.body.repository)) {
return res.end();
}
const eventType = req.header("x-github-event");
const payload = (req.body.payload && JSON.parse(req.body.payload)) || req.body;
const payload = getPayload(req.body);
if (eventType === "push") {
return processPush(req, res, payload);

@ -1,9 +1,9 @@
"use strict";
const path = require("path");
const Archiver = require("archiver");
import { create as createArchiver } from "archiver";
import { join } from "path";
const reportProcessor = require("../lib/report-processor");
import { readReport } from "../lib/report-processor";
const getDatePart = (report) => {
if (!report.date) {
@ -23,23 +23,23 @@ const getDatePart = (report) => {
return `${year}.${month}.${day}.${hours}.${minutes}.${seconds}`;
};
module.exports = (req, res, next) => {
export default (req, res, next) => {
const options = {
"branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch,
"owner": req.params.owner,
"reponame": req.params.reponame,
"rev": req.params.rev
branch: `/refs/heads/${req.params.branch}`,
branchName: req.params.branch,
owner: req.params.owner,
reponame: req.params.reponame,
rev: req.params.rev,
};
const releasePath = path.join(req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const releasePath = join(req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
reportProcessor.readReport(releasePath, (err, report) => {
readReport(releasePath, (err, report) => {
if (err) {
return next(err);
}
const archive = new Archiver("zip");
const archive = createArchiver("zip");
archive.on("error", next);
res.attachment(`${options.reponame}.${getDatePart(report)}.${options.rev}.zip`, ".");

@ -1,8 +1,9 @@
"use strict";
const url = require("url");
const _ = require("underscore");
const statusProcessor = require("../lib/status-processor");
import * as _ from "underscore";
import { parse } from "url";
import { getReport } from "../lib/status-processor";
const parseOptionsFromReferer = (path, callback) => {
const pathParts = path.split("/").filter((value) => value);
@ -19,7 +20,7 @@ const parseOptionsFromReferer = (path, callback) => {
branchName,
owner,
reponame,
rev
rev,
});
}
@ -29,7 +30,7 @@ const parseOptionsFromReferer = (path, callback) => {
branchName,
owner,
reponame,
rev
rev,
});
};
@ -39,27 +40,27 @@ const createShowReport = (res) => (err, inputOptions) => {
res.render("status", options);
};
exports.image = (req, res) => {
export const image = (req, res) => {
const getAdditionalOptions = (err, options) => {
if (err === "ReportFileNotFound") {
return { "status": "Building" };
return { status: "Building" };
}
if (err) {
return {
"message": err,
"status": "StatusError"
message: err,
status: "StatusError",
};
}
if (options.report.result === "MBSNotFound") {
return { "status": "MBSNotUsed" };
return { status: "MBSNotUsed" };
}
if (options.report.err) {
return {
"message": options.report.err,
"status": "Error"
message: options.report.err,
status: "Error",
};
}
@ -67,8 +68,8 @@ exports.image = (req, res) => {
const [firstWarn] = options.report.result.warns.$allMessages;
return {
"message": firstWarn.message,
"status": "Warning"
message: firstWarn.message,
status: "Warning",
};
}
@ -76,12 +77,12 @@ exports.image = (req, res) => {
if (allInfos.length) {
return {
"message": allInfos[allInfos.length - 1].message,
"status": "OK"
message: allInfos[allInfos.length - 1].message,
status: "OK",
};
}
return { "status": "OK" };
return { status: "OK" };
};
const handle = (err, options) => {
@ -89,31 +90,31 @@ exports.image = (req, res) => {
res.render("status-image", _.extend(options, getAdditionalOptions(err, options)));
};
parseOptionsFromReferer(url.parse(req.headers.referer || "").pathname || "", (err, options) => {
parseOptionsFromReferer(parse(req.headers.referer || "").pathname || "", (err, options) => {
if (err) {
return handle(err, options);
}
return statusProcessor.getReport(req.app, options, handle);
return getReport(req.app, options, handle);
});
};
exports.page = (req, res) => {
export const page = (req, res) => {
const options = {
"branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch,
"owner": req.params.owner,
"reponame": req.params.reponame,
"rev": req.params.rev
branch: `/refs/heads/${req.params.branch}`,
branchName: req.params.branch,
owner: req.params.owner,
reponame: req.params.reponame,
rev: req.params.rev,
};
statusProcessor.getReport(req.app, options, createShowReport(res));
getReport(req.app, options, createShowReport(res));
};
exports.pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0], (err, options) => {
export const pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0], (err, options) => {
if (err) {
return createShowReport(err, options);
return createShowReport(res)(err, options);
}
return statusProcessor.getReport(req.app, options, createShowReport(res));
return getReport(req.app, options, createShowReport(res));
});

@ -1,6 +1,6 @@
"use strict";
const GitHubApi = require("github");
import GitHubApi = require("github");
const createGithub = () => new GitHubApi({
"debug": false,
@ -8,12 +8,13 @@ const createGithub = () => new GitHubApi({
"host": "pos-github.payonline.ru",
"pathPrefix": "/api/v3",
"protocol": "https",
"timeout": 5000,
"version": "3.0.0"
"timeout": 5000
});
module.exports = {
export default {
"builderExecutable": "../DotNetBuilder/bin/Debug/MicroBuildServer.DotNetBuilder.exe",
"codeSigningKeyFile": null,
"codeSigningPublicKey": null,
"createGithub": (repoOwner) => {
const github = createGithub();
@ -36,6 +37,7 @@ module.exports = {
"eslintBrowserConfig": "settings-eslint-browser.json",
"gitpath": "M:/g",
"ignoreCodeAnalysisByDefault": true,
"isCodeAnalysisUnsupported": false,
"nugetApiKey": "*** NUGET API KEY ***",
"nugetHost": "https://*** NUGET HOST ***/",
"port": 3000,

@ -0,0 +1,19 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es6",
"sourceMap": false,
"strictNullChecks": true,
"typeRoots": [
"node_modules/@types"
]
},
"include": [
"*.ts",
"lib/**/*.ts",
"routes/**/*.ts"
],
"exclude": [
"node_modules/**/*.ts"
]
}

@ -0,0 +1,10 @@
{
"extends": [
"tslint:latest",
"tslint-eslint-rules"
],
"rules": {
"no-console": false,
"max-line-length": false
}
}

@ -0,0 +1,5 @@
{
"dependencies": {
"debug": "registry:npm/debug#2.0.0+20160723033700"
}
}
Loading…
Cancel
Save