Switch to ES6 modules

dependabot/npm_and_yarn/BuildServer/eslint-7.2.0
Inga 🏳‍🌈 8 years ago
parent c9dfd2b5b9
commit 48de21871c
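
This commit mechanically replaces TypeScript's import/export-assignment syntax with ES6 module syntax across the BuildServer sources. In the flattened diff below, each changed line shows the old form followed by the new one, and unchanged context lines appear twice. A minimal sketch of the before/after pattern, using only forms that occur in this diff (the default import of settings relies on the matching `export default` added to settings.ts.example at the end):

    // Before: import/export assignment (the old form in each hunk).
    //   import path = require("path");
    //   import settings = require("./settings");
    //   export = (params, processor) => ({ "process": () => processor.done() });

    // After: ES6 module syntax (the new form).
    import { join } from "path";        // named import for just what is used
    import * as glob from "glob";       // namespace import where the whole module is needed
    import settings from "./settings";  // default import, backed by `export default` in settings.ts
    export default (params, processor) => ({ "process": () => processor.done() });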
50 changed files (additions + deletions per file in parentheses):

  1. BuildServer/app.ts (38)
  2. BuildServer/lib/builder.ts (48)
  3. BuildServer/lib/commenter.ts (8)
  4. BuildServer/lib/git/copy.ts (16)
  5. BuildServer/lib/git/loader.ts (26)
  6. BuildServer/lib/report-processor.ts (42)
  7. BuildServer/lib/status-processor.ts (22)
  8. BuildServer/lib/task-processor.ts (2)
  9. BuildServer/lib/tasks/cleanupafterdotnetbuild.ts (4)
  10. BuildServer/lib/tasks/conditional.ts (2)
  11. BuildServer/lib/tasks/copy.ts (12)
  12. BuildServer/lib/tasks/copyglob.ts (4)
  13. BuildServer/lib/tasks/cssnano.ts (16)
  14. BuildServer/lib/tasks/cssnanoall.ts (4)
  15. BuildServer/lib/tasks/deletefromcode.ts (10)
  16. BuildServer/lib/tasks/dotnetbuild.ts (4)
  17. BuildServer/lib/tasks/dotnetbuildandtest.ts (4)
  18. BuildServer/lib/tasks/dotnetbuilderwrapper.ts (10)
  19. BuildServer/lib/tasks/dotnetbuildwithoutcleanup.ts (4)
  20. BuildServer/lib/tasks/dotnetcheckstyle.ts (14)
  21. BuildServer/lib/tasks/dotnetcompile.ts (12)
  22. BuildServer/lib/tasks/dotnetnugetpack.ts (6)
  23. BuildServer/lib/tasks/dotnetnugetprocess.ts (4)
  24. BuildServer/lib/tasks/dotnetnugetprocessinternal.ts (8)
  25. BuildServer/lib/tasks/dotnetnugetpush.ts (6)
  26. BuildServer/lib/tasks/dotnetnugetpushonly.ts (10)
  27. BuildServer/lib/tasks/dotnetnugetrestore.ts (8)
  28. BuildServer/lib/tasks/dotnetnunit.ts (8)
  29. BuildServer/lib/tasks/dotnetnunitall.ts (4)
  30. BuildServer/lib/tasks/dotnetpackwebapp.ts (22)
  31. BuildServer/lib/tasks/dotnetrewrite.ts (18)
  32. BuildServer/lib/tasks/echo.ts (2)
  33. BuildServer/lib/tasks/eslintbrowser.ts (8)
  34. BuildServer/lib/tasks/eslintbrowserall.ts (4)
  35. BuildServer/lib/tasks/index.ts (2)
  36. BuildServer/lib/tasks/noop.ts (2)
  37. BuildServer/lib/tasks/packform.ts (4)
  38. BuildServer/lib/tasks/parallel.ts (4)
  39. BuildServer/lib/tasks/sequential.ts (4)
  40. BuildServer/lib/tasks/uglifyjs.ts (14)
  41. BuildServer/lib/tasks/uglifyjsall.ts (4)
  42. BuildServer/lib/tasks/writefile.ts (10)
  43. BuildServer/lib/tasks/zip.ts (14)
  44. BuildServer/routes/artifact.ts (2)
  45. BuildServer/routes/index.ts (10)
  46. BuildServer/routes/manual.ts (6)
  47. BuildServer/routes/postreceive.ts (10)
  48. BuildServer/routes/release.ts (12)
  49. BuildServer/routes/status.ts (14)
  50. BuildServer/settings.ts.example (7)

@ -1,37 +1,37 @@
"use strict"; "use strict";
import realFs = require("fs"); import * as realFs from "fs";
import fs = require("graceful-fs"); import * as fs from "graceful-fs";
fs.gracefulify(realFs); fs.gracefulify(realFs);
import express = require("express"); import * as express from "express";
import routes = require("./routes"); import * as routes from "./routes";
import http = require("http"); import { createServer } from "http";
import path = require("path"); import { join } from "path";
import serveFavicon = require("serve-favicon"); import * as serveFavicon from "serve-favicon";
import morgan = require("morgan"); import * as morgan from "morgan";
import bodyParser = require("body-parser"); import { json as bodyJson, urlencoded as bodyUrlencoded } from "body-parser";
import methodOverride = require("method-override"); import * as methodOverride from "method-override";
import serveStatic = require("serve-static"); import * as serveStatic from "serve-static";
import errorhandler = require("errorhandler"); import * as errorhandler from "errorhandler";
import settings = require("./settings"); import settings from "./settings";
const app = express(); const app = express();
app.set("port", settings.port); // eslint-disable-line no-process-env app.set("port", settings.port); // eslint-disable-line no-process-env
app.set("views", path.join(__dirname, "views")); app.set("views", join(__dirname, "views"));
app.set("view engine", "jade"); app.set("view engine", "jade");
app.set("gitpath", settings.gitpath); app.set("gitpath", settings.gitpath);
app.set("tmpcodepath", settings.tmpcodepath); app.set("tmpcodepath", settings.tmpcodepath);
app.set("releasepath", settings.releasepath); app.set("releasepath", settings.releasepath);
app.use(serveFavicon(path.join(__dirname, "public/images/favicon.png"))); app.use(serveFavicon(join(__dirname, "public/images/favicon.png")));
app.use(morgan("dev")); app.use(morgan("dev"));
app.use(bodyParser.json({ "limit": "10mb" })); app.use(bodyJson({ "limit": "10mb" }));
app.use(bodyParser.urlencoded({ "extended": false })); app.use(bodyUrlencoded({ "extended": false }));
app.use(methodOverride()); app.use(methodOverride());
app.use(serveStatic(path.join(__dirname, "public"))); app.use(serveStatic(join(__dirname, "public")));
if (app.get("env") === "development") { if (app.get("env") === "development") {
app.use(errorhandler()); app.use(errorhandler());
@ -52,4 +52,4 @@ app.route("/status.svg").get(routes.status.image);
app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release); app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release);
app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact); app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact);
http.createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`)); createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));

@ -1,14 +1,14 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { exists, readFile, writeFileSync } from "fs";
import fse = require("fs-extra"); import { mkdirsSync, remove } from "fs-extra";
import async = require("async"); import { parallel, queue } from "async";
import gitLoader = require("./git/loader"); import { gitLoader } from "./git/loader";
import processor = require("./task-processor"); import { processTask } from "./task-processor";
import reportProcessor = require("./report-processor"); import { writeReport } from "./report-processor";
import mailSender = require("./mail-sender"); import { send as sendMail } from "./mail-sender";
import settings = require("../settings"); import settings from "../settings";
const codePostfix = ""; const codePostfix = "";
const mailLazinessLevel = 1000; const mailLazinessLevel = 1000;
@ -77,11 +77,11 @@ export const build = (options, buildCallback) => {
const rev = options.rev; const rev = options.rev;
const branch = options.branch; const branch = options.branch;
const skipGitLoader = options.skipGitLoader; const skipGitLoader = options.skipGitLoader;
const local = path.join(options.app.get("gitpath"), "r"); const local = join(options.app.get("gitpath"), "r");
const tmp = path.join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength)); const tmp = join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength));
const exported = tmp + codePostfix; const exported = tmp + codePostfix;
const release = path.join(options.app.get("releasepath"), owner, reponame, branch, rev); const release = join(options.app.get("releasepath"), owner, reponame, branch, rev);
const statusQueue = async.queue((task, queueCallback) => task(queueCallback), 1); const statusQueue = queue((task, queueCallback) => task(queueCallback), 1);
const actualGitLoader = wrapGitLoader(skipGitLoader); const actualGitLoader = wrapGitLoader(skipGitLoader);
const date = new Date(); const date = new Date();
const versionMajor = date.getFullYear(); const versionMajor = date.getFullYear();
@ -99,11 +99,11 @@ export const build = (options, buildCallback) => {
"state": "pending" "state": "pending"
}, queueCallback)); }, queueCallback));
fse.mkdirsSync(release); mkdirsSync(release);
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev); writeFileSync(join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev);
fse.mkdirsSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs")); mkdirsSync(join(options.app.get("releasepath"), owner, reponame, "$revs"));
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch); writeFileSync(join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch);
const createErrorMessageForMail = (doneErr) => { const createErrorMessageForMail = (doneErr) => {
if (!doneErr) { if (!doneErr) {
@ -129,8 +129,8 @@ export const build = (options, buildCallback) => {
const warnMessage = (allWarns[0] || {}).message; const warnMessage = (allWarns[0] || {}).message;
const infoMessage = (allInfos[allInfos.length - 1] || {}).message; const infoMessage = (allInfos[allInfos.length - 1] || {}).message;
reportProcessor.writeReport(release, doneErr, result, (writeErr) => { writeReport(release, doneErr, result, (writeErr) => {
statusQueue.push((queueCallback) => async.parallel([ statusQueue.push((queueCallback) => parallel([
(parallelCallback) => notifyStatus({ (parallelCallback) => notifyStatus({
"description": errorMessage || warnMessage || infoMessage || "Success", "description": errorMessage || warnMessage || infoMessage || "Success",
"hash": rev, "hash": rev,
@ -138,7 +138,7 @@ export const build = (options, buildCallback) => {
reponame, reponame,
"state": createFinalState(!doneErr) "state": createFinalState(!doneErr)
}, parallelCallback), }, parallelCallback),
(parallelCallback) => mailSender.send({ (parallelCallback) => sendMail({
"from": settings.smtp.sender, "from": settings.smtp.sender,
"headers": { "X-Laziness-level": mailLazinessLevel }, "headers": { "X-Laziness-level": mailLazinessLevel },
"subject": createBuildDoneMessage(doneErr, `${owner}/${reponame}/${branch}`), "subject": createBuildDoneMessage(doneErr, `${owner}/${reponame}/${branch}`),
@ -150,7 +150,7 @@ export const build = (options, buildCallback) => {
return process.nextTick(parallelCallback); return process.nextTick(parallelCallback);
} }
return fse.remove(tmp, parallelCallback); return remove(tmp, parallelCallback);
} }
], queueCallback)); ], queueCallback));
@ -177,12 +177,12 @@ export const build = (options, buildCallback) => {
console.log("Done loading from git"); console.log("Done loading from git");
return fs.exists(path.join(exported, "mbs.json"), (exists) => { return exists(join(exported, "mbs.json"), (exists) => {
if (!exists) { if (!exists) {
return done(null, "MBSNotFound"); return done(null, "MBSNotFound");
} }
return fs.readFile(path.join(exported, "mbs.json"), (readErr, data) => { return readFile(join(exported, "mbs.json"), (readErr, data) => {
if (readErr) { if (readErr) {
return done(readErr, "MBSUnableToRead"); return done(readErr, "MBSUnableToRead");
} }
@ -195,7 +195,7 @@ export const build = (options, buildCallback) => {
return done(err, "MBSMalformed"); return done(err, "MBSMalformed");
} }
return processor.processTask(parsed, { return processTask(parsed, {
branch, branch,
exported, exported,
owner, owner,

@ -1,8 +1,8 @@
"use strict"; "use strict";
import _ = require("underscore"); import * as _ from "underscore";
import reportProcessor = require("./report-processor"); import { getStatusMessageFromRelease } from "./report-processor";
import settings = require("../settings"); import settings from "../settings";
const featureNamePattern = /^feature-(\d+)(?:-[a-zA-Z0-9]+)+$/; const featureNamePattern = /^feature-(\d+)(?:-[a-zA-Z0-9]+)+$/;
const versionNamePattern = /^v\d+(\.\d+)*$/; const versionNamePattern = /^v\d+(\.\d+)*$/;
@ -140,7 +140,7 @@ export const commentOnPullRequest = (originalOptions, callback) => {
const optionsGithub = _.extend(originalOptions, { "github": settings.createGithub(originalOptions.baseRepoOptions.owner) }); const optionsGithub = _.extend(originalOptions, { "github": settings.createGithub(originalOptions.baseRepoOptions.owner) });
const options = _.extend(optionsGithub, { "onTenthAttempt": () => writeComment(optionsGithub, "Waiting for build to finish...", () => {}) }); const options = _.extend(optionsGithub, { "onTenthAttempt": () => writeComment(optionsGithub, "Waiting for build to finish...", () => {}) });
return checkPullRequest(options, () => reportProcessor.getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => { return checkPullRequest(options, () => getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => {
const escapedErr = String(statusMessageErr || "").substring(0, maxCommentLength) const escapedErr = String(statusMessageErr || "").substring(0, maxCommentLength)
.replace(/`/g, "` "); .replace(/`/g, "` ");
const message = statusMessageErr const message = statusMessageErr

@ -1,9 +1,9 @@
"use strict"; "use strict";
import { EventEmitter } from "events"; // eslint-disable-line fp/no-events import { EventEmitter } from "events"; // eslint-disable-line fp/no-events
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { writeFile, mkdir } from "fs";
import async = require("async"); import { parallel } from "async";
import { Copier } from "recursive-tree-copy"; import { Copier } from "recursive-tree-copy";
const safeGetEntries = (tree):any => { const safeGetEntries = (tree):any => {
@ -17,20 +17,20 @@ const safeGetEntries = (tree):any => {
const gitToFsCopier = new Copier({ const gitToFsCopier = new Copier({
"concurrency": 4, "concurrency": 4,
"copyLeaf": (entry, targetDir, callback) => { "copyLeaf": (entry, targetDir, callback) => {
const targetPath = path.join(targetDir, entry.name()); const targetPath = join(targetDir, entry.name());
entry.getBlob((err, blob) => { entry.getBlob((err, blob) => {
if (err) { if (err) {
return callback(err); return callback(err);
} }
return fs.writeFile(targetPath, blob.content(), callback); return writeFile(targetPath, blob.content(), callback);
}); });
}, },
"createTargetTree": (tree, targetDir, callback) => { "createTargetTree": (tree, targetDir, callback) => {
const targetSubdir = path.join(targetDir, tree.name); const targetSubdir = join(targetDir, tree.name);
fs.mkdir(targetSubdir, (err) => { mkdir(targetSubdir, (err) => {
// Workaround for broken trees // Workaround for broken trees
if (err && err.code !== "EEXIST") { if (err && err.code !== "EEXIST") {
return callback(err); return callback(err);
@ -50,7 +50,7 @@ const gitToFsCopier = new Copier({
return emitter.emit("error", err); return emitter.emit("error", err);
} }
return async.parallel(entries.map((entry) => (callback) => { return parallel(entries.map((entry) => (callback) => {
if (entry.isTree()) { if (entry.isTree()) {
return entry.getTree((getTreeErr, subTree) => { return entry.getTree((getTreeErr, subTree) => {
if (getTreeErr) { if (getTreeErr) {

@ -1,17 +1,9 @@
"use strict"; "use strict";
import nodegit = require("nodegit"); import { Repository, Remote } from "nodegit";
import fse = require("fs-extra"); import { mkdirsSync, removeSync } from "fs-extra";
import { gitToFs } from "./copy"; import { gitToFs } from "./copy";
const mkdirs = (path) => {
fse.mkdirsSync(path); // eslint-disable-line no-sync
};
const removedirs = (path) => {
fse.removeSync(path); // eslint-disable-line no-sync
};
const fixUrl = (url) => { const fixUrl = (url) => {
if (!url.startsWith("https://")) { if (!url.startsWith("https://")) {
return url; return url;
@ -30,19 +22,19 @@ options = {
} }
*/ */
export = (options, globalCallback) => { export const gitLoader = (options, globalCallback) => {
const url = fixUrl(options.remote); const url = fixUrl(options.remote);
const path = `${options.local}/${options.hash}`; const path = `${options.local}/${options.hash}`;
const exported = options.exported; const exported = options.exported;
removedirs(path); removeSync(path); // eslint-disable-line no-sync
mkdirs(path); mkdirsSync(path); // eslint-disable-line no-sync
console.log(`Cloning ${url} to ${path}`); console.log(`Cloning ${url} to ${path}`);
nodegit.Repository.init(path, 1) Repository.init(path, 1)
.catch(globalCallback) .catch(globalCallback)
.then((repo) => nodegit.Remote.create(repo, "origin", url) .then((repo) => Remote.create(repo, "origin", url)
.catch(globalCallback) .catch(globalCallback)
.then((remote) => remote.fetch([options.branch]) .then((remote) => remote.fetch([options.branch])
.catch(globalCallback) .catch(globalCallback)
@ -56,8 +48,8 @@ export = (options, globalCallback) => {
return repo.getCommit(options.hash) return repo.getCommit(options.hash)
.catch(globalCallback) .catch(globalCallback)
.then((commit) => { .then((commit) => {
removedirs(exported); removeSync(exported);
mkdirs(exported); mkdirsSync(exported);
gitToFs(commit, exported, (err, result) => { gitToFs(commit, exported, (err, result) => {
repo.free(); repo.free();
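
Note that loader.ts now has a named export (`export const gitLoader`) instead of `export =`, which is what the `import { gitLoader } from "./git/loader"` line in builder.ts above corresponds to. A hedged usage sketch; the option names match the fields this module actually reads (remote, local, branch, hash, exported), while the concrete values are placeholders:

    import { gitLoader } from "./git/loader";

    gitLoader({
        "branch": "refs/heads/master",
        "exported": "/tmp/bs/code/48de21871c",  // placeholder; the commit tree is copied here via gitToFs
        "hash": "48de21871c",
        "local": "/tmp/bs/git/r",               // placeholder; the clone goes into `${local}/${hash}`
        "remote": "https://pos-github.payonline.ru/some-owner/some-repo.git"
    }, (err) => {
        if (err) {
            return console.log(`Git loading failed: ${err}`);
        }
        return console.log("Done loading from git");
    });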

@ -1,11 +1,11 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { createReadStream, createWriteStream, exists } from "fs";
import zlib = require("zlib"); import { createGzip, createGunzip } from "zlib";
import glob = require("glob"); import * as glob from "glob";
import streamBuffers = require("stream-buffers"); import { ReadableStreamBuffer, WritableStreamBuffer } from "stream-buffers";
import _ = require("underscore"); import * as _ from "underscore";
const reportFilename = "report.json.gz"; const reportFilename = "report.json.gz";
const maxAttemptsNumber = 100; const maxAttemptsNumber = 100;
@ -30,12 +30,12 @@ export const writeReport = (releaseDir, err, result, callback) => {
result result
}); });
const readable = new streamBuffers.ReadableStreamBuffer(readableStreamBufferOptions); const readable = new ReadableStreamBuffer(readableStreamBufferOptions);
const writeStream = fs.createWriteStream(path.join(releaseDir, reportFilename)); const writeStream = createWriteStream(join(releaseDir, reportFilename));
readable readable
.on("error", callback) .on("error", callback)
.pipe(zlib.createGzip()) .pipe(createGzip())
.on("error", callback) .on("error", callback)
.pipe(writeStream) .pipe(writeStream)
.on("error", callback) .on("error", callback)
@ -49,12 +49,12 @@ export const writeReport = (releaseDir, err, result, callback) => {
}; };
export const readReport = (releaseDir, callback) => { export const readReport = (releaseDir, callback) => {
const readStream = fs.createReadStream(path.join(releaseDir, reportFilename)); const readStream = createReadStream(join(releaseDir, reportFilename));
const writable = new streamBuffers.WritableStreamBuffer(); const writable = new WritableStreamBuffer();
readStream readStream
.on("error", callback) .on("error", callback)
.pipe(zlib.createGunzip()) .pipe(createGunzip())
.on("error", callback) .on("error", callback)
.pipe(writable) .pipe(writable)
.on("error", callback) .on("error", callback)
@ -72,7 +72,7 @@ export const readReport = (releaseDir, callback) => {
}; };
export const loadReport = (app, options, callback) => { export const loadReport = (app, options, callback) => {
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev); const releaseDir = join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
glob("**", { glob("**", {
"cwd": releaseDir, "cwd": releaseDir,
@ -82,10 +82,10 @@ export const loadReport = (app, options, callback) => {
return callback(err, options); return callback(err, options);
} }
const reportFile = path.join(releaseDir, reportFilename); const reportFile = join(releaseDir, reportFilename);
return fs.exists(reportFile, (exists) => { return exists(reportFile, (reportFileExists) => {
if (!exists) { if (!reportFileExists) {
return callback("ReportFileNotFound", options); return callback("ReportFileNotFound", options);
} }
@ -105,12 +105,12 @@ export const loadReport = (app, options, callback) => {
export const getStatusMessageFromRelease = (app, originalOptions, callback) => { export const getStatusMessageFromRelease = (app, originalOptions, callback) => {
const options = _.extend(originalOptions, { "attemptsGetReport": (Number(originalOptions.attemptsGetReport) || Number()) + 1 }); const options = _.extend(originalOptions, { "attemptsGetReport": (Number(originalOptions.attemptsGetReport) || Number()) + 1 });
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev); const releaseDir = join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const reportFile = path.join(releaseDir, reportFilename); const reportFile = join(releaseDir, reportFilename);
fs.exists(reportFile, (exists) => { exists(reportFile, (reportFileExists) => {
if (!exists) { if (!reportFileExists) {
return setTimeout(() => fs.exists(releaseDir, (dirExists) => { return setTimeout(() => exists(releaseDir, (dirExists) => {
if (!dirExists) { if (!dirExists) {
return callback("Release directory not found. Probably repository hooks are not configured"); return callback("Release directory not found. Probably repository hooks are not configured");
} }

@ -1,19 +1,19 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { exists, readFile } from "fs";
import _ = require("underscore"); import * as _ from "underscore";
import reportProcessor = require("./report-processor"); import { loadReport } from "./report-processor";
const addBranchInfo = (app, options, callback) => { const addBranchInfo = (app, options, callback) => {
const branchFile = path.join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`); const branchFile = join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`);
fs.exists(branchFile, (exists) => { exists(branchFile, (exists) => {
if (!exists) { if (!exists) {
return callback("BranchFileNotFound", options); return callback("BranchFileNotFound", options);
} }
return fs.readFile(branchFile, (err, data) => { return readFile(branchFile, (err, data) => {
if (err) { if (err) {
return callback(err, options); return callback(err, options);
} }
@ -31,14 +31,14 @@ const addBranchInfo = (app, options, callback) => {
}; };
const addRevInfo = (app, options, callback) => { const addRevInfo = (app, options, callback) => {
const revFile = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id"); const revFile = join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id");
fs.exists(revFile, (exists) => { exists(revFile, (exists) => {
if (!exists) { if (!exists) {
return callback("RevFileNotFound", options); return callback("RevFileNotFound", options);
} }
return fs.readFile(revFile, (err, data) => { return readFile(revFile, (err, data) => {
if (err) { if (err) {
return callback(err, options); return callback(err, options);
} }
@ -81,5 +81,5 @@ export const getReport = (app, options, callback) => parseOptions(app, options,
return callback(err, {}); return callback(err, {});
} }
return reportProcessor.loadReport(app, result, callback); return loadReport(app, result, callback);
}); });

@ -1,6 +1,6 @@
"use strict"; "use strict";
import _ = require("underscore"); import * as _ from "underscore";
import tasks from "./tasks"; import tasks from "./tasks";
// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here. // TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.

@ -1,8 +1,8 @@
"use strict"; "use strict";
import glob = require("glob"); import * as glob from "glob";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", { "process": () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
"cwd": processor.context.exported, "cwd": processor.context.exported,
"dot": true "dot": true

@ -1,6 +1,6 @@
"use strict"; "use strict";
export = (params, processor) => { export default (params, processor) => {
const condition = (!params.owner || params.owner === processor.context.owner) const condition = (!params.owner || params.owner === processor.context.owner)
&& (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch); && (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch);
const task = (condition && params.task) || params.otherwise; const task = (condition && params.task) || params.otherwise;

@ -1,16 +1,16 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fse = require("fs-extra"); import { copy } from "fs-extra";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const sourceFilePath = path.join(processor.context.exported, params.filename); const sourceFilePath = join(processor.context.exported, params.filename);
const targetFilePath = path.join(processor.context.release, params.filename); const targetFilePath = join(processor.context.release, params.filename);
processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`); processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`);
fse.copy(sourceFilePath, targetFilePath, (err) => { copy(sourceFilePath, targetFilePath, (err) => {
if (err) { if (err) {
processor.onError(`Unable to copy file: ${err}`); processor.onError(`Unable to copy file: ${err}`);
} else { } else {
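
Every module under lib/tasks now shares the same default-exported shape, `(params, processor) => ({ "process": ... })`. A hedged sketch of a hypothetical new task written against that shape (hello.ts is not part of this commit; `processor.onInfo` and `processor.done` are the same hooks the copy task above uses):

    // BuildServer/lib/tasks/hello.ts (illustrative only)
    export default (params, processor) => ({
        "process": () => {
            processor.onInfo(`Hello, ${params.name || "world"}`);
            processor.done();
        }
    });

Because tasks/index.ts (further down) now registers modules via `require(`./${file}`).default`, dropping such a file into lib/tasks/ would make it addressable as `"type": "hello"` in mbs.json.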

@ -1,8 +1,8 @@
"use strict"; "use strict";
import glob = require("glob"); import * as glob from "glob";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => glob(params.mask, { "process": () => glob(params.mask, {
"cwd": processor.context.exported, "cwd": processor.context.exported,
"dot": true "dot": true

@ -1,27 +1,27 @@
"use strict"; "use strict";
import fs = require("fs"); import { readFile, writeFile } from "fs";
import path = require("path"); import { join } from "path";
import cssnano = require("cssnano"); import { process as cssnanoProcess } from "cssnano";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const filePath = path.join(processor.context.exported, params.filename); const filePath = join(processor.context.exported, params.filename);
fs.readFile(filePath, (readErr, css) => { readFile(filePath, (readErr, css) => {
if (readErr) { if (readErr) {
processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`); processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);
return processor.done(); return processor.done();
} }
return cssnano.process(css) return cssnanoProcess(css)
.catch((cssErr) => { .catch((cssErr) => {
processor.onError(`Unable to uglify stylesheet: ${cssErr}`); processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
processor.done(); processor.done();
}) })
.then((result) => { .then((result) => {
fs.writeFile(filePath, result.css, (writeErr) => { writeFile(filePath, result.css, (writeErr) => {
if (writeErr) { if (writeErr) {
processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`); processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
} else { } else {

@ -1,9 +1,9 @@
"use strict"; "use strict";
import glob = require("glob"); import * as glob from "glob";
const flagDoneName = "cssnanoallDone"; const flagDoneName = "cssnanoallDone";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (processor.context.containsFlag(flagDoneName)) { if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json"); processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json");

@ -1,15 +1,15 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fse = require("fs-extra"); import { remove } from "fs-extra";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const sourceFilePath = path.join(processor.context.exported, params.filename); const sourceFilePath = join(processor.context.exported, params.filename);
processor.onInfo(`Deleting ${sourceFilePath}`); processor.onInfo(`Deleting ${sourceFilePath}`);
fse.remove(sourceFilePath, (err) => { remove(sourceFilePath, (err) => {
if (err) { if (err) {
processor.onError(`Unable to delete file: ${err}`); processor.onError(`Unable to delete file: ${err}`);
} else { } else {

@ -1,8 +1,8 @@
"use strict"; "use strict";
import sequential = require("./sequential"); import sequential from "./sequential";
export = (params, processor) => sequential({ export default (params, processor) => sequential({
"tasks": [ "tasks": [
{ {
"name": "build", "name": "build",

@ -1,8 +1,8 @@
"use strict"; "use strict";
import sequential = require("./sequential"); import sequential from "./sequential";
export = (params, processor) => sequential({ export default (params, processor) => sequential({
"tasks": [ "tasks": [
{ {
"name": "build", "name": "build",

@ -1,12 +1,12 @@
"use strict"; "use strict";
import { spawn } from "child_process"; import { spawn } from "child_process";
import streamBuffers = require("stream-buffers"); import { WritableStreamBuffer } from "stream-buffers";
import settings = require("../../settings"); import settings from "../../settings";
const wrapBuilder = (builder, input, onExit) => { const wrapBuilder = (builder, input, onExit) => {
const resultBuffer = new streamBuffers.WritableStreamBuffer(); const resultBuffer = new WritableStreamBuffer();
const errorBuffer = new streamBuffers.WritableStreamBuffer(); const errorBuffer = new WritableStreamBuffer();
builder.stdout.on("data", (data) => { builder.stdout.on("data", (data) => {
resultBuffer.write(data); resultBuffer.write(data);
@ -34,7 +34,7 @@ const safeParseJson = (data):any => {
} }
}; };
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const input = JSON.stringify(params); const input = JSON.stringify(params);
const builder = spawn(settings.builderExecutable, [params.command]); const builder = spawn(settings.builderExecutable, [params.command]);

@ -1,6 +1,6 @@
"use strict"; "use strict";
import sequential = require("./sequential"); import sequential from "./sequential";
const createTasks = function *(params) { const createTasks = function *(params) {
if (!params.skipMbsCheckStyle) { if (!params.skipMbsCheckStyle) {
@ -35,7 +35,7 @@ const createTasks = function *(params) {
}; };
}; };
export = (params, processor) => { export default (params, processor) => {
const tasks = Array.from(createTasks(params)); const tasks = Array.from(createTasks(params));
return sequential({ tasks }, processor); return sequential({ tasks }, processor);

@ -1,9 +1,9 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { readFile } from "fs";
import async = require("async"); import { parallel } from "async";
import glob = require("glob"); import * as glob from "glob";
const autoGeneratedMarker const autoGeneratedMarker
= "//------------------------------------------------------------------------------\n" = "//------------------------------------------------------------------------------\n"
@ -11,7 +11,7 @@ const autoGeneratedMarker
const flagDoneName = "dotnetcheckerDone"; const flagDoneName = "dotnetcheckerDone";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (processor.context.containsFlag(flagDoneName)) { if (processor.context.containsFlag(flagDoneName)) {
return processor.done(); return processor.done();
@ -56,8 +56,8 @@ export = (params, processor) => ({
return processor.onInfo(`Checked file ${file}`); return processor.onInfo(`Checked file ${file}`);
}; };
return async.parallel(files.map((file) => (callback) => fs.readFile( return parallel(files.map((file) => (callback) => readFile(
path.join(processor.context.exported, file), join(processor.context.exported, file),
{ "encoding": "utf8" }, { "encoding": "utf8" },
(readErr, data) => { (readErr, data) => {
if (readErr) { if (readErr) {

@ -1,11 +1,11 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import _ = require("underscore"); import * as _ from "underscore";
import settings = require("../../settings"); import settings from "../../settings";
import dotnetbuilderwrapper = require("./dotnetbuilderwrapper"); import dotnetbuilderwrapper from "./dotnetbuilderwrapper";
export = (params, processor) => { export default (params, processor) => {
if (settings.isCodeAnalysisUnsupported && params.forceCodeAnalysis) { if (settings.isCodeAnalysisUnsupported && params.forceCodeAnalysis) {
processor.onError("Code analysis is not supported"); processor.onError("Code analysis is not supported");
@ -28,7 +28,7 @@ export = (params, processor) => {
"Configuration": params.configuration, "Configuration": params.configuration,
"OutputDirectory": params.overrideOutputDirectory, "OutputDirectory": params.overrideOutputDirectory,
"SkipCodeAnalysis": skipCodeAnalysis, "SkipCodeAnalysis": skipCodeAnalysis,
"SolutionPath": path.join(processor.context.exported, params.solution), "SolutionPath": join(processor.context.exported, params.solution),
"Target": params.target, "Target": params.target,
"command": "compile" "command": "compile"
}; };

@ -1,9 +1,9 @@
"use strict"; "use strict";
import _ = require("underscore"); import * as _ from "underscore";
import dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal"); import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";
export = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, { export default (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({ "getFinalTask": (nupkg) => ({
"params": { "filename": nupkg }, "params": { "filename": nupkg },
"type": "copy" "type": "copy"

@ -1,8 +1,8 @@
"use strict"; "use strict";
import conditional = require("./conditional"); import conditional from "./conditional";
export = (params, processor) => conditional({ export default (params, processor) => conditional({
"branch": "master", "branch": "master",
"otherwise": { "otherwise": {
"name": "nuget-pack", "name": "nuget-pack",

@ -1,7 +1,7 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import sequential = require("./sequential"); import sequential from "./sequential";
const postfixLength = 16; const postfixLength = 16;
const fourDigits = 10000; const fourDigits = 10000;
@ -15,7 +15,7 @@ const addPostfix = (version, params, processor) => {
return `${version}-r${processor.context.rev.substr(0, postfixLength)}`; return `${version}-r${processor.context.rev.substr(0, postfixLength)}`;
}; };
export = (params, processor) => { export default (params, processor) => {
const date = new Date(); const date = new Date();
const major = params.major || "0"; const major = params.major || "0";
const minor = (date.getFullYear() * fourDigits) + ((date.getMonth() + 1) * twoDigits) + date.getDate(); const minor = (date.getFullYear() * fourDigits) + ((date.getMonth() + 1) * twoDigits) + date.getDate();
@ -29,7 +29,7 @@ export = (params, processor) => {
"params": { "params": {
"BaseDirectory": processor.context.exported, "BaseDirectory": processor.context.exported,
"OutputDirectory": processor.context.exported, "OutputDirectory": processor.context.exported,
"SpecPath": path.join(processor.context.exported, params.nuspec), "SpecPath": join(processor.context.exported, params.nuspec),
"Version": version, "Version": version,
"command": "nugetpack" "command": "nugetpack"
}, },

@ -1,9 +1,9 @@
"use strict"; "use strict";
import _ = require("underscore"); import * as _ from "underscore";
import dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal"); import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";
export = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, { export default (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({ "getFinalTask": (nupkg) => ({
"params": { "Package": nupkg }, "params": { "Package": nupkg },
"type": "dotnetnugetpushonly" "type": "dotnetnugetpushonly"

@ -1,12 +1,12 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import dotnetbuilderwrapper = require("./dotnetbuilderwrapper"); import dotnetbuilderwrapper from "./dotnetbuilderwrapper";
import settings = require("../../settings"); import settings from "../../settings";
export = (params, processor) => dotnetbuilderwrapper({ export default (params, processor) => dotnetbuilderwrapper({
"ApiKey": settings.nugetApiKey, "ApiKey": settings.nugetApiKey,
"NugetHost": settings.nugetHost, "NugetHost": settings.nugetHost,
"Package": path.join(processor.context.exported, params.Package), "Package": join(processor.context.exported, params.Package),
"command": "nugetpush" "command": "nugetpush"
}, processor); }, processor);

@ -1,14 +1,14 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import sequential = require("./sequential"); import sequential from "./sequential";
export = (params, processor) => sequential({ export default (params, processor) => sequential({
"tasks": [ "tasks": [
{ {
"params": { "params": {
"BaseDirectory": processor.context.exported, "BaseDirectory": processor.context.exported,
"SolutionPath": path.join(processor.context.exported, params.solution), "SolutionPath": join(processor.context.exported, params.solution),
"command": "nugetrestore" "command": "nugetrestore"
}, },
"type": "dotnetbuilderwrapper" "type": "dotnetbuilderwrapper"

@ -1,9 +1,9 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import dotNetBuilderWrapper = require("./dotnetbuilderwrapper"); import dotNetBuilderWrapper from "./dotnetbuilderwrapper";
export = (params, processor) => dotNetBuilderWrapper({ export default (params, processor) => dotNetBuilderWrapper({
"TestLibraryPath": path.join(processor.context.exported, params.assembly), "TestLibraryPath": join(processor.context.exported, params.assembly),
"command": "nunit" "command": "nunit"
}, processor); }, processor);

@ -1,9 +1,9 @@
"use strict"; "use strict";
import glob = require("glob"); import * as glob from "glob";
const flagDoneName = "dotnetnunitallDone"; const flagDoneName = "dotnetnunitallDone";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (processor.context.containsFlag(flagDoneName)) { if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json"); processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");

@ -1,36 +1,36 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { readFileSync } from "fs";
import Mustache = require("mustache"); import { render } from "mustache";
import sequential = require("./sequential"); import sequential from "./sequential";
// eslint-disable-next-line no-sync // eslint-disable-next-line no-sync
const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" }); const msbuildTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync // eslint-disable-next-line no-sync
const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" }); const deployTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync // eslint-disable-next-line no-sync
const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" }); const versionTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });
export = (params, processor) => sequential({ export default (params, processor) => sequential({
"tasks": [ "tasks": [
{ {
"params": { "params": {
"data": Mustache.render(msbuildTemplate, params), "data": render(msbuildTemplate, params),
"filename": "MakePackage.msbuild" "filename": "MakePackage.msbuild"
}, },
"type": "writefile" "type": "writefile"
}, },
{ {
"params": { "params": {
"data": Mustache.render(deployTemplate, params), "data": render(deployTemplate, params),
"filename": "Deploy.bat" "filename": "Deploy.bat"
}, },
"type": "writefile" "type": "writefile"
}, },
{ {
"params": { "params": {
"data": Mustache.render(versionTemplate, params), "data": render(versionTemplate, params),
"filename": "version.aspx" "filename": "version.aspx"
}, },
"type": "writefile" "type": "writefile"

@ -1,10 +1,10 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import fs = require("fs"); import { readFile, writeFile } from "fs";
import async = require("async"); import { parallel, waterfall } from "async";
import glob = require("glob"); import * as glob from "glob";
import settings = require("../../settings"); import settings from "../../settings";
const flagDoneName = "dotnetrewriterDone"; const flagDoneName = "dotnetrewriterDone";
@ -31,7 +31,7 @@ const processAssemblyInfo = (params, processor, appendInformationalVersion) => (
return cb(null, processInformationalVersion(processInternalsVisible(originalContent))); return cb(null, processInformationalVersion(processInternalsVisible(originalContent)));
}; };
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (processor.context.containsFlag(flagDoneName)) { if (processor.context.containsFlag(flagDoneName)) {
return processor.done(); return processor.done();
@ -54,10 +54,10 @@ export = (params, processor) => ({
return processor.done(); return processor.done();
} }
return async.parallel(files.map((file) => (callback) => async.waterfall([ return parallel(files.map((file) => (callback) => waterfall([
fs.readFile.bind(null, path.join(processor.context.exported, file), { "encoding": "utf8" }), readFile.bind(null, join(processor.context.exported, file), { "encoding": "utf8" }),
processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")), processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
fs.writeFile.bind(null, path.join(processor.context.exported, file)) writeFile.bind(null, join(processor.context.exported, file))
], (err) => { ], (err) => {
if (err) { if (err) {
processor.onError(`Unable to rewrite file ${file}: ${err}`); processor.onError(`Unable to rewrite file ${file}: ${err}`);

@ -1,6 +1,6 @@
"use strict"; "use strict";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (params.error) { if (params.error) {
processor.onError(params.error); processor.onError(params.error);

@ -1,15 +1,15 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import { CLIEngine } from "eslint"; import { CLIEngine } from "eslint";
import settings = require("../../settings"); import settings from "../../settings";
const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig }); const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig });
const errorSeverity = 2; const errorSeverity = 2;
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const filePath = path.join(processor.context.exported, params.filename); const filePath = join(processor.context.exported, params.filename);
const result = cli.executeOnFiles([filePath]); const result = cli.executeOnFiles([filePath]);
processor.onInfo(`ESLinted ${params.filename}`); processor.onInfo(`ESLinted ${params.filename}`);

@ -1,9 +1,9 @@
"use strict"; "use strict";
import glob = require("glob"); import * as glob from "glob";
const flagDoneName = "eslintbrowserallDone"; const flagDoneName = "eslintbrowserallDone";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (processor.context.containsFlag(flagDoneName)) { if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json"); processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");

@ -10,7 +10,7 @@ require("fs").readdirSync(__dirname)
const name = file.replace(".ts", ""); const name = file.replace(".ts", "");
// eslint-disable-next-line global-require // eslint-disable-next-line global-require
tasks[name] = require(`./${file}`); tasks[name] = require(`./${file}`).default;
} }
}); });
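
Only the registration line is visible in this hunk. A hedged sketch of how the whole registry presumably looks after the change; the directory filter is an assumption, while the `.default` access and the default export of `tasks` follow from this hunk and from `import tasks from "./tasks"` in task-processor.ts:

    "use strict";

    const tasks: { [name: string]: any } = {};

    // eslint-disable-next-line no-sync
    require("fs").readdirSync(__dirname)
        .filter((file) => file.endsWith(".ts") && file !== "index.ts")  // assumed filter
        .forEach((file) => {
            const name = file.replace(".ts", "");
            // eslint-disable-next-line global-require
            tasks[name] = require(`./${file}`).default;  // task modules are now default exports
        });

    export default tasks;  // consumed as `import tasks from "./tasks"` in task-processor.ts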

@ -1,3 +1,3 @@
"use strict"; "use strict";
export = (params, processor) => ({ "process": () => processor.done() }); export default (params, processor) => ({ "process": () => processor.done() });

@ -1,8 +1,8 @@
"use strict"; "use strict";
import sequential = require("./sequential"); import sequential from "./sequential";
export = (params, processor) => sequential({ export default (params, processor) => sequential({
"tasks": [ "tasks": [
{ {
"params": { "excludeFiles": params.eslintExcludeFiles }, "params": { "excludeFiles": params.eslintExcludeFiles },

@ -1,7 +1,7 @@
"use strict"; "use strict";
import async = require("async"); import { parallel } from "async";
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback); const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
export = (params, processor) => ({ "process": () => async.parallel(params.tasks.map(mapper(processor)), () => processor.done()) }); export default (params, processor) => ({ "process": () => parallel(params.tasks.map(mapper(processor)), () => processor.done()) });

@ -1,7 +1,7 @@
"use strict"; "use strict";
import async = require("async"); import { series } from "async";
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback); const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
export = (params, processor) => ({ "process": () => async.series(params.tasks.map(mapper(processor)), () => processor.done()) }); export default (params, processor) => ({ "process": () => series(params.tasks.map(mapper(processor)), () => processor.done()) });

@ -1,15 +1,15 @@
"use strict"; "use strict";
import fs = require("fs"); import { writeFile } from "fs";
import path = require("path"); import { join, normalize } from "path";
import UglifyJS = require("uglify-js"); import { minify } from "uglify-js";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const filePath = path.normalize(path.join(processor.context.exported, params.filename)); const filePath = normalize(join(processor.context.exported, params.filename));
const result = UglifyJS.minify(filePath); const result = minify(filePath);
fs.writeFile(filePath, result.code, (err) => { writeFile(filePath, result.code, (err) => {
if (err) { if (err) {
processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`); processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
} else { } else {

@ -1,10 +1,10 @@
"use strict"; "use strict";
import glob = require("glob"); import * as glob from "glob";
const doneFlagName = "uglifyjsallDone"; const doneFlagName = "uglifyjsallDone";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
if (processor.context.containsFlag(doneFlagName)) { if (processor.context.containsFlag(doneFlagName)) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json"); processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");

@ -1,15 +1,15 @@
"use strict"; "use strict";
import fs = require("fs"); import { writeFile } from "fs";
import path = require("path"); import { join } from "path";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const filePath = path.join(processor.context.exported, params.filename); const filePath = join(processor.context.exported, params.filename);
processor.onInfo(`Writing to ${filePath}`); processor.onInfo(`Writing to ${filePath}`);
fs.writeFile(filePath, params.data, (err) => { writeFile(filePath, params.data, (err) => {
if (err) { if (err) {
processor.onError(`Unable to write file: ${err}`); processor.onError(`Unable to write file: ${err}`);
} else { } else {

@ -1,17 +1,17 @@
"use strict"; "use strict";
import fs = require("fs"); import { createWriteStream } from "fs";
import path = require("path"); import { join, normalize } from "path";
import Archiver = require("archiver"); import * as Archiver from "archiver";
export = (params, processor) => ({ export default (params, processor) => ({
"process": () => { "process": () => {
const sourceDirectoryPath = path.normalize(path.join(processor.context.exported, String(params.directory || ""))); const sourceDirectoryPath = normalize(join(processor.context.exported, String(params.directory || "")));
const targetArchivePath = path.normalize(path.join(processor.context.release, params.archive)); const targetArchivePath = normalize(join(processor.context.release, params.archive));
processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`); processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`);
const output = fs.createWriteStream(targetArchivePath); const output = createWriteStream(targetArchivePath);
const archive = new Archiver("zip"); const archive = new Archiver("zip");
output.on("close", () => processor.done()); output.on("close", () => processor.done());
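
The zip hunk stops right after the close handler; for orientation, a hedged sketch of how an Archiver instance is typically wired to the output stream from here (standard archiver API calls, not lines taken from this diff):

    archive.on("error", (archiveErr) => processor.onError(`Unable to create archive: ${archiveErr}`));
    archive.pipe(output);                           // stream the zip into targetArchivePath
    archive.directory(sourceDirectoryPath, false);  // add the directory contents at the archive root
    archive.finalize();                             // the output "close" handler above then calls processor.done()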

@ -1,6 +1,6 @@
"use strict"; "use strict";
export = (req, res) => { export default (req, res) => {
const options = { const options = {
"branch": `/refs/heads/${req.params.branch}`, "branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch, "branchName": req.params.branch,

@ -1,10 +1,10 @@
"use strict"; "use strict";
import postreceive = require("./postreceive"); import postreceive from "./postreceive";
import manual = require("./manual"); import * as manual from "./manual";
import status = require("./status"); import * as status from "./status";
import artifact = require("./artifact"); import artifact from "./artifact";
import release = require("./release"); import release from "./release";
const index = (req, res) => res.render("index", { "title": `Express<br/>\r\n${req}` }); const index = (req, res) => res.render("index", { "title": `Express<br/>\r\n${req}` });

@ -1,7 +1,7 @@
"use strict"; "use strict";
import _ = require("underscore"); import * as _ from "underscore";
import builder = require("../lib/builder"); import { build } from "../lib/builder";
export const get = (req, res) => res.render("manual"); export const get = (req, res) => res.render("manual");
@ -11,7 +11,7 @@ export const post = (req, res) => {
"url": `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}` "url": `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}`
}); });
builder.build(options, (err, result) => { build(options, (err, result) => {
console.log("Done processing manual request"); console.log("Done processing manual request");
console.log(`Error: ${err}`); console.log(`Error: ${err}`);
res.render("manual-done", { res.render("manual-done", {

@ -1,7 +1,7 @@
"use strict"; "use strict";
import builder = require("../lib/builder"); import { build } from "../lib/builder";
import commenter = require("../lib/commenter"); import { commentOnPullRequest } from "../lib/commenter";
const getBranchDescription = (options) => `${options.owner}/${options.reponame}:${options.branchname || options.branch}`; const getBranchDescription = (options) => `${options.owner}/${options.reponame}:${options.branchname || options.branch}`;
@ -18,7 +18,7 @@ const processPush = (req, res, payload) => {
console.log(`Got push event for ${getBranchDescription(options)}`); console.log(`Got push event for ${getBranchDescription(options)}`);
builder.build(options, (err, result) => { build(options, (err, result) => {
console.log("Done processing request from GitHub"); console.log("Done processing request from GitHub");
console.log(`Error: ${err}`); console.log(`Error: ${err}`);
res.send(`Done processing request from GitHub\r\nError: ${err}\r\nResult: ${result}`); res.send(`Done processing request from GitHub\r\nError: ${err}\r\nResult: ${result}`);
@ -78,7 +78,7 @@ const processPullRequest = (req, res, payload) => {
return res.send(""); return res.send("");
} }
return commenter.commentOnPullRequest( return commentOnPullRequest(
(action === "closed" && masterOptions) || options, (action === "closed" && masterOptions) || options,
(err, data) => { (err, data) => {
if (err) { if (err) {
@ -90,7 +90,7 @@ const processPullRequest = (req, res, payload) => {
); );
}; };
export = (req, res) => { export default (req, res) => {
if (!req.body || (!req.body.payload && !req.body.repository)) { if (!req.body || (!req.body.payload && !req.body.repository)) {
return res.end(); return res.end();
} }

@ -1,9 +1,9 @@
"use strict"; "use strict";
import path = require("path"); import { join } from "path";
import Archiver = require("archiver"); import * as Archiver from "archiver";
import reportProcessor = require("../lib/report-processor"); import { readReport } from "../lib/report-processor";
const getDatePart = (report) => { const getDatePart = (report) => {
if (!report.date) { if (!report.date) {
@ -23,7 +23,7 @@ const getDatePart = (report) => {
return `${year}.${month}.${day}.${hours}.${minutes}.${seconds}`; return `${year}.${month}.${day}.${hours}.${minutes}.${seconds}`;
}; };
export = (req, res, next) => { export default (req, res, next) => {
const options = { const options = {
"branch": `/refs/heads/${req.params.branch}`, "branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch, "branchName": req.params.branch,
@ -32,9 +32,9 @@ export = (req, res, next) => {
"rev": req.params.rev "rev": req.params.rev
}; };
const releasePath = path.join(req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev); const releasePath = join(req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
reportProcessor.readReport(releasePath, (err, report) => { readReport(releasePath, (err, report) => {
if (err) { if (err) {
return next(err); return next(err);
} }

@ -1,8 +1,8 @@
"use strict"; "use strict";
import url = require("url"); import { parse } from "url";
import _ = require("underscore"); import * as _ from "underscore";
import statusProcessor = require("../lib/status-processor"); import { getReport } from "../lib/status-processor";
const parseOptionsFromReferer = (path, callback) => { const parseOptionsFromReferer = (path, callback) => {
const pathParts = path.split("/").filter((value) => value); const pathParts = path.split("/").filter((value) => value);
@ -89,12 +89,12 @@ export const image = (req, res) => {
res.render("status-image", _.extend(options, getAdditionalOptions(err, options))); res.render("status-image", _.extend(options, getAdditionalOptions(err, options)));
}; };
parseOptionsFromReferer(url.parse(req.headers.referer || "").pathname || "", (err, options) => { parseOptionsFromReferer(parse(req.headers.referer || "").pathname || "", (err, options) => {
if (err) { if (err) {
return handle(err, options); return handle(err, options);
} }
return statusProcessor.getReport(req.app, options, handle); return getReport(req.app, options, handle);
}); });
}; };
@ -107,7 +107,7 @@ export const page = (req, res) => {
"rev": req.params.rev "rev": req.params.rev
}; };
statusProcessor.getReport(req.app, options, createShowReport(res)); getReport(req.app, options, createShowReport(res));
}; };
export const pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0], (err, options) => { export const pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0], (err, options) => {
@ -115,5 +115,5 @@ export const pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0
return createShowReport(res)(err, options); return createShowReport(res)(err, options);
} }
return statusProcessor.getReport(req.app, options, createShowReport(res)); return getReport(req.app, options, createShowReport(res));
}); });

@ -1,6 +1,6 @@
"use strict"; "use strict";
const GitHubApi = require("github"); import GitHubApi = require("github");
const createGithub = () => new GitHubApi({ const createGithub = () => new GitHubApi({
"debug": false, "debug": false,
@ -8,11 +8,10 @@ const createGithub = () => new GitHubApi({
"host": "pos-github.payonline.ru", "host": "pos-github.payonline.ru",
"pathPrefix": "/api/v3", "pathPrefix": "/api/v3",
"protocol": "https", "protocol": "https",
"timeout": 5000, "timeout": 5000
"version": "3.0.0"
}); });
export = { export default {
"builderExecutable": "../DotNetBuilder/bin/Debug/MicroBuildServer.DotNetBuilder.exe", "builderExecutable": "../DotNetBuilder/bin/Debug/MicroBuildServer.DotNetBuilder.exe",
"codeSigningKeyFile": null, "codeSigningKeyFile": null,
"codeSigningPublicKey": null, "codeSigningPublicKey": null,

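A hedged sketch of the full shape the default-exported settings object needs for the imports above to work; the field names are the ones read elsewhere in this diff, the values are placeholders, and the real settings.ts.example contains whatever else the deployment requires:

    export default {
        "builderExecutable": "../DotNetBuilder/bin/Debug/MicroBuildServer.DotNetBuilder.exe",
        "codeSigningKeyFile": null,
        "codeSigningPublicKey": null,
        "createGithub": (owner) => createGithub(),       // commenter.ts calls settings.createGithub(owner)
        "eslintBrowserConfig": "eslint-browser.json",    // placeholder; used by tasks/eslintbrowser.ts
        "gitpath": "data/gitpath",                       // placeholder; read via app.get("gitpath") in app.ts
        "isCodeAnalysisUnsupported": false,              // read by tasks/dotnetcompile.ts
        "nugetApiKey": "placeholder-key",                // read by tasks/dotnetnugetpushonly.ts
        "nugetHost": "https://nuget.example.org/",
        "port": 3000,
        "releasepath": "data/releasepath",
        "smtp": { "sender": "buildserver@example.org" }, // builder.ts reads settings.smtp.sender
        "tmpcodepath": "data/tmpcodepath"
    };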