Switch to ES6 modules

dependabot/npm_and_yarn/BuildServer/eslint-7.2.0
Inga 🏳️‍🌈 7 years ago
parent c9dfd2b5b9
commit 48de21871c
  1. BuildServer/app.ts — 38 lines changed
  2. BuildServer/lib/builder.ts — 48 lines changed
  3. BuildServer/lib/commenter.ts — 8 lines changed
  4. BuildServer/lib/git/copy.ts — 16 lines changed
  5. BuildServer/lib/git/loader.ts — 26 lines changed
  6. BuildServer/lib/report-processor.ts — 42 lines changed
  7. BuildServer/lib/status-processor.ts — 22 lines changed
  8. BuildServer/lib/task-processor.ts — 2 lines changed
  9. BuildServer/lib/tasks/cleanupafterdotnetbuild.ts — 4 lines changed
  10. BuildServer/lib/tasks/conditional.ts — 2 lines changed
  11. BuildServer/lib/tasks/copy.ts — 12 lines changed
  12. BuildServer/lib/tasks/copyglob.ts — 4 lines changed
  13. BuildServer/lib/tasks/cssnano.ts — 16 lines changed
  14. BuildServer/lib/tasks/cssnanoall.ts — 4 lines changed
  15. BuildServer/lib/tasks/deletefromcode.ts — 10 lines changed
  16. BuildServer/lib/tasks/dotnetbuild.ts — 4 lines changed
  17. BuildServer/lib/tasks/dotnetbuildandtest.ts — 4 lines changed
  18. BuildServer/lib/tasks/dotnetbuilderwrapper.ts — 10 lines changed
  19. BuildServer/lib/tasks/dotnetbuildwithoutcleanup.ts — 4 lines changed
  20. BuildServer/lib/tasks/dotnetcheckstyle.ts — 14 lines changed
  21. BuildServer/lib/tasks/dotnetcompile.ts — 12 lines changed
  22. BuildServer/lib/tasks/dotnetnugetpack.ts — 6 lines changed
  23. BuildServer/lib/tasks/dotnetnugetprocess.ts — 4 lines changed
  24. BuildServer/lib/tasks/dotnetnugetprocessinternal.ts — 8 lines changed
  25. BuildServer/lib/tasks/dotnetnugetpush.ts — 6 lines changed
  26. BuildServer/lib/tasks/dotnetnugetpushonly.ts — 10 lines changed
  27. BuildServer/lib/tasks/dotnetnugetrestore.ts — 8 lines changed
  28. BuildServer/lib/tasks/dotnetnunit.ts — 8 lines changed
  29. BuildServer/lib/tasks/dotnetnunitall.ts — 4 lines changed
  30. BuildServer/lib/tasks/dotnetpackwebapp.ts — 22 lines changed
  31. BuildServer/lib/tasks/dotnetrewrite.ts — 18 lines changed
  32. BuildServer/lib/tasks/echo.ts — 2 lines changed
  33. BuildServer/lib/tasks/eslintbrowser.ts — 8 lines changed
  34. BuildServer/lib/tasks/eslintbrowserall.ts — 4 lines changed
  35. BuildServer/lib/tasks/index.ts — 2 lines changed
  36. BuildServer/lib/tasks/noop.ts — 2 lines changed
  37. BuildServer/lib/tasks/packform.ts — 4 lines changed
  38. BuildServer/lib/tasks/parallel.ts — 4 lines changed
  39. BuildServer/lib/tasks/sequential.ts — 4 lines changed
  40. BuildServer/lib/tasks/uglifyjs.ts — 14 lines changed
  41. BuildServer/lib/tasks/uglifyjsall.ts — 4 lines changed
  42. BuildServer/lib/tasks/writefile.ts — 10 lines changed
  43. BuildServer/lib/tasks/zip.ts — 14 lines changed
  44. BuildServer/routes/artifact.ts — 2 lines changed
  45. BuildServer/routes/index.ts — 10 lines changed
  46. BuildServer/routes/manual.ts — 6 lines changed
  47. BuildServer/routes/postreceive.ts — 10 lines changed
  48. BuildServer/routes/release.ts — 12 lines changed
  49. BuildServer/routes/status.ts — 14 lines changed
  50. BuildServer/settings.ts.example — 7 lines changed

@ -1,37 +1,37 @@
"use strict";
import realFs = require("fs");
import fs = require("graceful-fs");
import * as realFs from "fs";
import * as fs from "graceful-fs";
fs.gracefulify(realFs);
import express = require("express");
import routes = require("./routes");
import http = require("http");
import path = require("path");
import serveFavicon = require("serve-favicon");
import morgan = require("morgan");
import bodyParser = require("body-parser");
import methodOverride = require("method-override");
import serveStatic = require("serve-static");
import errorhandler = require("errorhandler");
import * as express from "express";
import * as routes from "./routes";
import { createServer } from "http";
import { join } from "path";
import * as serveFavicon from "serve-favicon";
import * as morgan from "morgan";
import { json as bodyJson, urlencoded as bodyUrlencoded } from "body-parser";
import * as methodOverride from "method-override";
import * as serveStatic from "serve-static";
import * as errorhandler from "errorhandler";
import settings = require("./settings");
import settings from "./settings";
const app = express();
app.set("port", settings.port); // eslint-disable-line no-process-env
app.set("views", path.join(__dirname, "views"));
app.set("views", join(__dirname, "views"));
app.set("view engine", "jade");
app.set("gitpath", settings.gitpath);
app.set("tmpcodepath", settings.tmpcodepath);
app.set("releasepath", settings.releasepath);
app.use(serveFavicon(path.join(__dirname, "public/images/favicon.png")));
app.use(serveFavicon(join(__dirname, "public/images/favicon.png")));
app.use(morgan("dev"));
app.use(bodyParser.json({ "limit": "10mb" }));
app.use(bodyParser.urlencoded({ "extended": false }));
app.use(bodyJson({ "limit": "10mb" }));
app.use(bodyUrlencoded({ "extended": false }));
app.use(methodOverride());
app.use(serveStatic(path.join(__dirname, "public")));
app.use(serveStatic(join(__dirname, "public")));
if (app.get("env") === "development") {
app.use(errorhandler());
@ -52,4 +52,4 @@ app.route("/status.svg").get(routes.status.image);
app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release);
app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact);
http.createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));
createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));

@ -1,14 +1,14 @@
"use strict";
import path = require("path");
import fs = require("fs");
import fse = require("fs-extra");
import async = require("async");
import gitLoader = require("./git/loader");
import processor = require("./task-processor");
import reportProcessor = require("./report-processor");
import mailSender = require("./mail-sender");
import settings = require("../settings");
import { join } from "path";
import { exists, readFile, writeFileSync } from "fs";
import { mkdirsSync, remove } from "fs-extra";
import { parallel, queue } from "async";
import { gitLoader } from "./git/loader";
import { processTask } from "./task-processor";
import { writeReport } from "./report-processor";
import { send as sendMail } from "./mail-sender";
import settings from "../settings";
const codePostfix = "";
const mailLazinessLevel = 1000;
@ -77,11 +77,11 @@ export const build = (options, buildCallback) => {
const rev = options.rev;
const branch = options.branch;
const skipGitLoader = options.skipGitLoader;
const local = path.join(options.app.get("gitpath"), "r");
const tmp = path.join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength));
const local = join(options.app.get("gitpath"), "r");
const tmp = join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength));
const exported = tmp + codePostfix;
const release = path.join(options.app.get("releasepath"), owner, reponame, branch, rev);
const statusQueue = async.queue((task, queueCallback) => task(queueCallback), 1);
const release = join(options.app.get("releasepath"), owner, reponame, branch, rev);
const statusQueue = queue((task, queueCallback) => task(queueCallback), 1);
const actualGitLoader = wrapGitLoader(skipGitLoader);
const date = new Date();
const versionMajor = date.getFullYear();
@ -99,11 +99,11 @@ export const build = (options, buildCallback) => {
"state": "pending"
}, queueCallback));
fse.mkdirsSync(release);
mkdirsSync(release);
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev);
fse.mkdirsSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs"));
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch);
writeFileSync(join(options.app.get("releasepath"), owner, reponame, branch, "latest.id"), rev);
mkdirsSync(join(options.app.get("releasepath"), owner, reponame, "$revs"));
writeFileSync(join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch);
const createErrorMessageForMail = (doneErr) => {
if (!doneErr) {
@ -129,8 +129,8 @@ export const build = (options, buildCallback) => {
const warnMessage = (allWarns[0] || {}).message;
const infoMessage = (allInfos[allInfos.length - 1] || {}).message;
reportProcessor.writeReport(release, doneErr, result, (writeErr) => {
statusQueue.push((queueCallback) => async.parallel([
writeReport(release, doneErr, result, (writeErr) => {
statusQueue.push((queueCallback) => parallel([
(parallelCallback) => notifyStatus({
"description": errorMessage || warnMessage || infoMessage || "Success",
"hash": rev,
@ -138,7 +138,7 @@ export const build = (options, buildCallback) => {
reponame,
"state": createFinalState(!doneErr)
}, parallelCallback),
(parallelCallback) => mailSender.send({
(parallelCallback) => sendMail({
"from": settings.smtp.sender,
"headers": { "X-Laziness-level": mailLazinessLevel },
"subject": createBuildDoneMessage(doneErr, `${owner}/${reponame}/${branch}`),
@ -150,7 +150,7 @@ export const build = (options, buildCallback) => {
return process.nextTick(parallelCallback);
}
return fse.remove(tmp, parallelCallback);
return remove(tmp, parallelCallback);
}
], queueCallback));
@ -177,12 +177,12 @@ export const build = (options, buildCallback) => {
console.log("Done loading from git");
return fs.exists(path.join(exported, "mbs.json"), (exists) => {
return exists(join(exported, "mbs.json"), (exists) => {
if (!exists) {
return done(null, "MBSNotFound");
}
return fs.readFile(path.join(exported, "mbs.json"), (readErr, data) => {
return readFile(join(exported, "mbs.json"), (readErr, data) => {
if (readErr) {
return done(readErr, "MBSUnableToRead");
}
@ -195,7 +195,7 @@ export const build = (options, buildCallback) => {
return done(err, "MBSMalformed");
}
return processor.processTask(parsed, {
return processTask(parsed, {
branch,
exported,
owner,

@ -1,8 +1,8 @@
"use strict";
import _ = require("underscore");
import reportProcessor = require("./report-processor");
import settings = require("../settings");
import * as _ from "underscore";
import { getStatusMessageFromRelease } from "./report-processor";
import settings from "../settings";
const featureNamePattern = /^feature-(\d+)(?:-[a-zA-Z0-9]+)+$/;
const versionNamePattern = /^v\d+(\.\d+)*$/;
@ -140,7 +140,7 @@ export const commentOnPullRequest = (originalOptions, callback) => {
const optionsGithub = _.extend(originalOptions, { "github": settings.createGithub(originalOptions.baseRepoOptions.owner) });
const options = _.extend(optionsGithub, { "onTenthAttempt": () => writeComment(optionsGithub, "Waiting for build to finish...", () => {}) });
return checkPullRequest(options, () => reportProcessor.getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => {
return checkPullRequest(options, () => getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => {
const escapedErr = String(statusMessageErr || "").substring(0, maxCommentLength)
.replace(/`/g, "` ");
const message = statusMessageErr

@ -1,9 +1,9 @@
"use strict";
import { EventEmitter } from "events"; // eslint-disable-line fp/no-events
import path = require("path");
import fs = require("fs");
import async = require("async");
import { join } from "path";
import { writeFile, mkdir } from "fs";
import { parallel } from "async";
import { Copier } from "recursive-tree-copy";
const safeGetEntries = (tree):any => {
@ -17,20 +17,20 @@ const safeGetEntries = (tree):any => {
const gitToFsCopier = new Copier({
"concurrency": 4,
"copyLeaf": (entry, targetDir, callback) => {
const targetPath = path.join(targetDir, entry.name());
const targetPath = join(targetDir, entry.name());
entry.getBlob((err, blob) => {
if (err) {
return callback(err);
}
return fs.writeFile(targetPath, blob.content(), callback);
return writeFile(targetPath, blob.content(), callback);
});
},
"createTargetTree": (tree, targetDir, callback) => {
const targetSubdir = path.join(targetDir, tree.name);
const targetSubdir = join(targetDir, tree.name);
fs.mkdir(targetSubdir, (err) => {
mkdir(targetSubdir, (err) => {
// Workaround for broken trees
if (err && err.code !== "EEXIST") {
return callback(err);
@ -50,7 +50,7 @@ const gitToFsCopier = new Copier({
return emitter.emit("error", err);
}
return async.parallel(entries.map((entry) => (callback) => {
return parallel(entries.map((entry) => (callback) => {
if (entry.isTree()) {
return entry.getTree((getTreeErr, subTree) => {
if (getTreeErr) {

@ -1,17 +1,9 @@
"use strict";
import nodegit = require("nodegit");
import fse = require("fs-extra");
import { Repository, Remote } from "nodegit";
import { mkdirsSync, removeSync } from "fs-extra";
import { gitToFs } from "./copy";
const mkdirs = (path) => {
fse.mkdirsSync(path); // eslint-disable-line no-sync
};
const removedirs = (path) => {
fse.removeSync(path); // eslint-disable-line no-sync
};
const fixUrl = (url) => {
if (!url.startsWith("https://")) {
return url;
@ -30,19 +22,19 @@ options = {
}
*/
export = (options, globalCallback) => {
export const gitLoader = (options, globalCallback) => {
const url = fixUrl(options.remote);
const path = `${options.local}/${options.hash}`;
const exported = options.exported;
removedirs(path);
mkdirs(path);
removeSync(path); // eslint-disable-line no-sync
mkdirsSync(path); // eslint-disable-line no-sync
console.log(`Cloning ${url} to ${path}`);
nodegit.Repository.init(path, 1)
Repository.init(path, 1)
.catch(globalCallback)
.then((repo) => nodegit.Remote.create(repo, "origin", url)
.then((repo) => Remote.create(repo, "origin", url)
.catch(globalCallback)
.then((remote) => remote.fetch([options.branch])
.catch(globalCallback)
@ -56,8 +48,8 @@ export = (options, globalCallback) => {
return repo.getCommit(options.hash)
.catch(globalCallback)
.then((commit) => {
removedirs(exported);
mkdirs(exported);
removeSync(exported);
mkdirsSync(exported);
gitToFs(commit, exported, (err, result) => {
repo.free();

@ -1,11 +1,11 @@
"use strict";
import path = require("path");
import fs = require("fs");
import zlib = require("zlib");
import glob = require("glob");
import streamBuffers = require("stream-buffers");
import _ = require("underscore");
import { join } from "path";
import { createReadStream, createWriteStream, exists } from "fs";
import { createGzip, createGunzip } from "zlib";
import * as glob from "glob";
import { ReadableStreamBuffer, WritableStreamBuffer } from "stream-buffers";
import * as _ from "underscore";
const reportFilename = "report.json.gz";
const maxAttemptsNumber = 100;
@ -30,12 +30,12 @@ export const writeReport = (releaseDir, err, result, callback) => {
result
});
const readable = new streamBuffers.ReadableStreamBuffer(readableStreamBufferOptions);
const writeStream = fs.createWriteStream(path.join(releaseDir, reportFilename));
const readable = new ReadableStreamBuffer(readableStreamBufferOptions);
const writeStream = createWriteStream(join(releaseDir, reportFilename));
readable
.on("error", callback)
.pipe(zlib.createGzip())
.pipe(createGzip())
.on("error", callback)
.pipe(writeStream)
.on("error", callback)
@ -49,12 +49,12 @@ export const writeReport = (releaseDir, err, result, callback) => {
};
export const readReport = (releaseDir, callback) => {
const readStream = fs.createReadStream(path.join(releaseDir, reportFilename));
const writable = new streamBuffers.WritableStreamBuffer();
const readStream = createReadStream(join(releaseDir, reportFilename));
const writable = new WritableStreamBuffer();
readStream
.on("error", callback)
.pipe(zlib.createGunzip())
.pipe(createGunzip())
.on("error", callback)
.pipe(writable)
.on("error", callback)
@ -72,7 +72,7 @@ export const readReport = (releaseDir, callback) => {
};
export const loadReport = (app, options, callback) => {
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const releaseDir = join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
glob("**", {
"cwd": releaseDir,
@ -82,10 +82,10 @@ export const loadReport = (app, options, callback) => {
return callback(err, options);
}
const reportFile = path.join(releaseDir, reportFilename);
const reportFile = join(releaseDir, reportFilename);
return fs.exists(reportFile, (exists) => {
if (!exists) {
return exists(reportFile, (reportFileExists) => {
if (!reportFileExists) {
return callback("ReportFileNotFound", options);
}
@ -105,12 +105,12 @@ export const loadReport = (app, options, callback) => {
export const getStatusMessageFromRelease = (app, originalOptions, callback) => {
const options = _.extend(originalOptions, { "attemptsGetReport": (Number(originalOptions.attemptsGetReport) || Number()) + 1 });
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const reportFile = path.join(releaseDir, reportFilename);
const releaseDir = join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const reportFile = join(releaseDir, reportFilename);
fs.exists(reportFile, (exists) => {
if (!exists) {
return setTimeout(() => fs.exists(releaseDir, (dirExists) => {
exists(reportFile, (reportFileExists) => {
if (!reportFileExists) {
return setTimeout(() => exists(releaseDir, (dirExists) => {
if (!dirExists) {
return callback("Release directory not found. Probably repository hooks are not configured");
}

@ -1,19 +1,19 @@
"use strict";
import path = require("path");
import fs = require("fs");
import _ = require("underscore");
import reportProcessor = require("./report-processor");
import { join } from "path";
import { exists, readFile } from "fs";
import * as _ from "underscore";
import { loadReport } from "./report-processor";
const addBranchInfo = (app, options, callback) => {
const branchFile = path.join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`);
const branchFile = join(app.get("releasepath"), options.owner, options.reponame, "$revs", `${options.rev}.branch`);
fs.exists(branchFile, (exists) => {
exists(branchFile, (exists) => {
if (!exists) {
return callback("BranchFileNotFound", options);
}
return fs.readFile(branchFile, (err, data) => {
return readFile(branchFile, (err, data) => {
if (err) {
return callback(err, options);
}
@ -31,14 +31,14 @@ const addBranchInfo = (app, options, callback) => {
};
const addRevInfo = (app, options, callback) => {
const revFile = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id");
const revFile = join(app.get("releasepath"), options.owner, options.reponame, options.branch, "latest.id");
fs.exists(revFile, (exists) => {
exists(revFile, (exists) => {
if (!exists) {
return callback("RevFileNotFound", options);
}
return fs.readFile(revFile, (err, data) => {
return readFile(revFile, (err, data) => {
if (err) {
return callback(err, options);
}
@ -81,5 +81,5 @@ export const getReport = (app, options, callback) => parseOptions(app, options,
return callback(err, {});
}
return reportProcessor.loadReport(app, result, callback);
return loadReport(app, result, callback);
});

@ -1,6 +1,6 @@
"use strict";
import _ = require("underscore");
import * as _ from "underscore";
import tasks from "./tasks";
// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.

@ -1,8 +1,8 @@
"use strict";
import glob = require("glob");
import * as glob from "glob";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
"cwd": processor.context.exported,
"dot": true

@ -1,6 +1,6 @@
"use strict";
export = (params, processor) => {
export default (params, processor) => {
const condition = (!params.owner || params.owner === processor.context.owner)
&& (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch);
const task = (condition && params.task) || params.otherwise;

@ -1,16 +1,16 @@
"use strict";
import path = require("path");
import fse = require("fs-extra");
import { join } from "path";
import { copy } from "fs-extra";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const sourceFilePath = path.join(processor.context.exported, params.filename);
const targetFilePath = path.join(processor.context.release, params.filename);
const sourceFilePath = join(processor.context.exported, params.filename);
const targetFilePath = join(processor.context.release, params.filename);
processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`);
fse.copy(sourceFilePath, targetFilePath, (err) => {
copy(sourceFilePath, targetFilePath, (err) => {
if (err) {
processor.onError(`Unable to copy file: ${err}`);
} else {

@ -1,8 +1,8 @@
"use strict";
import glob = require("glob");
import * as glob from "glob";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => glob(params.mask, {
"cwd": processor.context.exported,
"dot": true

@ -1,27 +1,27 @@
"use strict";
import fs = require("fs");
import path = require("path");
import cssnano = require("cssnano");
import { readFile, writeFile } from "fs";
import { join } from "path";
import { process as cssnanoProcess } from "cssnano";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
const filePath = join(processor.context.exported, params.filename);
fs.readFile(filePath, (readErr, css) => {
readFile(filePath, (readErr, css) => {
if (readErr) {
processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);
return processor.done();
}
return cssnano.process(css)
return cssnanoProcess(css)
.catch((cssErr) => {
processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
processor.done();
})
.then((result) => {
fs.writeFile(filePath, result.css, (writeErr) => {
writeFile(filePath, result.css, (writeErr) => {
if (writeErr) {
processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
} else {

@ -1,9 +1,9 @@
"use strict";
import glob = require("glob");
import * as glob from "glob";
const flagDoneName = "cssnanoallDone";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json");

@ -1,15 +1,15 @@
"use strict";
import path = require("path");
import fse = require("fs-extra");
import { join } from "path";
import { remove } from "fs-extra";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const sourceFilePath = path.join(processor.context.exported, params.filename);
const sourceFilePath = join(processor.context.exported, params.filename);
processor.onInfo(`Deleting ${sourceFilePath}`);
fse.remove(sourceFilePath, (err) => {
remove(sourceFilePath, (err) => {
if (err) {
processor.onError(`Unable to delete file: ${err}`);
} else {

@ -1,8 +1,8 @@
"use strict";
import sequential = require("./sequential");
import sequential from "./sequential";
export = (params, processor) => sequential({
export default (params, processor) => sequential({
"tasks": [
{
"name": "build",

@ -1,8 +1,8 @@
"use strict";
import sequential = require("./sequential");
import sequential from "./sequential";
export = (params, processor) => sequential({
export default (params, processor) => sequential({
"tasks": [
{
"name": "build",

@ -1,12 +1,12 @@
"use strict";
import { spawn } from "child_process";
import streamBuffers = require("stream-buffers");
import settings = require("../../settings");
import { WritableStreamBuffer } from "stream-buffers";
import settings from "../../settings";
const wrapBuilder = (builder, input, onExit) => {
const resultBuffer = new streamBuffers.WritableStreamBuffer();
const errorBuffer = new streamBuffers.WritableStreamBuffer();
const resultBuffer = new WritableStreamBuffer();
const errorBuffer = new WritableStreamBuffer();
builder.stdout.on("data", (data) => {
resultBuffer.write(data);
@ -34,7 +34,7 @@ const safeParseJson = (data):any => {
}
};
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const input = JSON.stringify(params);
const builder = spawn(settings.builderExecutable, [params.command]);

@ -1,6 +1,6 @@
"use strict";
import sequential = require("./sequential");
import sequential from "./sequential";
const createTasks = function *(params) {
if (!params.skipMbsCheckStyle) {
@ -35,7 +35,7 @@ const createTasks = function *(params) {
};
};
export = (params, processor) => {
export default (params, processor) => {
const tasks = Array.from(createTasks(params));
return sequential({ tasks }, processor);

@ -1,9 +1,9 @@
"use strict";
import path = require("path");
import fs = require("fs");
import async = require("async");
import glob = require("glob");
import { join } from "path";
import { readFile } from "fs";
import { parallel } from "async";
import * as glob from "glob";
const autoGeneratedMarker
= "//------------------------------------------------------------------------------\n"
@ -11,7 +11,7 @@ const autoGeneratedMarker
const flagDoneName = "dotnetcheckerDone";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
return processor.done();
@ -56,8 +56,8 @@ export = (params, processor) => ({
return processor.onInfo(`Checked file ${file}`);
};
return async.parallel(files.map((file) => (callback) => fs.readFile(
path.join(processor.context.exported, file),
return parallel(files.map((file) => (callback) => readFile(
join(processor.context.exported, file),
{ "encoding": "utf8" },
(readErr, data) => {
if (readErr) {

@ -1,11 +1,11 @@
"use strict";
import path = require("path");
import _ = require("underscore");
import settings = require("../../settings");
import dotnetbuilderwrapper = require("./dotnetbuilderwrapper");
import { join } from "path";
import * as _ from "underscore";
import settings from "../../settings";
import dotnetbuilderwrapper from "./dotnetbuilderwrapper";
export = (params, processor) => {
export default (params, processor) => {
if (settings.isCodeAnalysisUnsupported && params.forceCodeAnalysis) {
processor.onError("Code analysis is not supported");
@ -28,7 +28,7 @@ export = (params, processor) => {
"Configuration": params.configuration,
"OutputDirectory": params.overrideOutputDirectory,
"SkipCodeAnalysis": skipCodeAnalysis,
"SolutionPath": path.join(processor.context.exported, params.solution),
"SolutionPath": join(processor.context.exported, params.solution),
"Target": params.target,
"command": "compile"
};

@ -1,9 +1,9 @@
"use strict";
import _ = require("underscore");
import dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");
import * as _ from "underscore";
import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";
export = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
export default (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({
"params": { "filename": nupkg },
"type": "copy"

@ -1,8 +1,8 @@
"use strict";
import conditional = require("./conditional");
import conditional from "./conditional";
export = (params, processor) => conditional({
export default (params, processor) => conditional({
"branch": "master",
"otherwise": {
"name": "nuget-pack",

@ -1,7 +1,7 @@
"use strict";
import path = require("path");
import sequential = require("./sequential");
import { join } from "path";
import sequential from "./sequential";
const postfixLength = 16;
const fourDigits = 10000;
@ -15,7 +15,7 @@ const addPostfix = (version, params, processor) => {
return `${version}-r${processor.context.rev.substr(0, postfixLength)}`;
};
export = (params, processor) => {
export default (params, processor) => {
const date = new Date();
const major = params.major || "0";
const minor = (date.getFullYear() * fourDigits) + ((date.getMonth() + 1) * twoDigits) + date.getDate();
@ -29,7 +29,7 @@ export = (params, processor) => {
"params": {
"BaseDirectory": processor.context.exported,
"OutputDirectory": processor.context.exported,
"SpecPath": path.join(processor.context.exported, params.nuspec),
"SpecPath": join(processor.context.exported, params.nuspec),
"Version": version,
"command": "nugetpack"
},

@ -1,9 +1,9 @@
"use strict";
import _ = require("underscore");
import dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");
import * as _ from "underscore";
import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";
export = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
export default (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({
"params": { "Package": nupkg },
"type": "dotnetnugetpushonly"

@ -1,12 +1,12 @@
"use strict";
import path = require("path");
import dotnetbuilderwrapper = require("./dotnetbuilderwrapper");
import settings = require("../../settings");
import { join } from "path";
import dotnetbuilderwrapper from "./dotnetbuilderwrapper";
import settings from "../../settings";
export = (params, processor) => dotnetbuilderwrapper({
export default (params, processor) => dotnetbuilderwrapper({
"ApiKey": settings.nugetApiKey,
"NugetHost": settings.nugetHost,
"Package": path.join(processor.context.exported, params.Package),
"Package": join(processor.context.exported, params.Package),
"command": "nugetpush"
}, processor);

@ -1,14 +1,14 @@
"use strict";
import path = require("path");
import sequential = require("./sequential");
import { join } from "path";
import sequential from "./sequential";
export = (params, processor) => sequential({
export default (params, processor) => sequential({
"tasks": [
{
"params": {
"BaseDirectory": processor.context.exported,
"SolutionPath": path.join(processor.context.exported, params.solution),
"SolutionPath": join(processor.context.exported, params.solution),
"command": "nugetrestore"
},
"type": "dotnetbuilderwrapper"

@ -1,9 +1,9 @@
"use strict";
import path = require("path");
import dotNetBuilderWrapper = require("./dotnetbuilderwrapper");
import { join } from "path";
import dotNetBuilderWrapper from "./dotnetbuilderwrapper";
export = (params, processor) => dotNetBuilderWrapper({
"TestLibraryPath": path.join(processor.context.exported, params.assembly),
export default (params, processor) => dotNetBuilderWrapper({
"TestLibraryPath": join(processor.context.exported, params.assembly),
"command": "nunit"
}, processor);

@ -1,9 +1,9 @@
"use strict";
import glob = require("glob");
import * as glob from "glob";
const flagDoneName = "dotnetnunitallDone";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");

@ -1,36 +1,36 @@
"use strict";
import path = require("path");
import fs = require("fs");
import Mustache = require("mustache");
import sequential = require("./sequential");
import { join } from "path";
import { readFileSync } from "fs";
import { render } from "mustache";
import sequential from "./sequential";
// eslint-disable-next-line no-sync
const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
const msbuildTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
const deployTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });
const versionTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });
export = (params, processor) => sequential({
export default (params, processor) => sequential({
"tasks": [
{
"params": {
"data": Mustache.render(msbuildTemplate, params),
"data": render(msbuildTemplate, params),
"filename": "MakePackage.msbuild"
},
"type": "writefile"
},
{
"params": {
"data": Mustache.render(deployTemplate, params),
"data": render(deployTemplate, params),
"filename": "Deploy.bat"
},
"type": "writefile"
},
{
"params": {
"data": Mustache.render(versionTemplate, params),
"data": render(versionTemplate, params),
"filename": "version.aspx"
},
"type": "writefile"

@ -1,10 +1,10 @@
"use strict";
import path = require("path");
import fs = require("fs");
import async = require("async");
import glob = require("glob");
import settings = require("../../settings");
import { join } from "path";
import { readFile, writeFile } from "fs";
import { parallel, waterfall } from "async";
import * as glob from "glob";
import settings from "../../settings";
const flagDoneName = "dotnetrewriterDone";
@ -31,7 +31,7 @@ const processAssemblyInfo = (params, processor, appendInformationalVersion) => (
return cb(null, processInformationalVersion(processInternalsVisible(originalContent)));
};
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
return processor.done();
@ -54,10 +54,10 @@ export = (params, processor) => ({
return processor.done();
}
return async.parallel(files.map((file) => (callback) => async.waterfall([
fs.readFile.bind(null, path.join(processor.context.exported, file), { "encoding": "utf8" }),
return parallel(files.map((file) => (callback) => waterfall([
readFile.bind(null, join(processor.context.exported, file), { "encoding": "utf8" }),
processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
fs.writeFile.bind(null, path.join(processor.context.exported, file))
writeFile.bind(null, join(processor.context.exported, file))
], (err) => {
if (err) {
processor.onError(`Unable to rewrite file ${file}: ${err}`);

@ -1,6 +1,6 @@
"use strict";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (params.error) {
processor.onError(params.error);

@ -1,15 +1,15 @@
"use strict";
import path = require("path");
import { join } from "path";
import { CLIEngine } from "eslint";
import settings = require("../../settings");
import settings from "../../settings";
const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig });
const errorSeverity = 2;
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
const filePath = join(processor.context.exported, params.filename);
const result = cli.executeOnFiles([filePath]);
processor.onInfo(`ESLinted ${params.filename}`);

@ -1,9 +1,9 @@
"use strict";
import glob = require("glob");
import * as glob from "glob";
const flagDoneName = "eslintbrowserallDone";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(flagDoneName)) {
processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");

@ -10,7 +10,7 @@ require("fs").readdirSync(__dirname)
const name = file.replace(".ts", "");
// eslint-disable-next-line global-require
tasks[name] = require(`./${file}`);
tasks[name] = require(`./${file}`).default;
}
});

@ -1,3 +1,3 @@
"use strict";
export = (params, processor) => ({ "process": () => processor.done() });
export default (params, processor) => ({ "process": () => processor.done() });

@ -1,8 +1,8 @@
"use strict";
import sequential = require("./sequential");
import sequential from "./sequential";
export = (params, processor) => sequential({
export default (params, processor) => sequential({
"tasks": [
{
"params": { "excludeFiles": params.eslintExcludeFiles },

@ -1,7 +1,7 @@
"use strict";
import async = require("async");
import { parallel } from "async";
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
export = (params, processor) => ({ "process": () => async.parallel(params.tasks.map(mapper(processor)), () => processor.done()) });
export default (params, processor) => ({ "process": () => parallel(params.tasks.map(mapper(processor)), () => processor.done()) });

@ -1,7 +1,7 @@
"use strict";
import async = require("async");
import { series } from "async";
const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);
export = (params, processor) => ({ "process": () => async.series(params.tasks.map(mapper(processor)), () => processor.done()) });
export default (params, processor) => ({ "process": () => series(params.tasks.map(mapper(processor)), () => processor.done()) });

@ -1,15 +1,15 @@
"use strict";
import fs = require("fs");
import path = require("path");
import UglifyJS = require("uglify-js");
import { writeFile } from "fs";
import { join, normalize } from "path";
import { minify } from "uglify-js";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const filePath = path.normalize(path.join(processor.context.exported, params.filename));
const result = UglifyJS.minify(filePath);
const filePath = normalize(join(processor.context.exported, params.filename));
const result = minify(filePath);
fs.writeFile(filePath, result.code, (err) => {
writeFile(filePath, result.code, (err) => {
if (err) {
processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
} else {

@ -1,10 +1,10 @@
"use strict";
import glob = require("glob");
import * as glob from "glob";
const doneFlagName = "uglifyjsallDone";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
if (processor.context.containsFlag(doneFlagName)) {
processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");

@ -1,15 +1,15 @@
"use strict";
import fs = require("fs");
import path = require("path");
import { writeFile } from "fs";
import { join } from "path";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
const filePath = join(processor.context.exported, params.filename);
processor.onInfo(`Writing to ${filePath}`);
fs.writeFile(filePath, params.data, (err) => {
writeFile(filePath, params.data, (err) => {
if (err) {
processor.onError(`Unable to write file: ${err}`);
} else {

@ -1,17 +1,17 @@
"use strict";
import fs = require("fs");
import path = require("path");
import Archiver = require("archiver");
import { createWriteStream } from "fs";
import { join, normalize } from "path";
import * as Archiver from "archiver";
export = (params, processor) => ({
export default (params, processor) => ({
"process": () => {
const sourceDirectoryPath = path.normalize(path.join(processor.context.exported, String(params.directory || "")));
const targetArchivePath = path.normalize(path.join(processor.context.release, params.archive));
const sourceDirectoryPath = normalize(join(processor.context.exported, String(params.directory || "")));
const targetArchivePath = normalize(join(processor.context.release, params.archive));
processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`);
const output = fs.createWriteStream(targetArchivePath);
const output = createWriteStream(targetArchivePath);
const archive = new Archiver("zip");
output.on("close", () => processor.done());

@ -1,6 +1,6 @@
"use strict";
export = (req, res) => {
export default (req, res) => {
const options = {
"branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch,

@ -1,10 +1,10 @@
"use strict";
import postreceive = require("./postreceive");
import manual = require("./manual");
import status = require("./status");
import artifact = require("./artifact");
import release = require("./release");
import postreceive from "./postreceive";
import * as manual from "./manual";
import * as status from "./status";
import artifact from "./artifact";
import release from "./release";
const index = (req, res) => res.render("index", { "title": `Express<br/>\r\n${req}` });

@ -1,7 +1,7 @@
"use strict";
import _ = require("underscore");
import builder = require("../lib/builder");
import * as _ from "underscore";
import { build } from "../lib/builder";
export const get = (req, res) => res.render("manual");
@ -11,7 +11,7 @@ export const post = (req, res) => {
"url": `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}`
});
builder.build(options, (err, result) => {
build(options, (err, result) => {
console.log("Done processing manual request");
console.log(`Error: ${err}`);
res.render("manual-done", {

@ -1,7 +1,7 @@
"use strict";
import builder = require("../lib/builder");
import commenter = require("../lib/commenter");
import { build } from "../lib/builder";
import { commentOnPullRequest } from "../lib/commenter";
const getBranchDescription = (options) => `${options.owner}/${options.reponame}:${options.branchname || options.branch}`;
@ -18,7 +18,7 @@ const processPush = (req, res, payload) => {
console.log(`Got push event for ${getBranchDescription(options)}`);
builder.build(options, (err, result) => {
build(options, (err, result) => {
console.log("Done processing request from GitHub");
console.log(`Error: ${err}`);
res.send(`Done processing request from GitHub\r\nError: ${err}\r\nResult: ${result}`);
@ -78,7 +78,7 @@ const processPullRequest = (req, res, payload) => {
return res.send("");
}
return commenter.commentOnPullRequest(
return commentOnPullRequest(
(action === "closed" && masterOptions) || options,
(err, data) => {
if (err) {
@ -90,7 +90,7 @@ const processPullRequest = (req, res, payload) => {
);
};
export = (req, res) => {
export default (req, res) => {
if (!req.body || (!req.body.payload && !req.body.repository)) {
return res.end();
}

@ -1,9 +1,9 @@
"use strict";
import path = require("path");
import Archiver = require("archiver");
import { join } from "path";
import * as Archiver from "archiver";
import reportProcessor = require("../lib/report-processor");
import { readReport } from "../lib/report-processor";
const getDatePart = (report) => {
if (!report.date) {
@ -23,7 +23,7 @@ const getDatePart = (report) => {
return `${year}.${month}.${day}.${hours}.${minutes}.${seconds}`;
};
export = (req, res, next) => {
export default (req, res, next) => {
const options = {
"branch": `/refs/heads/${req.params.branch}`,
"branchName": req.params.branch,
@ -32,9 +32,9 @@ export = (req, res, next) => {
"rev": req.params.rev
};
const releasePath = path.join(req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const releasePath = join(req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
reportProcessor.readReport(releasePath, (err, report) => {
readReport(releasePath, (err, report) => {
if (err) {
return next(err);
}

@ -1,8 +1,8 @@
"use strict";
import url = require("url");
import _ = require("underscore");
import statusProcessor = require("../lib/status-processor");
import { parse } from "url";
import * as _ from "underscore";
import { getReport } from "../lib/status-processor";
const parseOptionsFromReferer = (path, callback) => {
const pathParts = path.split("/").filter((value) => value);
@ -89,12 +89,12 @@ export const image = (req, res) => {
res.render("status-image", _.extend(options, getAdditionalOptions(err, options)));
};
parseOptionsFromReferer(url.parse(req.headers.referer || "").pathname || "", (err, options) => {
parseOptionsFromReferer(parse(req.headers.referer || "").pathname || "", (err, options) => {
if (err) {
return handle(err, options);
}
return statusProcessor.getReport(req.app, options, handle);
return getReport(req.app, options, handle);
});
};
@ -107,7 +107,7 @@ export const page = (req, res) => {
"rev": req.params.rev
};
statusProcessor.getReport(req.app, options, createShowReport(res));
getReport(req.app, options, createShowReport(res));
};
export const pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0], (err, options) => {
@ -115,5 +115,5 @@ export const pageFromGithub = (req, res) => parseOptionsFromReferer(req.params[0
return createShowReport(res)(err, options);
}
return statusProcessor.getReport(req.app, options, createShowReport(res));
return getReport(req.app, options, createShowReport(res));
});

@ -1,6 +1,6 @@
"use strict";
const GitHubApi = require("github");
import GitHubApi = require("github");
const createGithub = () => new GitHubApi({
"debug": false,
@ -8,11 +8,10 @@ const createGithub = () => new GitHubApi({
"host": "pos-github.payonline.ru",
"pathPrefix": "/api/v3",
"protocol": "https",
"timeout": 5000,
"version": "3.0.0"
"timeout": 5000
});
export = {
export default {
"builderExecutable": "../DotNetBuilder/bin/Debug/MicroBuildServer.DotNetBuilder.exe",
"codeSigningKeyFile": null,
"codeSigningPublicKey": null,

Loading…
Cancel
Save