Improved code style

dependabot/npm_and_yarn/BuildServer/eslint-7.2.0
Inga 🏳‍🌈 7 years ago
parent 4085799726
commit 5d4d86715b
21 changed files:

1. BuildServer/compress-old-reports.js (4)
2. BuildServer/lib/builder.js (159)
3. BuildServer/lib/commenter.js (42)
4. BuildServer/lib/git/loader.js (4)
5. BuildServer/lib/mail-sender.js (14)
6. BuildServer/lib/report-processor.js (47)
7. BuildServer/lib/status-processor.js (1)
8. BuildServer/lib/task-processor.js (44)
9. BuildServer/lib/tasks/conditional.js (4)
10. BuildServer/lib/tasks/cssnano.js (16)
11. BuildServer/lib/tasks/dotnetbuilderwrapper.js (4)
12. BuildServer/lib/tasks/dotnetcheckstyle.js (61)
13. BuildServer/lib/tasks/dotnetnugetpack.js (47)
14. BuildServer/lib/tasks/dotnetnugetprocessinternal.js (41)
15. BuildServer/lib/tasks/dotnetnugetpush.js (47)
16. BuildServer/lib/tasks/dotnetpackwebapp.js (3)
17. BuildServer/lib/tasks/dotnetrewrite.js (42)
18. BuildServer/lib/tasks/eslintbrowser.js (4)
19. BuildServer/lib/tasks/index.js (15)
20. BuildServer/package.json (17)
21. BuildServer/routes/release.js (1)

@@ -7,6 +7,8 @@ const glob = require("glob");
const async = require("async");
const settings = require("./settings");
const streamsNumber = 100;
glob("**\\report.json", { "cwd": settings.releasepath }, (globErr, files) => {
if (globErr) {
return console.log(globErr);
@@ -24,7 +26,7 @@ glob("**\\report.json", { "cwd": settings.releasepath }, (globErr, files) => {
.on("finish", () => {
fs.unlink(originalPath, callback);
});
}), 100, (err) => {
}), streamsNumber, (err) => {
if (err) {
console.log(err);
}

@@ -11,10 +11,30 @@ const mailSender = require("./mail-sender");
const settings = require("../settings");
const codePostfix = "";
const mailLazinessLevel = 1000;
const maxDescriptionLength = 140;
const maxTmpcodepathLength = 15;
const twoDigits = 100;
const notifyStatus = (options, callback) => {
const createFinalState = (isSuccess) => {
if (isSuccess) {
return "success";
}
return "error";
};
const createBuildDoneMessage = (isSuccess, name) => {
if (isSuccess) {
return `Successfully built ${name}`;
}
return `Build failed for ${name}`;
};
const notifyStatus = (options, notifyStatusCallback) => {
const status = {
"description": String(options.description || "").substr(0, 140),
"description": String(options.description || "").substr(0, maxDescriptionLength),
"owner": options.owner,
"repo": options.reponame,
"sha": options.hash,
@@ -22,19 +42,27 @@ const notifyStatus = (options, callback) => {
"target_url": `${settings.siteRoot}status/${options.owner}/${options.reponame}/${options.hash}`
};
settings.createGithub(options.owner).repos.createStatus(status, (err) => {
if (err) {
console.log(`Error while creating status: ${err}`);
settings.createGithub(options.owner).repos.createStatus(status, (createStatusErr) => {
if (createStatusErr) {
console.log(`Error while creating status: ${createStatusErr}`);
console.log(status);
return callback(err);
return notifyStatusCallback(createStatusErr);
}
return callback();
return notifyStatusCallback();
});
};
const build = (options, callback) => {
const wrapGitLoader = (skipGitLoader) => {
if (!skipGitLoader) {
return gitLoader;
}
return (gitLoaderOptions, gitLoaderCallback) => process.nextTick(gitLoaderCallback);
};
const build = (options, buildCallback) => {
const url = options.url;
const owner = options.owner;
const reponame = options.reponame;
@@ -42,28 +70,26 @@ const build = (options, callback) => {
const branch = options.branch;
const skipGitLoader = options.skipGitLoader;
const local = path.join(options.app.get("gitpath"), "r");
const tmp = path.join(options.app.get("tmpcodepath"), rev.substr(0, 15));
const tmp = path.join(options.app.get("tmpcodepath"), rev.substr(0, maxTmpcodepathLength));
const exported = tmp + codePostfix;
const release = path.join(options.app.get("releasepath"), owner, reponame, branch, rev);
const statusQueue = async.queue((task, callback) => task(callback), 1);
const actualGitLoader = skipGitLoader
? (options, callback) => process.nextTick(callback)
: gitLoader;
const statusQueue = async.queue((task, queueCallback) => task(queueCallback), 1);
const actualGitLoader = wrapGitLoader(skipGitLoader);
const date = new Date();
const versionMajor = date.getFullYear();
const versionMinor = date.getMonth() + 1;
const versionBuild = date.getDate();
const versionRev = (date.getHours() * 100) + date.getMinutes();
const versionRev = (date.getHours() * twoDigits) + date.getMinutes();
const version = `${versionMajor}.${versionMinor}.${versionBuild}.${versionRev}`;
const versionInfo = `${version}; built from ${rev}; repository: ${owner}/${reponame}; branch: ${branch}`;
statusQueue.push((callback) => notifyStatus({
statusQueue.push((queueCallback) => notifyStatus({
"description": "Preparing to build...",
"hash": rev,
owner,
reponame,
"state": "pending"
}, callback));
}, queueCallback));
fse.mkdirsSync(release);
@@ -71,57 +97,60 @@ const build = (options, callback) => {
fse.mkdirsSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs"));
fs.writeFileSync(path.join(options.app.get("releasepath"), owner, reponame, "$revs", `${rev}.branch`), branch);
const done = (err, result) => {
const errorMessage = result && result.errors
? ((result.errors.$allMessages || [])[0] || {}).message
: err;
const warnMessage = result && result.warns
? ((result.warns.$allMessages || [])[0] || {}).message
: err;
const infoMessage = result && result.infos
? ((result.infos.$allMessages || []).slice(-1)[0] || {}).message
: err;
reportProcessor.writeReport(release, err, result, (writeErr) => {
statusQueue.push((callback) => async.parallel([
(callback) => notifyStatus({
const createErrorMessageForMail = (doneErr) => {
if (!doneErr) {
return "";
}
return `Error message: ${doneErr}\r\n\r\n`;
};
const createResultMessageForMail = (result) => {
if (!result || !result.messages || !result.messages.$allMessages) {
return JSON.stringify(result, null, " ");
}
return result.messages.$allMessages.map((msg) => `${msg.prefix}\t${msg.message}`).join("\r\n");
};
const done = (doneErr, result) => {
const allErrors = ((result || {}).errors || {}).$allMessages || [];
const allWarns = ((result || {}).warns || {}).$allMessages || [];
const allInfos = ((result || {}).infos || {}).$allMessages || [];
const errorMessage = (allErrors[0] || {}).message || doneErr;
const warnMessage = (allWarns[0] || {}).message;
const infoMessage = (allInfos[allInfos.length - 1] || {}).message;
reportProcessor.writeReport(release, doneErr, result, (writeErr) => {
statusQueue.push((queueCallback) => async.parallel([
(parallelCallback) => notifyStatus({
"description": errorMessage || warnMessage || infoMessage || "Success",
"hash": rev,
owner,
reponame,
"state": err
? "error"
: "success"
}, callback),
(callback) => mailSender.send({
"state": createFinalState(!doneErr)
}, parallelCallback),
(parallelCallback) => mailSender.send({
"from": settings.smtp.sender,
"headers": { "X-Laziness-level": 1000 },
"subject": `${err ? "Build failed for" : "Successfully built"} ${owner}/${reponame}/${branch}`,
"text": `Build status URL: ${settings.siteRoot}status/${owner}/${reponame}/${rev}\r\n\r\n`
+ (
err
? `Error message: ${err}\r\n\r\n`
: "")
+ (
(!result || !result.messages || !result.messages.$allMessages)
? JSON.stringify(result, null, 4)
: result.messages.$allMessages.map((msg) => `${msg.prefix}\t${msg.message}`).join("\r\n")),
"headers": { "X-Laziness-level": mailLazinessLevel },
"subject": createBuildDoneMessage(doneErr, `${owner}/${reponame}/${branch}`),
"text": `Build status URL: ${settings.siteRoot}status/${owner}/${reponame}/${rev}\r\n\r\n${createErrorMessageForMail(doneErr)}${createResultMessageForMail(result)}`,
"to": settings.smtp.receiver
}, callback),
(callback) => {
if (err) {
return process.nextTick(callback);
}, parallelCallback),
(parallelCallback) => {
if (doneErr) {
return process.nextTick(parallelCallback);
}
return fse.remove(tmp, callback);
return fse.remove(tmp, parallelCallback);
}
], callback));
], queueCallback));
if (writeErr) {
return callback(writeErr);
return buildCallback(writeErr);
}
return callback(err, result);
return buildCallback(doneErr, result);
});
};
@@ -131,11 +160,11 @@ const build = (options, callback) => {
"hash": rev,
local,
"remote": `${url}.git`
}, (err) => {
if (err) {
console.log(err);
}, (gitLoaderErr) => {
if (gitLoaderErr) {
console.log(gitLoaderErr);
return done(`Git fetch error: ${err}`);
return done(`Git fetch error: ${gitLoaderErr}`);
}
console.log("Done loading from git");
@@ -145,9 +174,9 @@ const build = (options, callback) => {
return done(null, "MBSNotFound");
}
return fs.readFile(path.join(exported, "mbs.json"), (err, data) => {
if (err) {
return done(err, "MBSUnableToRead");
return fs.readFile(path.join(exported, "mbs.json"), (readErr, data) => {
if (readErr) {
return done(readErr, "MBSUnableToRead");
}
let task = null;
@@ -169,12 +198,12 @@ const build = (options, callback) => {
rev,
tmp,
versionInfo
}, (err, result) => {
if (err) {
return done(err, result);
}, (processErr, result) => {
if (processErr) {
return done(processErr, result);
}
return done(err, result);
return done(processErr, result);
});
});
});

@@ -7,6 +7,9 @@ const featureNamePattern = /^feature-(\d+)(?:-[a-zA-Z0-9]+)+$/;
const versionNamePattern = /^v\d+(\.\d+)*$/;
const masterNamePattern = /^master$/;
const httpNotFound = 404;
const maxCommentLength = 64000;
const writeComment = (options, message, callback) => options.github.issues.createComment({
"body": message,
"number": options.number,
@@ -31,12 +34,12 @@ const checkHasIssue = (options, issueNumber, callback) => options.github.issues.
"number": issueNumber,
"owner": options.baseRepoOptions.owner,
"repo": options.baseRepoOptions.reponame
}, (err, result) => {
if (err && err.code !== 404) {
return callback(err);
}, (getIssueErr, result) => {
if (getIssueErr && getIssueErr.code !== httpNotFound) {
return callback(getIssueErr);
}
if (err || result.number.toString() !== issueNumber) {
if (getIssueErr || result.number.toString() !== issueNumber) {
return callback(null, false);
}
@@ -51,9 +54,9 @@ const checkHasReleases = (options, callback) => options.github.repos.getReleases
"owner": options.baseRepoOptions.owner,
"per_page": 1,
"repo": options.baseRepoOptions.reponame
}, (err, result) => {
if (err) {
return callback(err);
}, (getReleasesErr, result) => {
if (getReleasesErr) {
return callback(getReleasesErr);
}
return callback(null, result && result.length);
@@ -72,8 +75,8 @@ const checkPullRequest = (options, callback) => {
return closePullRequest(options, "Only merging from version to master is allowed", callback);
}
return checkHasReleases(options, (err, hasReleases) => {
if (err) {
return checkHasReleases(options, (hasReleasesErr, hasReleases) => {
if (hasReleasesErr) {
return writeComment(options, "Unable to check for releases", callback);
}
@@ -99,9 +102,9 @@ const checkPullRequest = (options, callback) => {
const issueNumber = featureNamePattern.exec(head.branchname)[1];
return checkHasIssue(options, issueNumber, (err, hasIssue, issueTitle) => {
if (err) {
return writeComment(options, `Unable to check for issue:\r\n\r\n${err.message}`, callback);
return checkHasIssue(options, issueNumber, (hasIssueErr, hasIssue, issueTitle) => {
if (hasIssueErr) {
return writeComment(options, `Unable to check for issue:\r\n\r\n${hasIssueErr.message}`, callback);
}
if (!hasIssue) {
@@ -110,8 +113,8 @@ const checkPullRequest = (options, callback) => {
const shouldHaveReleases = versionNamePattern.test(base.branchname);
return checkHasReleases(options, (err, hasReleases) => {
if (err) {
return checkHasReleases(options, (hasReleasesErr, hasReleases) => {
if (hasReleasesErr) {
return writeComment(options, "Unable to check for releases", callback);
}
@@ -124,7 +127,7 @@ const checkPullRequest = (options, callback) => {
}
if (options.action === "opened") {
return writeComment(options, `Merging feature #${issueNumber} (${issueTitle}) to ${base.branchname}${shouldHaveReleases ? " release" : ""}`, callback);
return writeComment(options, `Merging feature #${issueNumber} (${issueTitle}) to ${base.branchname}`, callback);
}
return process.nextTick(callback);
@@ -136,11 +139,12 @@ exports.commentOnPullRequest = (options, callback) => {
options.github = settings.createGithub(options.baseRepoOptions.owner);
options.headRepoOptions.onTenthAttempt = () => writeComment(options, "Waiting for build to finish...");
return checkPullRequest(options, (err, successMessage) => reportProcessor.getStatusMessageFromRelease(options.app, options.headRepoOptions, (err, successMessage) => {
const escapedErr = String(err || "").substring(0, 64000).replace(/`/g, "` ");
const message = err
return checkPullRequest(options, () => reportProcessor.getStatusMessageFromRelease(options.app, options.headRepoOptions, (statusMessageErr, statusSuccessMessage) => {
const escapedErr = String(statusMessageErr || "").substring(0, maxCommentLength)
.replace(/`/g, "` ");
const message = statusMessageErr
? `Was not built:\r\n\r\n\`\`\`\r\n${escapedErr}\r\n\`\`\`\r\n\r\nDO NOT MERGE!`
: `Build OK\r\n\r\n${successMessage}`;
: `Build OK\r\n\r\n${statusSuccessMessage}`;
const statusUrlMessage = `Build status URL: ${settings.siteRoot}status/${options.headRepoOptions.owner}/${options.headRepoOptions.reponame}/${options.headRepoOptions.rev}\r\n\r\n`;
return writeComment(options, `${message}\r\n\r\n${statusUrlMessage}`, callback);

@@ -28,8 +28,8 @@ module.exports = (options, globalCallback) => {
removedirs(path);
mkdirs(path);
if (url.substr(0, 8) === "https://") {
url = `git://${url.substr(8)}`;
if (url.startsWith("https://")) {
url = `git://${url.substr("https://".length)}`;
}
console.log(`Cloning ${url} to ${path}`);

@@ -1,15 +1,3 @@
"use strict";
const nodemailer = require("nodemailer");
const settings = require("../settings");
exports.send = (message, callback) => {
return process.nextTick(callback);
/*
var transport = nodemailer.createTransport("SMTP", settings.smtp);
transport.sendMail(message, (err, result) => {
transport.close();
callback(err, result);
});
*/
};
exports.send = (message, callback) => process.nextTick(callback);

@@ -8,6 +8,20 @@ const streamBuffers = require("stream-buffers");
const _ = require("underscore");
const reportFilename = "report.json.gz";
const maxAttemptsNumber = 100;
const attemptsTimeout = 30000;
const reportReadTimeout = 5000;
const directoryCheckTimeout = 2000;
const attemptsDebugFrequency = 10;
const readableStreamBufferOptions = {
"chunkSize": 262144,
"frequency": 1
};
const getAllErrors = (report) => ((report.result || {}).errors || {}).$allMessages || [];
const getAllWarns = (report) => ((report.result || {}).warns || {}).$allMessages || [];
const getAllInfos = (report) => ((report.result || {}).infos || {}).$allMessages || [];
const writeReport = (releaseDir, err, result, callback) => {
const data = JSON.stringify({
@@ -16,10 +30,7 @@ const writeReport = (releaseDir, err, result, callback) => {
result
});
const readable = new streamBuffers.ReadableStreamBuffer({
chunkSize: 1024 * 256,
frequency: 1
});
const readable = new streamBuffers.ReadableStreamBuffer(readableStreamBufferOptions);
const writeStream = fs.createWriteStream(path.join(releaseDir, reportFilename));
readable
@@ -101,7 +112,7 @@ exports.getStatusMessageFromRelease = (app, options, callback) => {
const releaseDir = path.join(app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev);
const reportFile = path.join(releaseDir, reportFilename);
options.attemptsGetReport = (options.attemptsGetReport || 0) + 1;
options.attemptsGetReport = (Number(options.attemptsGetReport) || Number()) + 1;
fs.exists(reportFile, (exists) => {
if (!exists) {
@@ -110,17 +121,17 @@ exports.getStatusMessageFromRelease = (app, options, callback) => {
return callback("Release directory not found. Probably repository hooks are not configured");
}
if (options.attemptsGetReport > 100) {
if (options.attemptsGetReport > maxAttemptsNumber) {
return callback("Report file not found");
}
// Maybe it is building right now
if ((options.attemptsGetReport % 10 === 0) && options.onTenthAttempt) {
if (!(options.attemptsGetReport % attemptsDebugFrequency) && options.onTenthAttempt) {
options.onTenthAttempt();
}
return setTimeout(() => exports.getStatusMessageFromRelease(app, options, callback), 30000);
}), 2000);
return setTimeout(() => exports.getStatusMessageFromRelease(app, options, callback), attemptsTimeout);
}), directoryCheckTimeout);
}
return setTimeout(() => readReport(releaseDir, (readErr, report) => {
@@ -132,11 +143,15 @@ exports.getStatusMessageFromRelease = (app, options, callback) => {
return callback("mbs.json is not found");
}
if (report.result && ((report.result.errors || {}).$allMessages || []).length + ((report.result.warns || {}).$allMessages || []).length > 0) {
const errors = getAllErrors(report);
const warns = getAllWarns(report);
const infos = getAllInfos(report);
if (errors.length + warns.length) {
return callback(_.map(
(report.result.errors || {}).$allMessages || [], (message) => `ERR: ${message.message}`
errors, (message) => `ERR: ${message.message}`
).concat(_.map(
(report.result.warns || {}).$allMessages || [], (message) => `WARN: ${message.message}`
warns, (message) => `WARN: ${message.message}`
))
.join("\r\n"));
}
@@ -145,11 +160,7 @@ exports.getStatusMessageFromRelease = (app, options, callback) => {
return callback(`CRITICAL ERROR: ${report.err}`);
}
if ((report.result.infos.$allMessages || []).length > 0) {
return callback(null, report.result.infos.$allMessages[report.result.infos.$allMessages.length - 1].message);
}
return callback(null, "OK");
}), 5000);
return callback(null, (infos[infos.length - 1] || { "message": "OK" }).message);
}), reportReadTimeout);
});
};

@@ -2,7 +2,6 @@
const path = require("path");
const fs = require("fs");
const glob = require("glob");
const reportProcessor = require("./report-processor");

@@ -47,34 +47,32 @@ const TaskProcessor = function (task, outerProcessor, callback) {
return this;
};
const pushMessage = (list, message, prefix) => {
const parts = prefix.split("/");
let innerList = list;
parts.forEach((part) => {
innerList = innerList[part] = innerList[part] || {};
});
innerList.$messages = innerList.$messages || [];
innerList.$messages.push(message);
list.$allMessages = list.$allMessages || [];
list.$allMessages.push({
message,
prefix
});
};
exports.processTask = (task, context, callback) => {
const errors = {};
const warns = {};
const infos = {};
const messages = {};
const messageProcessor = (list) => {
const f = (list, message, prefix) => {
const parts = prefix.split("/");
let innerList = list;
parts.forEach((part) => {
innerList = innerList[part] = innerList[part] || {};
});
innerList.$messages = innerList.$messages || [];
innerList.$messages.push(message);
list.$allMessages = list.$allMessages || [];
list.$allMessages.push({
message,
prefix
});
};
return (message, prefix) => {
f(list, message, prefix);
f(messages, message, prefix);
};
const messageProcessor = (list) => (message, prefix) => {
pushMessage(list, message, prefix);
pushMessage(messages, message, prefix);
};
const processor = new TaskProcessor(task, {
context,

@@ -3,9 +3,7 @@
module.exports = (params, processor) => {
const condition = (!params.owner || params.owner === processor.context.owner)
&& (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch);
const task = condition
? params.task
: params.otherwise;
const task = (condition && params.task) || params.otherwise;
return { "process": () => processor.processTask(task || { "type": "noop" }, processor.done.bind(processor)) };
};

@@ -8,22 +8,22 @@ module.exports = (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
fs.readFile(filePath, (err, css) => {
if (err) {
processor.onError(`Unable to read stylesheet ${params.filename}: ${err}`);
fs.readFile(filePath, (readErr, css) => {
if (readErr) {
processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);
return processor.done();
}
return cssnano.process(css)
.catch((err) => {
processor.onError(`Unable to uglify stylesheet: ${err}`);
.catch((cssErr) => {
processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
processor.done();
})
.then((result) => {
fs.writeFile(filePath, result.css, (err) => {
if (err) {
processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${err}`);
fs.writeFile(filePath, result.css, (writeErr) => {
if (writeErr) {
processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
} else {
processor.onInfo(`Saved uglified stylesheet for ${params.filename}; uglified length: ${result.css.length}`);
}

@@ -9,7 +9,7 @@ module.exports = (params, processor) => ({
let error = "";
const builder = spawn(settings.builderExecutable, [params.command]);
processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${JSON.stringify(params, null, 4)}`);
processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${JSON.stringify(params, null, " ")}`);
builder.stdout.on("data", (data) => {
result += data;
@@ -20,7 +20,7 @@ });
});
builder.on("exit", (code) => {
if (code !== 0) {
if (code) {
error = `Return code is ${code}\r\n${error}`;
processor.onError(error);

@@ -17,9 +17,9 @@ module.exports = (params, processor) => ({
processor.context.dotnetcheckerDone = true;
return glob("**/*.cs", { "cwd": processor.context.exported }, (err, files) => {
if (err) {
processor.onError(err);
return glob("**/*.cs", { "cwd": processor.context.exported }, (globErr, files) => {
if (globErr) {
processor.onError(globErr);
return processor.done();
}
@@ -32,44 +32,43 @@ module.exports = (params, processor) => ({
return processor.done();
}
return async.parallel(files.map((file) => (callback) => fs.readFile(path.join(processor.context.exported, file), { "encoding": "utf8" }, (err, data) => {
if (err) {
processor.onError(`Unable to check file ${file}: ${err}`);
return callback(err);
const processFile = (data, file) => {
if (data.includes("\r\n")) {
return processor.onError(`Windows-style EOL (0D0A) found in file ${file}`);
}
if (data.indexOf("\r\n") >= 0) {
processor.onError(`Windows-style EOL (0D0A) found in file ${file}`);
if (!params.ignoreCodeStyle) {
if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.startsWith(autoGeneratedMarker)) {
return processor.onInfo(`Skipping auto-generated file ${file}`);
}
return callback();
}
if (data.includes("\t") && data.includes(" ")) {
processor.onError(`Both tabs and spaces found in file ${file}`);
}
if (params.ignoreCodeStyle) {
return callback();
if (data.includes("\t")) {
processor.onError(`Tabs found in file ${file}`);
}
}
if (
data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker
|| data.substr(0, autoGeneratedMarker.length) === autoGeneratedMarker
) {
processor.onInfo(`Skipping auto-generated file ${file}`);
return processor.onInfo(`Checked file ${file}`);
};
return callback();
}
return async.parallel(files.map((file) => (callback) => fs.readFile(
path.join(processor.context.exported, file),
{ "encoding": "utf8" },
(readErr, data) => {
if (readErr) {
processor.onError(`Unable to check file ${file}: ${readErr}`);
if (data.indexOf("\t") >= 0 && data.indexOf(" ") >= 0) {
processor.onError(`Both tabs and spaces found in file ${file}`);
}
if (data.indexOf("\t") >= 0) {
processor.onError(`Tabs found in file ${file}`);
}
return callback(readErr);
}
processor.onInfo(`Checked file ${file}`);
processFile(data, file);
return callback();
})), processor.done.bind(processor));
return callback();
}
)), processor.done.bind(processor));
});
}
});

@@ -1,40 +1,11 @@
"use strict";
const path = require("path");
const sequential = require("./sequential");
const addPostfix = (version, params, processor) => {
if (params.withoutCommitSha) {
return version;
}
return `${version}-r${processor.context.rev.substr(0, 16)}`;
};
module.exports = (params, processor) => {
const date = new Date();
const major = params.major || "0";
const minor = (date.getFullYear() * 10000) + ((date.getMonth() + 1) * 100) + date.getDate();
const build = (date.getHours() * 10000) + (date.getMinutes() * 100) + date.getSeconds();
const version = addPostfix(params.version || `${major}.${minor}.${build}`, params, processor);
const nupkg = `${params.name}.${version}.nupkg`;
return sequential({
"tasks": [
{
"params": {
"BaseDirectory": processor.context.exported,
"OutputDirectory": processor.context.exported,
"SpecPath": path.join(processor.context.exported, params.nuspec),
"Version": version,
"command": "nugetpack"
},
"type": "dotnetbuilderwrapper"
},
{
"params": { "filename": nupkg },
"type": "copy"
}
]
}, processor);
};
const _ = require("underscore");
const dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");
module.exports = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({
"params": { "filename": nupkg },
"type": "copy"
})
}), processor);

@@ -0,0 +1,41 @@
"use strict";
const path = require("path");
const sequential = require("./sequential");
const postfixLength = 16;
const fourDigits = 10000;
const twoDigits = 100;
const addPostfix = (version, params, processor) => {
if (params.withoutCommitSha) {
return version;
}
return `${version}-r${processor.context.rev.substr(0, postfixLength)}`;
};
module.exports = (params, processor) => {
const date = new Date();
const major = params.major || "0";
const minor = (date.getFullYear() * fourDigits) + ((date.getMonth() + 1) * twoDigits) + date.getDate();
const build = (date.getHours() * fourDigits) + (date.getMinutes() * twoDigits) + date.getSeconds();
const version = addPostfix(params.version || `${major}.${minor}.${build}`, params, processor);
const nupkg = `${params.name}.${version}.nupkg`;
return sequential({
"tasks": [
{
"params": {
"BaseDirectory": processor.context.exported,
"OutputDirectory": processor.context.exported,
"SpecPath": path.join(processor.context.exported, params.nuspec),
"Version": version,
"command": "nugetpack"
},
"type": "dotnetbuilderwrapper"
},
params.getFinalTask(nupkg)
]
}, processor);
};

@@ -1,40 +1,11 @@
"use strict";
const path = require("path");
const sequential = require("./sequential");
const addPostfix = (version, params, processor) => {
if (params.withoutCommitSha) {
return version;
}
return `${version}-r${processor.context.rev.substr(0, 16)}`;
};
module.exports = (params, processor) => {
const date = new Date();
const major = params.major || "0";
const minor = (date.getFullYear() * 10000) + ((date.getMonth() + 1) * 100) + date.getDate();
const build = (date.getHours() * 10000) + (date.getMinutes() * 100) + date.getSeconds();
const version = addPostfix(params.version || `${major}.${minor}.${build}`, params, processor);
const nupkg = `${params.name}.${version}.nupkg`;
return sequential({
"tasks": [
{
"params": {
"BaseDirectory": processor.context.exported,
"OutputDirectory": processor.context.exported,
"SpecPath": path.join(processor.context.exported, params.nuspec),
"Version": version,
"command": "nugetpack"
},
"type": "dotnetbuilderwrapper"
},
{
"params": { "Package": nupkg },
"type": "dotnetnugetpushonly"
}
]
}, processor);
};
const _ = require("underscore");
const dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");
module.exports = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
"getFinalTask": (nupkg) => ({
"params": { "Package": nupkg },
"type": "dotnetnugetpushonly"
})
}), processor);

@@ -6,8 +6,11 @@ const Mustache = require("mustache");
const sequential = require("./sequential");
// eslint-disable-next-line no-sync
const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });
module.exports = (params, processor) => sequential({

@@ -6,6 +6,23 @@ const async = require("async");
const glob = require("glob");
const settings = require("../../settings");
const processAssemblyInfo = (params, processor, appendInformationalVersion) => (originalContent, cb) => {
let content = originalContent;
if (!params.skipCodeSigning && !settings.skipCodeSigning) {
content = content.replace(
/InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
(match, p1) => `InternalsVisibleTo("${p1},PublicKey=${settings.codeSigningPublicKey}")`
);
}
if (appendInformationalVersion) {
content = `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]\n`;
}
return cb(null, content);
};
module.exports = (params, processor) => ({
"process": () => {
if (processor.context.dotnetrewriterDone) {
@@ -14,26 +31,9 @@ module.exports = (params, processor) => ({
processor.context.dotnetrewriterDone = true;
const processAssemblyInfo = (appendInformationalVersion) => (originalContent, cb) => {
let content = originalContent;
if (!params.skipCodeSigning && !settings.skipCodeSigning) {
content = content.replace(
/InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
(match, p1) => `InternalsVisibleTo("${p1},PublicKey=${settings.codeSigningPublicKey}")`
);
}
if (appendInformationalVersion) {
content = `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]\n`;
}
return cb(null, content);
};
return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { "cwd": processor.context.exported }, (err, files) => {
if (err) {
processor.onError(err);
return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { "cwd": processor.context.exported }, (globErr, files) => {
if (globErr) {
processor.onError(globErr);
return processor.done();
}
@@ -48,7 +48,7 @@ module.exports = (params, processor) => ({
return async.parallel(files.map((file) => (callback) => async.waterfall([
fs.readFile.bind(null, path.join(processor.context.exported, file), { "encoding": "utf8" }),
processAssemblyInfo(file.toLowerCase().indexOf("assemblyinfo.cs") >= 0),
processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
fs.writeFile.bind(null, path.join(processor.context.exported, file))
], (err) => {
if (err) {

@@ -5,6 +5,8 @@ const CLIEngine = require("eslint").CLIEngine;
const settings = require("../../settings");
const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig });
const errorSeverity = 2;
module.exports = (params, processor) => ({
"process": () => {
const filePath = path.join(processor.context.exported, params.filename);
@@ -16,7 +18,7 @@ module.exports = (params, processor) => ({
subresult.messages.forEach((message) => {
const messageText = `${params.filename}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;
if (message.fatal || message.severity === 2) {
if (message.fatal || message.severity === errorSeverity) {
processor.onError(messageText);
} else {
processor.onWarn(messageText);

@@ -1,10 +1,13 @@
"use strict";
// Code taken from http://stackoverflow.com/a/17204293
require("fs").readdirSync(__dirname).forEach((file) => {
if (file.match(/\.js$/) !== null && file !== "index.js") {
const name = file.replace(".js", "");
// eslint-disable-next-line no-sync
require("fs").readdirSync(__dirname)
.forEach((file) => {
if (file.match(/\.js$/) !== null && file !== "index.js") {
const name = file.replace(".js", "");
exports[name] = require(`./${file}`);
}
});
// eslint-disable-next-line global-require
exports[name] = require(`./${file}`);
}
});

@@ -87,6 +87,23 @@
"initialized": "never"
}
],
"no-magic-numbers": [
"warn",
{
"ignore": [
0,
1
]
}
],
"id-length": [
"warn",
{
"exceptions": [
"_"
]
}
],
"no-extra-parens": [
"warn",
"all",

@@ -1,7 +1,6 @@
"use strict";
const path = require("path");
const fs = require("fs");
const Archiver = require("archiver");
const reportProcessor = require("../lib/report-processor");
