commit
17d326866e
@@ -1,55 +0,0 @@
"use strict";

const realFs = require("fs");
const fs = require("graceful-fs");

fs.gracefulify(realFs);

const express = require("express");
const routes = require("./routes");
const http = require("http");
const path = require("path");
const serveFavicon = require("serve-favicon");
const morgan = require("morgan");
const bodyParser = require("body-parser");
const methodOverride = require("method-override");
const serveStatic = require("serve-static");
const errorhandler = require("errorhandler");

const settings = require("./settings");

const app = express();

app.set("port", process.env.PORT || settings.port); // eslint-disable-line no-process-env
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "jade");
app.set("gitpath", settings.gitpath);
app.set("tmpcodepath", settings.tmpcodepath);
app.set("releasepath", settings.releasepath);
app.use(serveFavicon(path.join(__dirname, "public/images/favicon.png")));
app.use(morgan("dev"));
app.use(bodyParser.json({ "limit": "10mb" }));
app.use(bodyParser.urlencoded({ "extended": false }));
app.use(methodOverride());
app.use(serveStatic(path.join(__dirname, "public")));

if (app.get("env") === "development") {
    app.use(errorhandler());
}

app.route("/").get(routes.index);
app.route("/github/postreceive")
    .post(routes.postreceive)
    .get((req, res) => res.send("Only automated POST requests are allowed for postreceive route"));

app.route("/manual")
    .get(routes.manual.get)
    .post(routes.manual.post);

app.route("/status/:owner/:reponame/:branch/:rev?").get(routes.status.page);
app.route("/pos-github.payonline.ru/*").get(routes.status.pageFromGithub);
app.route("/status.svg").get(routes.status.image);
app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release);
app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact);

http.createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));
@@ -0,0 +1,55 @@
"use strict";

import * as fs from "fs";
import { gracefulify } from "graceful-fs";

gracefulify(fs);

import { json as bodyJson, urlencoded as bodyUrlencoded } from "body-parser";
import * as errorhandler from "errorhandler";
import * as express from "express";
import { createServer } from "http";
import * as methodOverride from "method-override";
import * as morgan from "morgan";
import { join } from "path";
import * as serveFavicon from "serve-favicon";
import * as serveStatic from "serve-static";

import * as routes from "./routes";
import settings from "./settings";

const app = express();

app.set("port", settings.port);
app.set("views", join(__dirname, "views"));
app.set("view engine", "jade");
app.set("gitpath", settings.gitpath);
app.set("tmpcodepath", settings.tmpcodepath);
app.set("releasepath", settings.releasepath);
app.use(serveFavicon(join(__dirname, "public/images/favicon.png")));
app.use(morgan("dev"));
app.use(bodyJson({ limit: "10mb" }));
app.use(bodyUrlencoded({ extended: false }));
app.use(methodOverride());
app.use(serveStatic(join(__dirname, "public")));

if (app.get("env") === "development") {
    app.use(errorhandler());
}

app.route("/").get(routes.index);
app.route("/github/postreceive")
    .post(routes.postreceive)
    .get((req, res) => res.send("Only automated POST requests are allowed for postreceive route"));

app.route("/manual")
    .get(routes.manual.get)
    .post(routes.manual.post);

app.route("/status/:owner/:reponame/:branch/:rev?").get(routes.status.page);
app.route("/pos-github.payonline.ru/*").get(routes.status.pageFromGithub);
app.route("/status.svg").get(routes.status.image);
app.route("/release/:owner/:reponame/:branch/:rev").get(routes.release);
app.route("/artifact/:owner/:reponame/:branch/:rev/*").get(routes.artifact);

createServer(app).listen(app.get("port"), () => console.log(`Express server listening on port ${app.get("port")}`));
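
[Reviewer illustration, not part of the commit: the entry point now imports settings as a default export. A minimal sketch of what ./settings.ts could look like, inferred only from the fields app.ts uses above; all values below are placeholders.]

// Hypothetical ./settings.ts shape (sketch only; fields beyond these are not implied by this hunk).
interface AppSettings {
    port: number;
    gitpath: string;
    tmpcodepath: string;
    releasepath: string;
}

const settings: AppSettings = {
    port: 3000,                      // placeholder
    gitpath: "/tmp/mbs/git",         // placeholder
    tmpcodepath: "/tmp/mbs/code",    // placeholder
    releasepath: "/tmp/mbs/release", // placeholder
};

export default settings;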
@@ -1,36 +0,0 @@
"use strict";

const fs = require("fs");
const path = require("path");
const zlib = require("zlib");
const glob = require("glob");
const async = require("async");
const settings = require("./settings");

const streamsNumber = 100;

glob("**\\report.json", { "cwd": settings.releasepath }, (globErr, files) => {
    if (globErr) {
        return console.log(globErr);
    }

    return async.parallelLimit(files.map((file) => (callback) => {
        const originalPath = path.join(settings.releasepath, file);
        const newPath = `${originalPath}.gz`;

        console.log(file);
        fs.createReadStream(originalPath)
            .pipe(zlib.createGzip())
            .pipe(fs.createWriteStream(newPath))
            .on("error", callback)
            .on("finish", () => {
                fs.unlink(originalPath, callback);
            });
    }), streamsNumber, (err) => {
        if (err) {
            console.log(err);
        }

        console.log("Done");
    });
});
@@ -0,0 +1,64 @@
interface Message {
    readonly message: string;
    readonly prefix: string;
}

interface PartialMessagesLeaf {
    readonly $messages?: string[];
}

interface PartialMessagesRecursive {
    readonly [propName: string]: Messages | string[] | Message[]; // workaround for compatibility with PartialMessagesLeaf and PartialMessagesRoot
}

interface PartialMessagesRoot {
    readonly $allMessages: Message[];
}

type Messages = PartialMessagesLeaf & PartialMessagesRecursive;

type MessagesRoot = PartialMessagesLeaf & PartialMessagesRecursive & PartialMessagesRoot;

interface ReportResult {
    readonly errors: MessagesRoot;
    readonly warns: MessagesRoot;
    readonly infos: MessagesRoot;
    readonly messages: MessagesRoot;
}

interface Report {
    readonly date: number;
    readonly err?: string;
    readonly result?: ReportResult;
}

interface TaskProcessorCallback {
    (err: string): void;
}

interface TaskProcessorCore {
    readonly onError: (message: string | Error, prefix?: string) => void;
    readonly onWarn: (message: string, prefix?: string) => void;
    readonly onInfo: (message: string, prefix?: string) => void;
    readonly context?: any;
}

interface TaskProcessor extends TaskProcessorCore {
    readonly process: () => void;
    readonly processTask: (task: TaskInfo, innerCallback: TaskProcessorCallback) => void;
    readonly done: () => void;
}

interface TaskInfo {
    name?: string;
    type: string;
    params: any;
}

interface Task {
    (params: any, processor: TaskProcessor): () => void;
}

interface Tasks {
    readonly [taskName: string]: Task;
}
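
[Reviewer illustration, not part of the commit: a minimal task module that satisfies the Task signature declared above. The "sleep" task and its params.delay field are hypothetical; they only show the (params, processor) => () => void shape the converted tasks follow.]

// Sketch of a hypothetical task conforming to the Task interface above.
export default ((params, processor) => () => {
    const delay = params.delay || 0; // hypothetical parameter

    setTimeout(() => {
        processor.onInfo(`Slept for ${delay} ms`);
        processor.done(); // every task must eventually call done()
    }, delay);
}) as Task;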
@@ -1,3 +0,0 @@
"use strict";

exports.send = (message, callback) => process.nextTick(callback);
@@ -0,0 +1,3 @@
"use strict";

export const send = (message, callback) => process.nextTick(callback);
@@ -1,96 +0,0 @@
"use strict";

const _ = require("underscore");
const tasks = require("./tasks");

// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
const TaskProcessor = function (task, outerProcessor, callback) {
    if (!this) {
        return new TaskProcessor(task);
    }

    const that = this;
    const createTaskWorker = () => tasks[task.type](task.params || {}, that);
    const errors = [];
    const process = () => createTaskWorker().process();
    const getOuterPrefix = (prefix) => {
        if (task.name && prefix) {
            return `${task.name}/${prefix}`;
        }

        return String(task.name || "") + String(prefix || "");
    };
    const onError = (message, prefix) => {
        errors.push(message);
        outerProcessor.onError(message, getOuterPrefix(prefix));
    };
    const onWarn = (message, prefix) => outerProcessor.onWarn(message, getOuterPrefix(prefix));
    const onInfo = (message, prefix) => outerProcessor.onInfo(message, getOuterPrefix(prefix));
    const processTask = (innerTask, innerCallback) => {
        const innerProcessor = new TaskProcessor(innerTask, that, innerCallback);

        innerProcessor.process();
    };
    const done = () => callback(errors.join("\r\n"));

    that.process = process;
    that.onError = onError;
    that.onWarn = onWarn;
    that.onInfo = onInfo;
    that.processTask = processTask;
    that.done = done;
    that.context = outerProcessor.context;
};

const pushMessage = (list, message, parts, index) => {
    if (!index) {
        list.$allMessages = list.$allMessages || []; // eslint-disable-line fp/no-mutation
        list.$allMessages.push({ // eslint-disable-line fp/no-mutating-methods
            message,
            "prefix": parts.join("/")
        });
    }

    list.$messages = list.$messages || []; // eslint-disable-line fp/no-mutation
    if (index === parts.length) {
        return list.$messages.push(message); // eslint-disable-line fp/no-mutating-methods
    }

    return pushMessage(list, message, parts, index + 1);
};

const addFlag = (flags) => (flagName) => {
    flags[flagName] = true; // eslint-disable-line fp/no-mutation
};

const containsFlag = (flags) => (flagName) => flags[flagName];

exports.processTask = (task, context, callback) => {
    const errors = {};
    const warns = {};
    const infos = {};
    const messages = {};
    const messageProcessor = (list) => (message, prefix) => {
        const parts = prefix.split("/");

        pushMessage(list, message, parts, 0);
        pushMessage(messages, message, parts, 0);
    };
    const flags = {};
    const processor = new TaskProcessor(task, {
        "context": _.extend(context, {
            "addFlag": addFlag(flags),
            "containsFlag": containsFlag(flags)
        }),
        "onError": messageProcessor(errors),
        "onInfo": messageProcessor(infos),
        "onWarn": messageProcessor(warns)
    }, (err) => callback(err, {
        errors,
        infos,
        messages,
        warns
    }));

    processor.process();
};
@@ -0,0 +1,94 @@
"use strict";

import * as _ from "underscore";
import tasks from "./tasks";

// TaskProcessor does not look like EventEmitter, so no need to extend EventEmitter and use `emit' here.
const createTaskProcessor = (task: TaskInfo, outerProcessor: TaskProcessorCore, callback: TaskProcessorCallback) => {
    const errors: string[] = [];
    const getOuterPrefix = (prefix) => {
        if (task.name && prefix) {
            return `${task.name}/${prefix}`;
        }

        return String(task.name || "") + String(prefix || "");
    };
    const onError = (message, prefix) => {
        errors.push(message);
        outerProcessor.onError(message, getOuterPrefix(prefix));
    };
    const onWarn = (message, prefix) => outerProcessor.onWarn(message, getOuterPrefix(prefix));
    const onInfo = (message, prefix) => outerProcessor.onInfo(message, getOuterPrefix(prefix));

    let result: TaskProcessor;
    result = {
        context: outerProcessor.context,
        done: () => callback(errors.join("\r\n")),
        onError,
        onWarn,
        onInfo,
        process: () => tasks[task.type](task.params || {}, result)(),
        processTask: (innerTask, innerCallback) => createTaskProcessor(innerTask, result, innerCallback).process(),
    };

    return result;
};

const pushMessage = (list, message, parts, index) => {
    if (!index) {
        list.$allMessages.push({
            message,
            prefix: parts.join("/"),
        });
    }

    if (index < parts.length) {
        if (!list[parts[index]]) {
            list[parts[index]] = {};
        }

        return pushMessage(list[parts[index]], message, parts, index + 1);
    }

    if (!list.$messages) {
        list.$messages = [];
    }

    return list.$messages.push(message);
};

const addFlag = (flags) => (flagName) => {
    flags[flagName] = true;
};

const containsFlag = (flags) => (flagName) => flags[flagName];

export const processTask = (task, context, callback) => {
    const errors: MessagesRoot = { $allMessages: [] };
    const warns: MessagesRoot = { $allMessages: [] };
    const infos: MessagesRoot = { $allMessages: [] };
    const messages: MessagesRoot = { $allMessages: [] };
    const messageProcessor = (list) => (message, prefix) => {
        const parts = prefix.split("/");

        pushMessage(list, message, parts, 0);
        pushMessage(messages, message, parts, 0);
    };
    const flags = {};
    const processor = createTaskProcessor(task, {
        context: _.extend(context, {
            addFlag: addFlag(flags),
            containsFlag: containsFlag(flags),
        }),
        onError: messageProcessor(errors),
        onInfo: messageProcessor(infos),
        onWarn: messageProcessor(warns),
    }, (err) => callback(err, {
        errors,
        infos,
        messages,
        warns,
    }));

    processor.process();
};
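
[Reviewer illustration, not part of the commit: roughly how the exported processTask above is meant to be driven. The root task description and the context object are hypothetical; the real call sites live elsewhere in the build server.]

import { processTask } from "./taskProcessor";

// Hypothetical root task: run two no-op subtasks in sequence
// (assumes "sequential" and "noop" tasks are registered in ./tasks).
const rootTask = {
    name: "example",
    type: "sequential",
    params: { tasks: [{ type: "noop" }, { type: "noop" }] },
};

processTask(rootTask, { exported: "/tmp/exported" }, (err, report) => {
    // err is the "\r\n"-joined error string from the root processor's done()
    console.log(err || "ok", report.messages.$allMessages.length);
});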
@@ -1,31 +0,0 @@
"use strict";

const glob = require("glob");

module.exports = (params, processor) => ({
    "process": () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
        "cwd": processor.context.exported,
        "dot": true
    }, (err, files) => {
        if (err) {
            processor.onError(err);

            return processor.done();
        }

        if (!files || !files.length) {
            return processor.done();
        }

        return processor.processTask({
            "params": {
                "tasks": files.map((file) => ({
                    "name": file,
                    "params": { "filename": file },
                    "type": "deletefromcode"
                }))
            },
            "type": "parallel"
        }, processor.done.bind(processor));
    })
});
@@ -0,0 +1,29 @@
"use strict";

import * as glob from "glob";

export default ((params, processor) => () => glob("**/obj/{Debug,Release}/*.{dll,pdb,xml}", {
    cwd: processor.context.exported,
    dot: true,
}, (err, files) => {
    if (err) {
        processor.onError(err);

        return processor.done();
    }

    if (!files || !files.length) {
        return processor.done();
    }

    return processor.processTask({
        params: {
            tasks: files.map((file) => ({
                name: file,
                params: { filename: file },
                type: "deletefromcode",
            })),
        },
        type: "parallel",
    }, processor.done);
})) as Task;
@@ -1,9 +1,9 @@
"use strict";

module.exports = (params, processor) => {
export default ((params, processor) => {
    const condition = (!params.owner || params.owner === processor.context.owner)
        && (!params.branch || params.branch === processor.context.branch || `refs/heads/${params.branch}` === processor.context.branch);
    const task = (condition && params.task) || params.otherwise;

    return { "process": () => processor.processTask(task || { "type": "noop" }, processor.done.bind(processor)) };
};
    return () => processor.processTask(task || { type: "noop" }, processor.done);
}) as Task;
@@ -1,23 +0,0 @@
"use strict";

const path = require("path");
const fse = require("fs-extra");

module.exports = (params, processor) => ({
    "process": () => {
        const sourceFilePath = path.join(processor.context.exported, params.filename);
        const targetFilePath = path.join(processor.context.release, params.filename);

        processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`);

        fse.copy(sourceFilePath, targetFilePath, (err) => {
            if (err) {
                processor.onError(`Unable to copy file: ${err}`);
            } else {
                processor.onInfo("Copied file");
            }

            return processor.done();
        });
    }
});
@@ -0,0 +1,21 @@
"use strict";

import { copy } from "fs-extra";
import { join } from "path";

export default ((params, processor) => () => {
    const sourceFilePath = join(processor.context.exported, params.filename);
    const targetFilePath = join(processor.context.release, params.filename);

    processor.onInfo(`Copying ${sourceFilePath} to ${targetFilePath}`);

    copy(sourceFilePath, targetFilePath, (err) => {
        if (err) {
            processor.onError(`Unable to copy file: ${err}`);
        } else {
            processor.onInfo("Copied file");
        }

        return processor.done();
    });
}) as Task;
@@ -1,31 +0,0 @@
"use strict";

const glob = require("glob");

module.exports = (params, processor) => ({
    "process": () => glob(params.mask, {
        "cwd": processor.context.exported,
        "dot": true
    }, (err, files) => {
        if (err) {
            processor.onError(err);

            return processor.done();
        }

        if (!files || !files.length) {
            return processor.done();
        }

        return processor.processTask({
            "params": {
                "tasks": files.map((file) => ({
                    "name": file,
                    "params": { "filename": file },
                    "type": "copy"
                }))
            },
            "type": "parallel"
        }, processor.done.bind(processor));
    })
});
@@ -0,0 +1,29 @@
"use strict";

import * as glob from "glob";

export default ((params, processor) => () => glob(params.mask, {
    cwd: processor.context.exported,
    dot: true,
}, (err, files) => {
    if (err) {
        processor.onError(err);

        return processor.done();
    }

    if (!files || !files.length) {
        return processor.done();
    }

    return processor.processTask({
        params: {
            tasks: files.map((file) => ({
                name: file,
                params: { filename: file },
                type: "copy",
            })),
        },
        type: "parallel",
    }, processor.done);
})) as Task;
@@ -1,37 +0,0 @@
"use strict";

const fs = require("fs");
const path = require("path");
const cssnano = require("cssnano");

module.exports = (params, processor) => ({
    "process": () => {
        const filePath = path.join(processor.context.exported, params.filename);

        fs.readFile(filePath, (readErr, css) => {
            if (readErr) {
                processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);

                return processor.done();
            }

            return cssnano.process(css)
                .catch((cssErr) => {
                    processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
                    processor.done();
                })
                .then((result) => {
                    fs.writeFile(filePath, result.css, (writeErr) => {
                        if (writeErr) {
                            processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
                        } else {
                            processor.onInfo(`Saved uglified stylesheet for ${params.filename}; uglified length: ${result.css.length}`);
                        }

                        processor.done();
                    });
                });
        });
    }
});

@@ -0,0 +1,34 @@
"use strict";

import { process as cssnanoProcess } from "cssnano";
import { readFile, writeFile } from "fs";
import { join } from "path";

export default ((params, processor) => () => {
    const filePath = join(processor.context.exported, params.filename);

    readFile(filePath, (readErr, css) => {
        if (readErr) {
            processor.onError(`Unable to read stylesheet ${params.filename}: ${readErr}`);

            return processor.done();
        }

        return cssnanoProcess(css)
            .catch((cssErr) => {
                processor.onError(`Unable to uglify stylesheet: ${cssErr}`);
                processor.done();
            })
            .then((result) => {
                writeFile(filePath, result.css, (writeErr) => {
                    if (writeErr) {
                        processor.onError(`Unable to write uglified stylesheet for ${params.filename}: ${writeErr}`);
                    } else {
                        processor.onInfo(`Saved uglified stylesheet for ${params.filename}; uglified length: ${result.css.length}`);
                    }

                    processor.done();
                });
            });
    });
}) as Task;
||||
"use strict"; |
||||
|
||||
const glob = require("glob"); |
||||
const flagDoneName = "cssnanoallDone"; |
||||
|
||||
module.exports = (params, processor) => ({ |
||||
"process": () => { |
||||
if (processor.context.containsFlag(flagDoneName)) { |
||||
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json"); |
||||
} |
||||
|
||||
processor.context.addFlag(flagDoneName); |
||||
|
||||
glob("**/*.css", { |
||||
"cwd": processor.context.exported, |
||||
"dot": true |
||||
}, (err, files) => { |
||||
if (err) { |
||||
processor.onError(err); |
||||
|
||||
return processor.done(); |
||||
} |
||||
|
||||
return processor.processTask({ |
||||
"params": { |
||||
"tasks": files.map((file) => ({ |
||||
"name": file, |
||||
"params": { "filename": file }, |
||||
"type": "cssnano" |
||||
})) |
||||
}, |
||||
"type": (params.preventParallelTests && "sequential") || "parallel" |
||||
}, processor.done.bind(processor)); |
||||
}); |
||||
} |
||||
}); |
@ -0,0 +1,34 @@ |
||||
"use strict"; |
||||
|
||||
import * as glob from "glob"; |
||||
const flagDoneName = "cssnanoallDone"; |
||||
|
||||
export default ((params, processor) => () => { |
||||
if (processor.context.containsFlag(flagDoneName)) { |
||||
processor.onWarn("cssnanoall task is executed more than once; this is probably a bug in your mbs.json"); |
||||
} |
||||
|
||||
processor.context.addFlag(flagDoneName); |
||||
|
||||
glob("**/*.css", { |
||||
cwd: processor.context.exported, |
||||
dot: true, |
||||
}, (err, files) => { |
||||
if (err) { |
||||
processor.onError(err); |
||||
|
||||
return processor.done(); |
||||
} |
||||
|
||||
return processor.processTask({ |
||||
params: { |
||||
tasks: files.map((file) => ({ |
||||
name: file, |
||||
params: { filename: file }, |
||||
type: "cssnano", |
||||
})), |
||||
}, |
||||
type: (params.preventParallelTests && "sequential") || "parallel", |
||||
}, processor.done); |
||||
}); |
||||
}) as Task; |
@ -1,22 +0,0 @@ |
||||
"use strict"; |
||||
|
||||
const path = require("path"); |
||||
const fse = require("fs-extra"); |
||||
|
||||
module.exports = (params, processor) => ({ |
||||
"process": () => { |
||||
const sourceFilePath = path.join(processor.context.exported, params.filename); |
||||
|
||||
processor.onInfo(`Deleting ${sourceFilePath}`); |
||||
|
||||
fse.remove(sourceFilePath, (err) => { |
||||
if (err) { |
||||
processor.onError(`Unable to delete file: ${err}`); |
||||
} else { |
||||
processor.onInfo("Deleted file"); |
||||
} |
||||
|
||||
return processor.done(); |
||||
}); |
||||
} |
||||
}); |
@ -0,0 +1,20 @@ |
||||
"use strict"; |
||||
|
||||
import { remove } from "fs-extra"; |
||||
import { join } from "path"; |
||||
|
||||
export default ((params, processor) => () => { |
||||
const sourceFilePath = join(processor.context.exported, params.filename); |
||||
|
||||
processor.onInfo(`Deleting ${sourceFilePath}`); |
||||
|
||||
remove(sourceFilePath, (err) => { |
||||
if (err) { |
||||
processor.onError(`Unable to delete file: ${err}`); |
||||
} else { |
||||
processor.onInfo("Deleted file"); |
||||
} |
||||
|
||||
return processor.done(); |
||||
}); |
||||
}) as Task; |
@ -1,17 +0,0 @@ |
||||
"use strict"; |
||||
|
||||
const sequential = require("./sequential"); |
||||
|
||||
module.exports = (params, processor) => sequential({ |
||||
"tasks": [ |
||||
{ |
||||
"name": "build", |
||||
params, |
||||
"type": "dotnetbuildwithoutcleanup" |
||||
}, |
||||
{ |
||||
"name": "cleanup", |
||||
"type": "cleanupafterdotnetbuild" |
||||
} |
||||
] |
||||
}, processor); |
@@ -0,0 +1,17 @@
"use strict";

import sequential from "./sequential";

export default ((params, processor) => sequential({
    tasks: [
        {
            name: "build",
            params,
            type: "dotnetbuildwithoutcleanup",
        },
        {
            name: "cleanup",
            type: "cleanupafterdotnetbuild",
        },
    ],
}, processor)) as Task;
@@ -1,22 +0,0 @@
"use strict";

const sequential = require("./sequential");

module.exports = (params, processor) => sequential({
    "tasks": [
        {
            "name": "build",
            params,
            "type": "dotnetbuildwithoutcleanup"
        },
        {
            "name": "test",
            params,
            "type": "dotnetnunitall"
        },
        {
            "name": "cleanup",
            "type": "cleanupafterdotnetbuild"
        }
    ]
}, processor);
@@ -0,0 +1,22 @@
"use strict";

import sequential from "./sequential";

export default ((params, processor) => sequential({
    tasks: [
        {
            name: "build",
            params,
            type: "dotnetbuildwithoutcleanup",
        },
        {
            name: "test",
            params,
            type: "dotnetnunitall",
        },
        {
            name: "cleanup",
            type: "cleanupafterdotnetbuild",
        },
    ],
}, processor)) as Task;
@@ -1,82 +0,0 @@
"use strict";

const spawn = require("child_process").spawn;
const streamBuffers = require("stream-buffers");
const settings = require("../../settings");

const wrapBuilder = (builder, input, onExit) => {
    const resultBuffer = new streamBuffers.WritableStreamBuffer();
    const errorBuffer = new streamBuffers.WritableStreamBuffer();

    builder.stdout.on("data", (data) => {
        resultBuffer.write(data);
    });

    builder.stderr.on("data", (data) => {
        errorBuffer.write(data);
    });

    builder.on("exit", (code) => {
        resultBuffer.end();
        errorBuffer.end();
        onExit(code, resultBuffer.getContentsAsString(), errorBuffer.getContentsAsString());
    });

    builder.stdin.write(input);
    builder.stdin.end();
};

const safeParseJson = (data) => {
    try {
        return { "parsed": JSON.parse(data) };
    } catch (err) {
        return { err };
    }
};

module.exports = (params, processor) => ({
    "process": () => {
        const input = JSON.stringify(params);
        const builder = spawn(settings.builderExecutable, [params.command]);

        processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${input}`);

        wrapBuilder(builder, input, (code, result, builderError) => {
            if (code || builderError) {
                processor.onError(`Return code is ${code}\r\n${builderError}`);

                return processor.done();
            }

            const { parsed, err } = safeParseJson(result);

            if (err || !parsed || !parsed.Messages) {
                processor.onError(`Malformed JSON: ${err}`);
                processor.onInfo(result);

                return processor.done();
            }

            const messages = parsed.Messages;

            messages.forEach((message) => {
                if (!message) {
                    return processor.onError("Message is null");
                }

                switch (message.Type) {
                case "info":
                    return processor.onInfo(message.Body);
                case "warn":
                    return processor.onWarn(message.Body);
                default:
                    return processor.onError(message.Body);
                }
            });

            processor.onInfo(`Done DotNetBuilderWrapper processing (at ${new Date().toISOString()})`);

            return processor.done();
        });
    }
});
@@ -0,0 +1,89 @@
"use strict";

import { spawn } from "child_process";
import * as JSONParse from "json-parse-safe";
import { WritableStreamBuffer } from "stream-buffers";

import settings from "../../settings";

const wrapBuilder = (builder, input, onExit) => {
    const stdoutPromise = new Promise((resolve, reject) => {
        const streamBuffer = new WritableStreamBuffer();
        builder.stdout
            .on("error", reject)
            .pipe(streamBuffer)
            .on("error", reject)
            .on("finish", () => {
                streamBuffer.end();
                resolve(streamBuffer.getContentsAsString());
            });
    });

    const stderrPromise = new Promise((resolve, reject) => {
        const streamBuffer = new WritableStreamBuffer();
        builder.stderr
            .on("error", reject)
            .pipe(streamBuffer)
            .on("error", reject)
            .on("finish", () => {
                streamBuffer.end();
                resolve(streamBuffer.getContentsAsString());
            });
    });

    const builderPromise = new Promise((resolve, reject) => {
        builder.stdin.write(input);
        builder.stdin.end();
        builder.on("exit", resolve);
    });

    Promise.all([stdoutPromise, stderrPromise, builderPromise]).then((values) => {
        const [result, builderError, code] = values;
        onExit(code, result, builderError);
    }).catch((err) => onExit(0, undefined, err));
};

export default ((params, processor) => () => {
    const input = JSON.stringify(params);
    const builder = spawn(settings.builderExecutable, [params.command]);

    processor.onInfo(`DotNetBuilderWrapper processing (at ${new Date().toISOString()}): ${input}`);

    wrapBuilder(builder, input, (code, result, builderError) => {
        if (code || builderError) {
            processor.onError(`Return code is ${code}\r\n${builderError}`);

            return processor.done();
        }

        const { value, error } = JSONParse(result);

        if (error || !value || !value.Messages) {
            processor.onError(`Malformed JSON: ${error}`);
            processor.onInfo(result);

            return processor.done();
        }

        const messages = value.Messages;

        messages.forEach((message) => {
            if (!message) {
                return processor.onError("Message is null");
            }

            switch (message.Type) {
            case "info":
                return processor.onInfo(message.Body);
            case "warn":
                return processor.onWarn(message.Body);
            default:
                return processor.onError(message.Body);
            }
        });

        processor.onInfo(`Done DotNetBuilderWrapper processing (at ${new Date().toISOString()})`);

        return processor.done();
    });
}) as Task;
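
[Reviewer illustration, not part of the commit: the parsing code above expects the external builder process to print a single JSON document of roughly this shape to stdout. Field names are taken from the code (Messages, Type, Body); the example payload values are hypothetical.]

// Sketch of the builder output the wrapper parses.
interface BuilderMessage {
    Type: "info" | "warn" | string; // anything other than "info"/"warn" is treated as an error
    Body: string;
}

interface BuilderOutput {
    Messages: BuilderMessage[];
}

// Hypothetical example payload:
const example: BuilderOutput = {
    Messages: [
        { Type: "info", Body: "Restored packages" },
        { Type: "warn", Body: "CS0618: obsolete API used" },
    ],
};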
@@ -1,42 +0,0 @@
"use strict";

const sequential = require("./sequential");

const createTasks = function *(params) {
    if (!params.skipMbsCheckStyle) {
        yield {
            params,
            "type": "dotnetcheckstyle"
        };
    }

    yield {
        params,
        "type": "dotnetrewrite"
    };

    if (!params.skipNugetRestore) {
        yield {
            params,
            "type": "dotnetnugetrestore"
        };
    }

    yield {
        "params": {
            "configuration": params.configuration,
            "forceCodeAnalysis": params.forceCodeAnalysis,
            "ignoreCodeAnalysis": params.ignoreCodeAnalysis,
            "skipCodeSigning": params.skipCodeSigning,
            "solution": params.solution,
            "target": "Rebuild"
        },
        "type": "dotnetcompile"
    };
};

module.exports = (params, processor) => {
    const tasks = Array.from(createTasks(params));

    return sequential({ tasks }, processor);
};
@@ -0,0 +1,42 @@
"use strict";

import sequential from "./sequential";

const createTasks = function *(params) {
    if (!params.skipMbsCheckStyle) {
        yield {
            params,
            type: "dotnetcheckstyle",
        };
    }

    yield {
        params,
        type: "dotnetrewrite",
    };

    if (!params.skipNugetRestore) {
        yield {
            params,
            type: "dotnetnugetrestore",
        };
    }

    yield {
        params: {
            configuration: params.configuration,
            forceCodeAnalysis: params.forceCodeAnalysis,
            ignoreCodeAnalysis: params.ignoreCodeAnalysis,
            skipCodeSigning: params.skipCodeSigning,
            solution: params.solution,
            target: "Rebuild",
        },
        type: "dotnetcompile",
    };
};

export default ((params, processor) => {
    const tasks = Array.from(createTasks(params));

    return sequential({ tasks }, processor);
}) as Task;
@@ -1,76 +0,0 @@
"use strict";

const path = require("path");
const fs = require("fs");
const async = require("async");
const glob = require("glob");

const autoGeneratedMarker
    = "//------------------------------------------------------------------------------\n"
    + "// <auto-generated>";

const flagDoneName = "dotnetcheckerDone";

module.exports = (params, processor) => ({
    "process": () => {
        if (processor.context.containsFlag(flagDoneName)) {
            return processor.done();
        }

        processor.context.addFlag(flagDoneName);

        return glob("**/*.cs", { "cwd": processor.context.exported }, (globErr, files) => {
            if (globErr) {
                processor.onError(globErr);

                return processor.done();
            }

            processor.onInfo(`Found ${files.length} .cs files`);

            if (!files || !files.length) {
                processor.onWarn("No .cs files found");

                return processor.done();
            }

            const processFile = (data, file) => {
                if (data.includes("\r\n")) {
                    return processor.onError(`Windows-style EOL (0D0A) found in file ${file}`);
                }

                if (!params.ignoreCodeStyle) {
                    if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.startsWith(autoGeneratedMarker)) {
                        return processor.onInfo(`Skipping auto-generated file ${file}`);
                    }

                    if (data.includes("\t") && data.includes("    ")) {
                        processor.onError(`Both tabs and spaces found in file ${file}`);
                    }

                    if (data.includes("\t")) {
                        processor.onError(`Tabs found in file ${file}`);
                    }
                }

                return processor.onInfo(`Checked file ${file}`);
            };

            return async.parallel(files.map((file) => (callback) => fs.readFile(
                path.join(processor.context.exported, file),
                { "encoding": "utf8" },
                (readErr, data) => {
                    if (readErr) {
                        processor.onError(`Unable to check file ${file}: ${readErr}`);

                        return callback(readErr);
                    }

                    processFile(data, file);

                    return callback();
                }
            )), processor.done.bind(processor));
        });
    }
});
@@ -0,0 +1,74 @@
"use strict";

import { parallel } from "async";
import { readFile } from "fs";
import * as glob from "glob";
import { join } from "path";

const autoGeneratedMarker
    = "//------------------------------------------------------------------------------\n"
    + "// <auto-generated>";

const flagDoneName = "dotnetcheckerDone";

export default ((params, processor) => () => {
    if (processor.context.containsFlag(flagDoneName)) {
        return processor.done();
    }

    processor.context.addFlag(flagDoneName);

    return glob("**/*.cs", { cwd: processor.context.exported }, (globErr, files) => {
        if (globErr) {
            processor.onError(globErr);

            return processor.done();
        }

        processor.onInfo(`Found ${files.length} .cs files`);

        if (!files || !files.length) {
            processor.onWarn("No .cs files found");

            return processor.done();
        }

        const processFile = (data, file) => {
            if (data.includes("\r\n")) {
                return processor.onError(`Windows-style EOL (0D0A) found in file ${file}`);
            }

            if (!params.ignoreCodeStyle) {
                if (data.substr(1, autoGeneratedMarker.length) === autoGeneratedMarker || data.startsWith(autoGeneratedMarker)) {
                    return processor.onInfo(`Skipping auto-generated file ${file}`);
                }

                if (data.includes("\t") && data.includes("    ")) {
                    processor.onError(`Both tabs and spaces found in file ${file}`);
                }

                if (data.includes("\t")) {
                    processor.onError(`Tabs found in file ${file}`);
                }
            }

            return processor.onInfo(`Checked file ${file}`);
        };

        return parallel(files.map((file) => (callback) => readFile(
            join(processor.context.exported, file),
            { encoding: "utf8" },
            (readErr, data) => {
                if (readErr) {
                    processor.onError(`Unable to check file ${file}: ${readErr}`);

                    return callback(readErr);
                }

                processFile(data, file);

                return callback();
            },
        )), processor.done);
    });
}) as Task;
@@ -1,11 +0,0 @@
"use strict";

const _ = require("underscore");
const dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");

module.exports = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
    "getFinalTask": (nupkg) => ({
        "params": { "filename": nupkg },
        "type": "copy"
    })
}), processor);
@@ -0,0 +1,11 @@
"use strict";

import * as _ from "underscore";
import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";

export default ((params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
    getFinalTask: (nupkg) => ({
        params: { filename: nupkg },
        type: "copy",
    }),
}), processor)) as Task;
@@ -1,30 +0,0 @@
"use strict";

const conditional = require("./conditional");

module.exports = (params, processor) => conditional({
    "branch": "master",
    "otherwise": {
        "name": "nuget-pack",
        "params": {
            "major": params.major,
            "name": params.nuspecName,
            "nuspec": `${params.nuspecName}.nuspec`,
            "version": params.version,
            "withoutCommitSha": params.withoutCommitSha
        },
        "type": "dotnetnugetpack"
    },
    "owner": params.masterRepoOwner,
    "task": {
        "name": "nuget-push",
        "params": {
            "major": params.major,
            "name": params.nuspecName,
            "nuspec": `${params.nuspecName}.nuspec`,
            "version": params.version,
            "withoutCommitSha": params.withoutCommitSha
        },
        "type": "dotnetnugetpush"
    }
}, processor);
@@ -0,0 +1,30 @@
"use strict";

import conditional from "./conditional";

export default ((params, processor) => conditional({
    branch: "master",
    otherwise: {
        name: "nuget-pack",
        params: {
            major: params.major,
            name: params.nuspecName,
            nuspec: `${params.nuspecName}.nuspec`,
            version: params.version,
            withoutCommitSha: params.withoutCommitSha,
        },
        type: "dotnetnugetpack",
    },
    owner: params.masterRepoOwner,
    task: {
        name: "nuget-push",
        params: {
            major: params.major,
            name: params.nuspecName,
            nuspec: `${params.nuspecName}.nuspec`,
            version: params.version,
            withoutCommitSha: params.withoutCommitSha,
        },
        type: "dotnetnugetpush",
    },
}, processor)) as Task;
@@ -1,11 +0,0 @@
"use strict";

const _ = require("underscore");
const dotnetnugetprocessinternal = require("./dotnetnugetprocessinternal");

module.exports = (params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
    "getFinalTask": (nupkg) => ({
        "params": { "Package": nupkg },
        "type": "dotnetnugetpushonly"
    })
}), processor);
@@ -0,0 +1,11 @@
"use strict";

import * as _ from "underscore";
import dotnetnugetprocessinternal from "./dotnetnugetprocessinternal";

export default ((params, processor) => dotnetnugetprocessinternal(_.extendOwn(params, {
    getFinalTask: (nupkg) => ({
        params: { Package: nupkg },
        type: "dotnetnugetpushonly",
    }),
}), processor)) as Task;
@@ -1,12 +0,0 @@
"use strict";

const path = require("path");
const dotnetbuilderwrapper = require("./dotnetbuilderwrapper");
const settings = require("../../settings");

module.exports = (params, processor) => dotnetbuilderwrapper({
    "ApiKey": settings.nugetApiKey,
    "NugetHost": settings.nugetHost,
    "Package": path.join(processor.context.exported, params.Package),
    "command": "nugetpush"
}, processor);
@@ -0,0 +1,13 @@
"use strict";

import { join } from "path";

import settings from "../../settings";
import dotnetbuilderwrapper from "./dotnetbuilderwrapper";

export default ((params, processor) => dotnetbuilderwrapper({
    ApiKey: settings.nugetApiKey,
    NugetHost: settings.nugetHost,
    Package: join(processor.context.exported, params.Package),
    command: "nugetpush",
}, processor)) as Task;
@@ -1,17 +0,0 @@
"use strict";

const path = require("path");
const sequential = require("./sequential");

module.exports = (params, processor) => sequential({
    "tasks": [
        {
            "params": {
                "BaseDirectory": processor.context.exported,
                "SolutionPath": path.join(processor.context.exported, params.solution),
                "command": "nugetrestore"
            },
            "type": "dotnetbuilderwrapper"
        }
    ]
}, processor);
@@ -0,0 +1,17 @@
"use strict";

import { join } from "path";
import sequential from "./sequential";

export default ((params, processor) => sequential({
    tasks: [
        {
            params: {
                BaseDirectory: processor.context.exported,
                SolutionPath: join(processor.context.exported, params.solution),
                command: "nugetrestore",
            },
            type: "dotnetbuilderwrapper",
        },
    ],
}, processor)) as Task;
@@ -1,9 +0,0 @@
"use strict";

const path = require("path");
const dotNetBuilderWrapper = require("./dotnetbuilderwrapper");

module.exports = (params, processor) => dotNetBuilderWrapper({
    "TestLibraryPath": path.join(processor.context.exported, params.assembly),
    "command": "nunit"
}, processor);
@@ -0,0 +1,9 @@
"use strict";

import { join } from "path";
import dotNetBuilderWrapper from "./dotnetbuilderwrapper";

export default ((params, processor) => dotNetBuilderWrapper({
    TestLibraryPath: join(processor.context.exported, params.assembly),
    command: "nunit",
}, processor)) as Task;
@@ -1,42 +0,0 @@
"use strict";

const glob = require("glob");
const flagDoneName = "dotnetnunitallDone";

module.exports = (params, processor) => ({
    "process": () => {
        if (processor.context.containsFlag(flagDoneName)) {
            processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
        }

        processor.context.addFlag(flagDoneName);

        glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", {
            "cwd": processor.context.exported,
            "dot": true
        }, (err, files) => {
            if (err) {
                processor.onError(err);

                return processor.done();
            }

            if (!files || !files.length) {
                processor.onError(`No test assemblies found in ${processor.context.exported}`);

                return processor.done();
            }

            return processor.processTask({
                "params": {
                    "tasks": files.map((file) => ({
                        "name": file,
                        "params": { "assembly": file },
                        "type": "dotnetnunit"
                    }))
                },
                "type": (params.preventParallelTests && "sequential") || "parallel"
            }, processor.done.bind(processor));
        });
    }
});
@@ -0,0 +1,40 @@
"use strict";

import * as glob from "glob";
const flagDoneName = "dotnetnunitallDone";

export default ((params, processor) => () => {
    if (processor.context.containsFlag(flagDoneName)) {
        processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
    }

    processor.context.addFlag(flagDoneName);

    glob("**/{bin,build}/**/*.{Tests,Test,UnitTests}.dll", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);

            return processor.done();
        }

        if (!files || !files.length) {
            processor.onError(`No test assemblies found in ${processor.context.exported}`);

            return processor.done();
        }

        return processor.processTask({
            params: {
                tasks: files.map((file) => ({
                    name: file,
                    params: { assembly: file },
                    type: "dotnetnunit",
                })),
            },
            type: (params.preventParallelTests && "sequential") || "parallel",
        }, processor.done);
    });
}) as Task;
@@ -1,51 +0,0 @@
"use strict";

const path = require("path");
const fs = require("fs");
const Mustache = require("mustache");

const sequential = require("./sequential");

// eslint-disable-next-line no-sync
const msbuildTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.msbuild"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const deployTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.bat"), { "encoding": "utf8" });
// eslint-disable-next-line no-sync
const versionTemplate = fs.readFileSync(path.join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { "encoding": "utf8" });

module.exports = (params, processor) => sequential({
    "tasks": [
        {
            "params": {
                "data": Mustache.render(msbuildTemplate, params),
                "filename": "MakePackage.msbuild"
            },
            "type": "writefile"
        },
        {
            "params": {
                "data": Mustache.render(deployTemplate, params),
                "filename": "Deploy.bat"
            },
            "type": "writefile"
        },
        {
            "params": {
                "data": Mustache.render(versionTemplate, params),
                "filename": "version.aspx"
            },
            "type": "writefile"
        },
        {
            "params": {
                "configuration": params.configuration,
                "isCodeAnalysisUnsupported": params.isCodeAnalysisUnsupported,
                "overrideOutputDirectory": processor.context.release,
                "skipCodeSigning": params.skipCodeSigning,
                "solution": "MakePackage.msbuild",
                "target": "Package"
            },
            "type": "dotnetcompile"
        }
    ]
}, processor);
@@ -0,0 +1,47 @@
"use strict";

import { readFileSync } from "fs";
import { render } from "mustache";
import { join } from "path";
import sequential from "./sequential";

const msbuildTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.msbuild"), { encoding: "utf8" });
const deployTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.bat"), { encoding: "utf8" });
const versionTemplate = readFileSync(join(__dirname, "/dotnetpackwebapp.template.version.aspx"), { encoding: "utf8" });

export default ((params, processor) => sequential({
    tasks: [
        {
            params: {
                data: render(msbuildTemplate, params),
                filename: "MakePackage.msbuild",
            },
            type: "writefile",
        },
        {
            params: {
                data: render(deployTemplate, params),
                filename: "Deploy.bat",
            },
            type: "writefile",
        },
        {
            params: {
                data: render(versionTemplate, params),
                filename: "version.aspx",
            },
            type: "writefile",
        },
        {
            params: {
                configuration: params.configuration,
                isCodeAnalysisUnsupported: params.isCodeAnalysisUnsupported,
                overrideOutputDirectory: processor.context.release,
                skipCodeSigning: params.skipCodeSigning,
                solution: "MakePackage.msbuild",
                target: "Package",
            },
            type: "dotnetcompile",
        },
    ],
}, processor)) as Task;
@@ -1,71 +0,0 @@
"use strict";

const path = require("path");
const fs = require("fs");
const async = require("async");
const glob = require("glob");
const settings = require("../../settings");

const flagDoneName = "dotnetrewriterDone";

const processAssemblyInfo = (params, processor, appendInformationalVersion) => (originalContent, cb) => {
    const processInternalsVisible = (content) => {
        if (params.skipCodeSigning || settings.skipCodeSigning) {
            return content;
        }

        return content.replace(
            /InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g,
            (match, p1) => `InternalsVisibleTo("${p1},PublicKey=${settings.codeSigningPublicKey}")`
        );
    };

    const processInformationalVersion = (content) => {
        if (!appendInformationalVersion) {
            return content;
        }

        return `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]\n`;
    };

    return cb(null, processInformationalVersion(processInternalsVisible(originalContent)));
};

module.exports = (params, processor) => ({
    "process": () => {
        if (processor.context.containsFlag(flagDoneName)) {
            return processor.done();
        }

        processor.context.addFlag(flagDoneName);

        return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { "cwd": processor.context.exported }, (globErr, files) => {
            if (globErr) {
                processor.onError(globErr);

                return processor.done();
            }

            processor.onInfo(`Found ${files.length} AssemblyInfo.cs files`);

            if (!files || !files.length) {
                processor.onWarn("No AssemblyInfo.cs found");

                return processor.done();
            }

            return async.parallel(files.map((file) => (callback) => async.waterfall([
                fs.readFile.bind(null, path.join(processor.context.exported, file), { "encoding": "utf8" }),
                processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
                fs.writeFile.bind(null, path.join(processor.context.exported, file))
            ], (err) => {
                if (err) {
                    processor.onError(`Unable to rewrite file ${file}: ${err}`);
                } else {
                    processor.onInfo(`Rewritten file ${file}`);
                }
                callback(err);
            })), processor.done.bind(processor));
        });
    }
});
@@ -0,0 +1,69 @@
"use strict";

import { parallel, waterfall } from "async";
import { readFile, writeFile } from "fs";
import * as glob from "glob";
import { join } from "path";
import settings from "../../settings";

const flagDoneName = "dotnetrewriterDone";

const processAssemblyInfo = (params, processor, appendInformationalVersion) => (originalContent, cb) => {
    const processInternalsVisible = (content) => {
        if (params.skipCodeSigning || settings.skipCodeSigning) {
            return content;
        }

        const pattern = /InternalsVisibleTo\s*\(\s*"([\w.]+)"\s*\)/g;
        const replacer = (match, p1) => `InternalsVisibleTo("${p1},PublicKey=${settings.codeSigningPublicKey}")`;

        return content.replace(pattern, replacer);
    };

    const processInformationalVersion = (content) => {
        if (!appendInformationalVersion) {
            return content;
        }

        return `${content}\n[assembly: System.Reflection.AssemblyInformationalVersion("${processor.context.versionInfo}")]\n`;
    };

    return cb(null, processInformationalVersion(processInternalsVisible(originalContent)));
};

export default ((params, processor) => () => {
    if (processor.context.containsFlag(flagDoneName)) {
        return processor.done();
    }

    processor.context.addFlag(flagDoneName);

    return glob("**/{InternalsVisible,AssemblyInfo}*.cs", { cwd: processor.context.exported }, (globErr, files) => {
        if (globErr) {
            processor.onError(globErr);

            return processor.done();
        }

        processor.onInfo(`Found ${files.length} AssemblyInfo.cs files`);

        if (!files || !files.length) {
            processor.onWarn("No AssemblyInfo.cs found");

            return processor.done();
        }

        return parallel(files.map((file) => (callback) => waterfall([
            readFile.bind(null, join(processor.context.exported, file), { encoding: "utf8" }),
            processAssemblyInfo(params, processor, file.toLowerCase().includes("assemblyinfo.cs")),
            writeFile.bind(null, join(processor.context.exported, file)),
        ], (err) => {
            if (err) {
                processor.onError(`Unable to rewrite file ${file}: ${err}`);
            } else {
                processor.onInfo(`Rewritten file ${file}`);
            }
            callback(err);
        })), processor.done);
    });
}) as Task;
@ -1,19 +0,0 @@ |
||||
"use strict"; |
||||
|
||||
module.exports = (params, processor) => ({ |
||||
"process": () => { |
||||
if (params.error) { |
||||
processor.onError(params.error); |
||||
} |
||||
|
||||
if (params.warn) { |
||||
processor.onWarn(params.warn); |
||||
} |
||||
|
||||
if (params.info) { |
||||
processor.onInfo(params.info); |
||||
} |
||||
|
||||
processor.done(); |
||||
} |
||||
}); |
@ -0,0 +1,17 @@ |
||||
"use strict"; |
||||
|
||||
export default ((params, processor) => () => { |
||||
if (params.error) { |
||||
processor.onError(params.error); |
||||
} |
||||
|
||||
if (params.warn) { |
||||
processor.onWarn(params.warn); |
||||
} |
||||
|
||||
if (params.info) { |
||||
processor.onInfo(params.info); |
||||
} |
||||
|
||||
processor.done(); |
||||
}) as Task; |
@ -1,32 +0,0 @@
"use strict";

const path = require("path");
const CLIEngine = require("eslint").CLIEngine;
const settings = require("../../settings");
const cli = new CLIEngine({ "configFile": settings.eslintBrowserConfig });

const errorSeverity = 2;

module.exports = (params, processor) => ({
    "process": () => {
        const filePath = path.join(processor.context.exported, params.filename);
        const result = cli.executeOnFiles([filePath]);

        processor.onInfo(`ESLinted ${params.filename}`);

        result.results.forEach((subresult) => {
            subresult.messages.forEach((message) => {
                const messageText = `${params.filename}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;

                if (message.fatal || message.severity === errorSeverity) {
                    processor.onError(messageText);
                } else {
                    processor.onWarn(messageText);
                }
            });
        });

        processor.done();
    }
});

@ -0,0 +1,30 @@
"use strict";

import { CLIEngine } from "eslint";
import { join } from "path";
import settings from "../../settings";

const cli = new CLIEngine({ configFile: settings.eslintBrowserConfig });

const errorSeverity = 2;

export default ((params, processor) => () => {
    const filePath = join(processor.context.exported, params.filename);
    const result = cli.executeOnFiles([filePath]);

    processor.onInfo(`ESLinted ${params.filename}`);

    result.results.forEach((subresult) => {
        subresult.messages.forEach((message) => {
            const messageText = `${params.filename}:${message.line},${message.column} (${message.ruleId}) ${message.message}`;

            if (message.fatal || message.severity === errorSeverity) {
                processor.onError(messageText);
            } else {
                processor.onWarn(messageText);
            }
        });
    });

    processor.done();
}) as Task;
@ -1,38 +0,0 @@
"use strict";

const glob = require("glob");
const flagDoneName = "eslintbrowserallDone";

module.exports = (params, processor) => ({
    "process": () => {
        if (processor.context.containsFlag(flagDoneName)) {
            processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");
        }

        processor.context.addFlag(flagDoneName);

        const excludeFiles = params.excludeFiles || [];

        glob("**/*.js", {
            "cwd": processor.context.exported,
            "dot": true
        }, (err, files) => {
            if (err) {
                processor.onError(err);

                return processor.done();
            }

            return processor.processTask({
                "params": {
                    "tasks": files.filter((file) => !excludeFiles.includes(file)).map((file) => ({
                        "name": file,
                        "params": { "filename": file },
                        "type": "eslintbrowser"
                    }))
                },
                "type": (params.preventParallelTests && "sequential") || "parallel"
            }, processor.done.bind(processor));
        });
    }
});
@ -0,0 +1,36 @@
"use strict";

import * as glob from "glob";
const flagDoneName = "eslintbrowserallDone";

export default ((params, processor) => () => {
    if (processor.context.containsFlag(flagDoneName)) {
        processor.onWarn("eslintbrowserall task is executed more than once; this is probably a bug in your mbs.json");
    }

    processor.context.addFlag(flagDoneName);

    const excludeFiles = params.excludeFiles || [];

    glob("**/*.js", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);

            return processor.done();
        }

        return processor.processTask({
            params: {
                tasks: files.filter((file) => !excludeFiles.includes(file)).map((file) => ({
                    name: file,
                    params: { filename: file },
                    type: "eslintbrowser",
                })),
            },
            type: (params.preventParallelTests && "sequential") || "parallel",
        }, processor.done);
    });
}) as Task;
@ -1,13 +0,0 @@
"use strict";

// Code taken from http://stackoverflow.com/a/17204293
// eslint-disable-next-line no-sync
require("fs").readdirSync(__dirname)
    .forEach((file) => {
        if (file.match(/\.js$/) !== null && file !== "index.js") {
            const name = file.replace(".js", "");

            // eslint-disable-next-line global-require
            exports[name] = require(`./${file}`);
        }
    });
@ -0,0 +1,16 @@
"use strict";

import { readdirSync } from "fs";

const tasks = {};

// Code taken from http://stackoverflow.com/a/17204293
readdirSync(__dirname)
    .forEach((file) => {
        if (file.match(/\.ts$/) !== null && file !== "index.ts") {
            const name = file.replace(".ts", "");
            tasks[name] = require(`./${file}`).default;
        }
    });

export default tasks as Tasks;
@ -1,3 +0,0 @@
"use strict";

module.exports = (params, processor) => ({ "process": () => processor.done() });
@ -0,0 +1,3 @@
"use strict";

export default ((params, processor) => processor.done()) as Task;
@ -1,28 +0,0 @@
"use strict";

const sequential = require("./sequential");

module.exports = (params, processor) => sequential({
    "tasks": [
        {
            "params": { "excludeFiles": params.eslintExcludeFiles },
            "type": "eslintbrowserall"
        },
        { "type": "uglifyjsall" },
        { "type": "cssnanoall" },
        {
            "params": {
                "data": processor.context.versionInfo,
                "filename": "version.txt"
            },
            "type": "writefile"
        },
        {
            "params": {
                "archive": `${processor.context.reponame}.zip`,
                "directory": ""
            },
            "type": "zip"
        }
    ]
}, processor);
@ -0,0 +1,28 @@
"use strict";

import sequential from "./sequential";

export default ((params, processor) => sequential({
    tasks: [
        {
            params: { excludeFiles: params.eslintExcludeFiles },
            type: "eslintbrowserall",
        },
        { type: "uglifyjsall" },
        { type: "cssnanoall" },
        {
            params: {
                data: processor.context.versionInfo,
                filename: "version.txt",
            },
            type: "writefile",
        },
        {
            params: {
                archive: `${processor.context.reponame}.zip`,
                directory: "",
            },
            type: "zip",
        },
    ],
}, processor)) as Task;
@ -1,7 +0,0 @@
"use strict";

const async = require("async");

const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);

module.exports = (params, processor) => ({ "process": () => async.parallel(params.tasks.map(mapper(processor)), () => processor.done()) });
@ -0,0 +1,7 @@
"use strict";

import { parallel } from "async";

const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);

export default ((params, processor) => () => parallel(params.tasks.map(mapper(processor)), processor.done)) as Task;
@ -1,7 +0,0 @@
"use strict";

const async = require("async");

const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);

module.exports = (params, processor) => ({ "process": () => async.series(params.tasks.map(mapper(processor)), () => processor.done()) });
@ -0,0 +1,7 @@
"use strict";

import { series } from "async";

const mapper = (processor) => (task) => (callback) => processor.processTask(task, callback);

export default ((params, processor) => () => series(params.tasks.map(mapper(processor)), processor.done)) as Task;
@ -1,23 +0,0 @@
"use strict";

const fs = require("fs");
const path = require("path");
const UglifyJS = require("uglify-js");

module.exports = (params, processor) => ({
    "process": () => {
        const filePath = path.normalize(path.join(processor.context.exported, params.filename));
        const result = UglifyJS.minify(filePath);

        fs.writeFile(filePath, result.code, (err) => {
            if (err) {
                processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
            } else {
                processor.onInfo(`Saved uglified script for ${params.filename}; uglified length: ${result.code.length}`);
            }

            processor.done();
        });
    }
});

@ -0,0 +1,20 @@
"use strict";

import { writeFile } from "fs";
import { join, normalize } from "path";
import { minify } from "uglify-js";

export default ((params, processor) => () => {
    const filePath = normalize(join(processor.context.exported, params.filename));
    const result = minify(filePath);

    writeFile(filePath, result.code, (err) => {
        if (err) {
            processor.onError(`Unable to write uglified script for ${params.filename}: ${err}`);
        } else {
            processor.onInfo(`Saved uglified script for ${params.filename}; uglified length: ${result.code.length}`);
        }

        processor.done();
    });
}) as Task;
@ -1,37 +0,0 @@
"use strict";

const glob = require("glob");

const doneFlagName = "uglifyjsallDone";

module.exports = (params, processor) => ({
    "process": () => {
        if (processor.context.containsFlag(doneFlagName)) {
            processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
        }

        processor.context.addFlag(doneFlagName);

        glob("**/*.js", {
            "cwd": processor.context.exported,
            "dot": true
        }, (err, files) => {
            if (err) {
                processor.onError(err);

                return processor.done();
            }

            return processor.processTask({
                "params": {
                    "tasks": files.map((file) => ({
                        "name": file,
                        "params": { "filename": file },
                        "type": "uglifyjs"
                    }))
                },
                "type": (params.preventParallelTests && "sequential") || "parallel"
            }, processor.done.bind(processor));
        });
    }
});
@ -0,0 +1,35 @@
"use strict";

import * as glob from "glob";

const doneFlagName = "uglifyjsallDone";

export default ((params, processor) => () => {
    if (processor.context.containsFlag(doneFlagName)) {
        processor.onWarn("dotnetnunitall task is executed more than once; this is probably a bug in your mbs.json");
    }

    processor.context.addFlag(doneFlagName);

    glob("**/*.js", {
        cwd: processor.context.exported,
        dot: true,
    }, (err, files) => {
        if (err) {
            processor.onError(err);

            return processor.done();
        }

        return processor.processTask({
            params: {
                tasks: files.map((file) => ({
                    name: file,
                    params: { filename: file },
                    type: "uglifyjs",
                })),
            },
            type: (params.preventParallelTests && "sequential") || "parallel",
        }, processor.done);
    });
}) as Task;
@ -1,22 +0,0 @@
"use strict";

const fs = require("fs");
const path = require("path");

module.exports = (params, processor) => ({
    "process": () => {
        const filePath = path.join(processor.context.exported, params.filename);

        processor.onInfo(`Writing to ${filePath}`);

        fs.writeFile(filePath, params.data, (err) => {
            if (err) {
                processor.onError(`Unable to write file: ${err}`);
            } else {
                processor.onInfo("Written file");
            }

            return processor.done();
        });
    }
});
@ -0,0 +1,20 @@
"use strict";

import { writeFile } from "fs";
import { join } from "path";

export default ((params, processor) => () => {
    const filePath = join(processor.context.exported, params.filename);

    processor.onInfo(`Writing to ${filePath}`);

    writeFile(filePath, params.data, (err) => {
        if (err) {
            processor.onError(`Unable to write file: ${err}`);
        } else {
            processor.onInfo("Written file");
        }

        return processor.done();
    });
}) as Task;
@ -1,24 +0,0 @@
"use strict";

const fs = require("fs");
const path = require("path");
const Archiver = require("archiver");

module.exports = (params, processor) => ({
    "process": () => {
        const sourceDirectoryPath = path.normalize(path.join(processor.context.exported, String(params.directory || "")));
        const targetArchivePath = path.normalize(path.join(processor.context.release, params.archive));

        processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`);

        const output = fs.createWriteStream(targetArchivePath);
        const archive = new Archiver("zip");

        output.on("close", () => processor.done());

        archive.on("error", (err) => processor.onError(`Error while compressing: ${err}`));
        archive.pipe(output);
        archive.directory(sourceDirectoryPath, false);
        archive.finalize();
    }
});
@ -0,0 +1,22 @@
"use strict";

import { create as createArchiver } from "archiver";
import { createWriteStream } from "fs";
import { join, normalize } from "path";

export default ((params, processor) => () => {
    const sourceDirectoryPath = normalize(join(processor.context.exported, String(params.directory || "")));
    const targetArchivePath = normalize(join(processor.context.release, params.archive));

    processor.onInfo(`Compressing "${params.directory}" to "${params.archive}"`);

    const output = createWriteStream(targetArchivePath);
    const archive = createArchiver("zip");

    output.on("close", processor.done);

    archive.on("error", (err) => processor.onError(`Error while compressing: ${err}`));
    archive.pipe(output);
    archive.directory(sourceDirectoryPath, false);
    archive.finalize();
}) as Task;
@ -1,16 +0,0 @@
"use strict";

module.exports = (req, res) => {
    const options = {
        "branch": `/refs/heads/${req.params.branch}`,
        "branchName": req.params.branch,
        "file": req.params[0],
        "owner": req.params.owner,
        "reponame": req.params.reponame,
        "rev": req.params.rev
    };

    const pathParts = [req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev, options.file];

    res.sendfile(pathParts.join("/"));
};
@ -0,0 +1,16 @@
"use strict";

export default (req, res) => {
    const options = {
        branch: `/refs/heads/${req.params.branch}`,
        branchName: req.params.branch,
        file: req.params[0],
        owner: req.params.owner,
        reponame: req.params.reponame,
        rev: req.params.rev,
    };

    const pathParts = [req.app.get("releasepath"), options.owner, options.reponame, options.branch, options.rev, options.file];

    res.sendfile(pathParts.join("/"));
};
@ -1,9 +0,0 @@
"use strict";

exports.index = (req, res) => res.render("index", { "title": `Express<br/>\r\n${req}` });

exports.postreceive = require("./postreceive");
exports.manual = require("./manual");
exports.status = require("./status");
exports.artifact = require("./artifact");
exports.release = require("./release");
@ -0,0 +1,11 @@
"use strict";

import artifact from "./artifact";
import * as manual from "./manual";
import postreceive from "./postreceive";
import release from "./release";
import * as status from "./status";

const index = (req, res) => res.render("index", { title: `Express<br/>\r\n${req}` });

export { index, postreceive, manual, status, artifact, release };
@ -1,22 +0,0 @@
"use strict";

const _ = require("underscore");
const builder = require("../lib/builder");

exports.get = (req, res) => res.render("manual");

exports.post = (req, res) => {
    const options = _.extend(req.body, {
        "app": req.app,
        "url": `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}`
    });

    builder.build(options, (err, result) => {
        console.log("Done processing manual request");
        console.log(`Error: ${err}`);
        res.render("manual-done", {
            err,
            result
        });
    });
};
@ -0,0 +1,22 @@
"use strict";

import * as _ from "underscore";
import { build } from "../lib/builder";

export const get = (req, res) => res.render("manual");

export const post = (req, res) => {
    const options = _.extend(req.body, {
        app: req.app,
        url: `https://pos-github.payonline.ru/${req.body.owner}/${req.body.reponame}`,
    });

    build(options, (err, result) => {
        console.log("Done processing manual request");
        console.log(`Error: ${err}`);
        res.render("manual-done", {
            err,
            result,
        });
    });
};
@ -0,0 +1,19 @@
{
    "compilerOptions": {
        "module": "commonjs",
        "target": "es6",
        "sourceMap": false,
        "strictNullChecks": true,
        "typeRoots": [
            "node_modules/@types"
        ]
    },
    "include": [
        "*.ts",
        "lib/**/*.ts",
        "routes/**/*.ts"
    ],
    "exclude": [
        "node_modules/**/*.ts"
    ]
}
@ -0,0 +1,10 @@
{
    "extends": [
        "tslint:latest",
        "tslint-eslint-rules"
    ],
    "rules": {
        "no-console": false,
        "max-line-length": false
    }
}
@ -0,0 +1,5 @@
{
    "dependencies": {
        "debug": "registry:npm/debug#2.0.0+20160723033700"
    }
}