add to sdk config

pull/248/head
Fabian Jakobs 2016-02-03 18:52:24 +00:00
parent e7d3ee3abc
commit 9be1eb782e
23 changed files with 1429 additions and 0 deletions

100
b9/b9 100755
View file

@@ -0,0 +1,100 @@
#!/bin/bash
set -e
SCRIPT_NAME=$BASH_SOURCE
if [ -h $SCRIPT_NAME ]; then SCRIPT_NAME=$(readlink $SCRIPT_NAME); fi
cd $(dirname $SCRIPT_NAME)
readonly UNAME=$(id -n -u)
for TMPDIR in /var/lib/docker/tmp /tmp; do
TMPDIR=$TMPDIR/$UNAME
mkdir -p $TMPDIR && break
done
TMP=$TMPDIR
TEMP=$TMPDIR
B9_DIR=$(pwd)
C9_DIR=$B9_DIR/..
B9=$B9_DIR/b9
DEBUG=""
for MODULE in ./lib/*.sh; do
source $MODULE
done
usage() {
echo "Usage: $B9 [global options ...] COMMAND [commands options...]"
echo
echo "Cloud9 build tool"
echo
echo "Global options:"
echo " --help show this help message"
echo " --debug trace bash commands"
echo
echo "Commands:"
echo
echo "[Deployment]"
echo " package Package and upload version of Cloud9"
echo " deploy Deploy a Cloud9 version"
echo ""
echo "[Internal]"
echo " check Run b9 tests"
echo " exec COMMAND [ARGS] Run arbitrary b9 function"
echo
exit 1
}
for ARG in "$@"; do
case $ARG in
--help|-h)
usage
;;
--debug)
DEBUG="--debug"
B9="$B9 --debug"
shift
;;
*)
break
;;
esac
done
ORIGINAL_COMMAND=$1
case $ORIGINAL_COMMAND in
package)
COMMAND=b9_package
;;
deploy)
COMMAND=b9_deploy
;;
prepare)
COMMAND=b9_prepare
;;
check)
COMMAND=b9_check
;;
exec) # for debugging only!
shift
COMMAND=$1
;;
"")
usage
;;
*)
echo "Invalid command. See $B9 --help for usage."
exit 1
;;
esac
shift
if [ "$DEBUG" ]; then
set -x
fi
$COMMAND "$@"
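# Illustrative invocations (the tree-ish, service name and server pattern below are
# placeholders, not part of this commit):
#   ./b9 package origin/master --type=newclient
#   ./b9 --debug deploy oldclient origin/master 'newclient-.*' --settings=deploy --regex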

View file

@@ -0,0 +1,21 @@
FROM debian:8.2
MAINTAINER Fabian Jakobs <fabian@c9.io>
ENV DEBIAN_FRONTEND noninteractive
# ubuntu user
RUN useradd --uid 1000 --shell /bin/bash -m --home-dir /home/ubuntu ubuntu && \
chmod 777 /tmp
RUN apt-get update && \
apt-get install -y curl openssh-client rsync && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# install nodejs
RUN curl https://nodejs.org/dist/v0.10.41/node-v0.10.41-linux-x64.tar.gz | tar xvzf - -C /usr/local --strip-components=1
USER ubuntu
EXPOSE 8080
WORKDIR /home/ubuntu/newclient
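# For illustration: assuming this is the image built by b9/lib/_docker.sh
# ("docker build -t newclient --rm $B9_DIR/containers/newclient"), the _b9_npm
# wrapper runs npm inside a container of this image with the package work
# directory mounted at /home/ubuntu/newclient.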

View file

@@ -0,0 +1,14 @@
Host github.com
User git
Port 22
Hostname github.com
IdentityFile /home/ubuntu/.ssh/id_rsa_deploy
TCPKeepAlive yes
IdentitiesOnly yes
StrictHostKeyChecking no
Host static.c9.io
IdentityFile /home/ubuntu/.ssh/id_rsa_deploy
StrictHostKeyChecking no
TCPKeepAlive yes
IdentitiesOnly yes

View file

@@ -0,0 +1,51 @@
FROM debian:8.2
MAINTAINER Fabian Jakobs <fabian@c9.io>
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update
RUN apt-get install -y curl
# ubuntu user
RUN useradd --uid 1000 --shell /bin/bash -m --home-dir /home/ubuntu ubuntu && \
chmod 777 /tmp
# make ubuntu user sudo
RUN apt-get install -y sudo && \
sed -i 's/%sudo\s.*/%sudo ALL=NOPASSWD:ALL/' /etc/sudoers && \
usermod -a -G sudo ubuntu
RUN chown root:root /usr/bin/sudo && chmod 4755 /usr/bin/sudo
# install nodejs
RUN mkdir /nodejs && curl https://nodejs.org/dist/v0.10.40/node-v0.10.40-linux-x64.tar.gz | tar xvzf - -C /usr/local --strip-components=1
RUN npm install -g npm@2.14.11
# oldclient
RUN apt-get install -y openssh-client
# test runner
RUN npm install -g mocha
# install jsonalyzer dependencies
RUN apt-get install -y golang tmux python python-pip pylint php5 ruby build-essential
# test runner
RUN apt-get install -y redis-server
# for odev
RUN apt-get install -y haproxy
# for CI
RUN apt-get install -y git
ADD files/ssh_config /home/ubuntu/.ssh/config
USER ubuntu
# Cloud9 installer
RUN curl -L https://raw.githubusercontent.com/c9/install/master/install.sh | bash
EXPOSE 8080
VOLUME /home/ubuntu/newclient
WORKDIR /home/ubuntu/newclient

View file

@@ -0,0 +1,14 @@
Host github.com
User git
Port 22
Hostname github.com
IdentityFile /home/ubuntu/.ssh/id_rsa_deploy
TCPKeepAlive yes
IdentitiesOnly yes
StrictHostKeyChecking no
Host static.c9.io
IdentityFile /home/ubuntu/.ssh/id_rsa_deploy
StrictHostKeyChecking no
TCPKeepAlive yes
IdentitiesOnly yes

17
b9/lib/_docker.sh 100644
View file

@@ -0,0 +1,17 @@
_DO_NEWCLIENT_IMAGE=
_b9_get_newclient_image() {
if [ ! -z "$_DO_NEWCLIENT_IMAGE" ]; then
echo $_DO_NEWCLIENT_IMAGE
return
fi
local RESULT=$(docker build -t newclient --rm $B9_DIR/containers/newclient)
if [[ $(echo "$RESULT" | tail -n1) =~ Successfully\ built ]]; then
_DO_NEWCLIENT_IMAGE=$(echo "$RESULT" | tail -n1 | awk '{print $3}')
echo $_DO_NEWCLIENT_IMAGE
return
fi
echo $RESULT
return 1
}

15
b9/lib/_git.sh 100644
View file

@@ -0,0 +1,15 @@
_b9_git_get_hash() {
pushd $C9_DIR &> /dev/null
git rev-parse HEAD
popd &> /dev/null
}
_b9_git_get_hash_short() {
pushd $C9_DIR &> /dev/null
git rev-parse --short=10 HEAD
popd &> /dev/null
}

8
b9/lib/_npm.sh 100644
View file

@@ -0,0 +1,8 @@
_b9_npm() {
local WORKDIR=$1
shift
docker run --rm -w /home/ubuntu/newclient -v $WORKDIR:/home/ubuntu/newclient -v $HOME/.ssh/id_rsa_deploy:/home/ubuntu/.ssh/id_rsa_deploy:ro --sig-proxy -a STDIN -a STDOUT -a STDERR $(_b9_get_newclient_image) npm "$@"
# pushd $WORKDIR
# npm "$@"
# popd
}
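# Example usage (as in prepare.sh and package_docker.sh): _b9_npm "$WORKDIR" install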

6
b9/lib/check.sh 100644
View file

@@ -0,0 +1,6 @@
source $C9_DIR/plugins/c9.docker/d9/_testing.sh
b9_check() {
echo "Running B9 tests"
_do_check_package
}

244
b9/lib/deploy.sh 100644
View file

@@ -0,0 +1,244 @@
b9_deploy_usage() {
echo "Usage: $B9 deploy SERVICES TREEISH SERVER_PATTERN [ARG...]"
echo
echo "Deploy a Cloud9 version"
echo
echo "Options:"
echo " --settings=[all|beta|deploy|onlinedev] (default: all)"
echo " --strategy=[slow_start|parallel|serial] Deploy strategy to use (default: slow_start)"
echo " --regex Interpret server patter as regular expression"
echo " --no-check skip the health check"
exit 1
}
b9_deploy() {
[ "$1" == "--help" ] && b9_deploy_usage
local SERVICES=$1 && shift
local TREEISH=$1 && shift
local SERVER_PATTERN=$1 && shift
local SETTINGS=devel
local DRY_RUN=""
local ASSET="gcs"
local USE_REGEX=""
local NO_CHECK=""
local TYPE=newclient
local STRATEGY=slow_start
[ -z "$SERVICES" ] && b9_deploy_usage
[ -z "$TREEISH" ] && b9_deploy_usage
[ -z "$SERVER_PATTERN" ] && b9_deploy_usage
local ARG
for ARG in "$@"; do
case $ARG in
--settings=*)
SETTINGS="${ARG#*=}"
shift
;;
--strategy=*)
STRATEGY="${ARG#*=}"
shift
;;
--docker)
ASSET="docker"
shift
;;
--no-check)
NO_CHECK="--no-check"
shift
;;
--regex)
USE_REGEX="--regex"
shift
;;
--dry-run)
DRY_RUN="1"
shift
;;
--help)
b9_deploy_usage
shift
;;
*)
b9_deploy_usage
;;
esac
done
[ "$SERVICES" == "docker" ] && TYPE=docker
local SERVER_LIST
local VERSION
local TMPFILE=$(tempfile)
b9_package $TREEISH --settings=$SETTINGS --type=$TYPE | tee $TMPFILE
VERSION=$(cat $TMPFILE | tail -n1)
rm $TMPFILE
SERVER_LIST="$(_b9_deploy_server_list $SERVER_PATTERN $USE_REGEX)"
local CMD="$B9 exec _b9_deploy_one_from_${ASSET} $VERSION $SERVICES $SETTINGS $NO_CHECK"
if [ "$DRY_RUN" == "1" ]; then
CMD="echo $CMD"
fi
_b9_deploy_release_event "$SERVICES" $SETTINGS $VERSION $SERVER_PATTERN
_b9_deploy_strategy_${STRATEGY} "$SERVER_LIST" "$CMD"
}
_b9_deploy_strategy_slow_start() {
local SERVER_LIST=$1
local CMD=$2
# first one
$CMD $(echo "$SERVER_LIST" | head -n1)
# then two
echo "$SERVER_LIST" | tail -n +2 | head -n2 | parallel --halt 2 $CMD
# then the rest
echo "$SERVER_LIST" | tail -n +4 | parallel --halt 2 -j 15 $CMD
}
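# Net effect, for illustration: with N servers the rollout hits 1 server first, then 2
# in parallel, then the remaining N-3 with up to 15 concurrent jobs; --halt 2 makes
# GNU parallel abort the run as soon as one deploy fails.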
_b9_deploy_strategy_parallel() {
local SERVER_LIST=$1
local CMD=$2
# first one
$CMD $(echo "$SERVER_LIST" | head -n1)
# then the rest
echo "$SERVER_LIST" | tail -n +2 | parallel --halt 2 -j 30 $CMD
}
_b9_deploy_strategy_serial() {
local SERVER_LIST=$1
local CMD=$2
echo "$SERVER_LIST" | xargs -n1 $CMD
}
_b9_deploy_server_list () {
local SERVER_PATTERN=$1
local USE_REGEX=$2
$C9_DIR/scripts/gssh --no-cache $USE_REGEX --print-names "$SERVER_PATTERN" | shuf
}
_b9_deploy_one_from_gcs() {
local VERSION=$1
local SERVICES=$2
local SETTINGS=$3
# the optional --no-check flag precedes the server name appended by parallel/xargs
local NO_CHECK=""
if [ "$4" == "--no-check" ]; then
NO_CHECK=$4
shift
fi
local SERVER=$4
echo Deploying $VERSION \($SERVICES\) to $SERVER ... >&2
_b9_deploy_upload_from_gcs $VERSION $SERVER
_b9_deploy_update_services $VERSION $SERVICES $SERVER $SETTINGS
[ -z "$NO_CHECK" ] && _b9_deploy_check $SERVER $SERVICES $SETTINGS
echo Deployed $VERSION to $SERVER >&2
}
_b9_deploy_upload_from_gcs() {
local VERSION=$1
local SERVER=$2
local TGZ
TGZ=$(_d9_package_download_gcs $VERSION)
local VERSIONS_DIR="/home/ubuntu/versions"
local TARGET_FILE=${VERSIONS_DIR}/$(basename $TGZ)
local TARGET_DIR=${VERSIONS_DIR}/$(basename $TGZ ".tar.xz")
_b9_deploy_ssh $SERVER "rm -rf $TARGET_DIR $TARGET_FILE; mkdir -p /home/ubuntu/versions/history"
_b9_deploy_scp $TGZ $SERVER:$TARGET_FILE
_b9_deploy_ssh $SERVER "cd /home/ubuntu/versions && tar xf $TARGET_FILE && rm $TARGET_FILE"
}
_b9_deploy_update_services() {
local VERSION=$1
local SERVICES=$2
local SERVER=$3
local SETTINGS=$4
local TOTAL_VERSIONS_TO_KEEP=5
local VERSIONS_DIR="/home/ubuntu/versions"
local TARGET_DIR=${VERSIONS_DIR}/$VERSION
local BUILD_NAME=$(echo $VERSION | awk -F- '{printf "%s-%s-%s", $1, $2, $3}')
_b9_deploy_ssh $SERVER "
for SERVICE in $(echo $SERVICES | sed 's/,/ /g'); do
mv /home/ubuntu/\$SERVICE /home/ubuntu/versions/history/\$SERVICE-$(date +%FT%T) &>/dev/null;
ln -s $TARGET_DIR /home/ubuntu/\$SERVICE;
done
~/supervisord_start_script.sh || ~/supervisord_start_script.sh -f || ~/supervisord_start_script.sh -f;
cd /home/ubuntu/versions;
ls -t 2>/dev/null | grep $BUILD_NAME | tail -n +$(($TOTAL_VERSIONS_TO_KEEP + 1)) | xargs sudo rm -rf;"
}
_b9_deploy_check() {
local SERVER=$1
local SERVICES=$2
local SETTINGS=$3
echo $SERVICES | sed 's/,/\n/g' | parallel --halt 2 -j 0 $B9 exec _b9_deploy_check_one $SERVER $SETTINGS
}
_b9_deploy_check_one() {
local SERVER=$1
local SETTINGS=$2
local SERVICE=$3
local HOST
local PORT
local WAIT=default
HOST=$(echo $SERVER | awk -F@ '{ print $2}')
if [ "$SERVICE" == "oldclient" ]; then
SERVICE="c9"
elif [ "$SERVICE" == "docker" ]; then
WAIT=long
SERVICE="docker-daemon"
elif [[ $SERVICE =~ ^vfs-[0-9]$ ]]; then
PORT="--port=804$(echo $SERVICE | awk -F- '{print $2}')"
SERVICE="vfs"
else
SERVICE=${SERVICE//-/_}
fi
if ! $C9_DIR/scripts/check-safe-deploy.sh --wait=$WAIT $PORT --server=$HOST --mode=$SETTINGS --service=$SERVICE; then
echo "One or more safe deploy checks failed :(" >&2
exit 1
fi
}
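# Mapping examples, derived from the cases above: "oldclient" is checked as service
# "c9"; "docker" as "docker-daemon" with a long wait; "vfs-3" as service "vfs" on
# --port=8043; any other service name has "-" replaced by "_".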
_b9_deploy_release_event() {
local SERVICES=$1
local SETTINGS=$2
local VERSION=$3
local SERVER_PATTERN=$4
echo $SERVICES | sed 's/,/\n/g' | xargs -I '{}' -n1 node $B9_DIR/lib/js/release_event.js '{}' $SETTINGS $VERSION $SERVER_PATTERN
}
_b9_deploy_ssh() {
/usr/bin/ssh \
-o LogLevel=ERROR \
-o StrictHostKeyChecking=no \
-o UserKnownHostsFile=/dev/null \
-i $(find ~/.ssh/ -name "*" | grep -Pe "./(google_compute_engine|id_rsa_ansible|id_rsa)$" | head -1) \
"$@"
}
_b9_deploy_scp() {
/usr/bin/scp \
-o LogLevel=ERROR \
-o StrictHostKeyChecking=no \
-o UserKnownHostsFile=/dev/null \
-i $(find ~/.ssh/ -name "*" | grep -Pe "./(google_compute_engine|id_rsa_ansible|id_rsa)$" | head -1) \
"$@"
}

View file

@@ -0,0 +1,118 @@
#!/usr/bin/env node
"use strict";
var DEFAULT_MODULES = ["c9", "simple-template", "architect", "amd-loader", "heapdump", "optimist"];
var DEFAULT_SETTINGS = "deploy";
var npmBuild = require("architect-build/npm_build");
var async = require("async");
var fs = require("fs");
var optimist = require("optimist");
var _ = require("lodash");
module.exports = nodeModules;
if (!module.parent) {
main(process.argv.slice(2), function(err) {
if (err) {
console.error(err);
console.error("Stacktrace: ", err.stack);
process.exit(1);
}
});
}
function main(argv, callback) {
var options = optimist(argv)
.usage("Usage: $0 [CONFIG_NAME] [--help]")
.alias("s", "settings")
.default("settings", DEFAULT_SETTINGS)
.describe("settings", "Settings file to use")
.default("source", __dirname + "/../../..")
.describe("source", "Source directory")
.describe("targetFile", "Target package.json")
.boolean("help")
.describe("help", "Show command line options.");
argv = options.argv;
if (argv.help) {
options.showHelp();
return callback();
}
if (argv._.length != 1) {
options.showHelp();
return callback();
}
var config = argv._[0];
var settings = argv.settings;
var source = argv.source;
nodeModules(source, config, settings, function(err, json) {
if (err) return callback(err);
if (argv.targetFile)
fs.writeFileSync(argv.targetFile, JSON.stringify(json, null, 2));
else
console.log(JSON.stringify(json, null, 2));
});
}
function calculateRequiredNodeModules(sourceDir, buildConfig, configNames, settingsName, callback) {
if (buildConfig.nodeModulesInclude === "*") { // If the user specifically asks for all don't bother calculating
return callback();
}
async.map(configNames.split(","), calculateModules, function (err, modules) {
if (err) return callback(err);
return callback(null, _.uniq(_.flatten(modules))); /* Flatten array and remove duplicates */
});
function calculateModules (configName, done) {
npmBuild({
root: sourceDir,
args: [configName, '-s', settingsName]
}, function (err, result) {
if (err) return done(err);
var deps = result.roots;
var nodeModules = [];
deps.forEach(function (dep) {
if (dep.match(/node_modules/)) {
nodeModules.push(dep.replace(/node_modules\//, ""));
}
});
nodeModules.sort();
return done(null, nodeModules);
});
}
}
function nodeModules(sourceDir, configNames, settingsName, callback) {
var buildConfig = require(sourceDir + "/configs/" + configNames).buildConfig({mode: settingsName});
var packageJson = require(sourceDir + "/package.json");
var nodeModules = packageJson.dependencies;
delete packageJson.devDependencies;
delete packageJson.scripts;
/* Calculate which modules this config needs, so they can be concatenated with nodeModulesInclude */
if (buildConfig.nodeModulesInclude === "*") // keep the full dependency list when the config asks for everything
return callback(null, packageJson);
calculateRequiredNodeModules(sourceDir, buildConfig, configNames, settingsName, function(err, modules) {
if (err) return callback(err);
var allModules = buildConfig.nodeModulesInclude.concat(modules).concat(DEFAULT_MODULES);
packageJson.dependencies = allModules.reduce(function(deps, name) {
if (nodeModules[name])
deps[name] = nodeModules[name];
else
deps[name] = "*";
return deps;
}, {});
callback(null, packageJson);
});
}

View file

@@ -0,0 +1,23 @@
#!/usr/bin/env node
/*global describe it before after beforeEach afterEach */
"use strict";
"use server";
require("c9/inline-mocha")(module, null, { globals: ["define"]});
var assert = require("assert-diff");
var nodeModules = require("./filter_node_modules");
describe(__filename, function() {
it("should filter node modules for docker", function(done) {
nodeModules(__dirname + "/../../..", "docker", "deploy", function(err, config) {
assert(!err, err);
assert(config.dependencies["optimist"]);
assert(!config.dependencies["nodemailer-smtp-transport"]);
done();
});
});
});

View file

@@ -0,0 +1,100 @@
#!/usr/bin/env node
"use strict";
var DEFAULT_SETTINGS = "deploy";
var ALWAYS_INCLUDE_SETTINGS = ["node", "mode", "manifest", "domains", "primaryDomain", "primaryBaseUrl", "baseUrlPattern"];
var fs = require("fs");
var optimist = require("optimist");
var loadManifest = require("c9/manifest").load;
var reJSON = require("c9/json-with-re");
module.exports = generateSettings;
if (!module.parent) {
main(process.argv.slice(2), function(err) {
if (err) {
console.error(err);
console.error("Stacktrace: ", err.stack);
process.exit(1);
}
});
}
function main(argv, callback) {
var options = optimist(argv)
.usage("Usage: $0 [CONFIG_NAME] [--help]")
.alias("s", "settings")
.default("settings", DEFAULT_SETTINGS)
.describe("settings", "Settings file to use")
.default("source", __dirname + "/../../..")
.describe("source", "Source directory")
.describe("targetFile", "Target package.json")
.boolean("help")
.describe("help", "Show command line options.");
argv = options.argv;
if (argv.help) {
options.showHelp();
return callback();
}
if (argv._.length != 1) {
options.showHelp();
return callback();
}
var config = argv._[0];
var settingsName = argv.settings;
var source = argv.source;
generateSettings(source, config, settingsName, function(err, contents) {
if (err) return callback(err);
if (argv.targetFile)
fs.writeFileSync(argv.targetFile, contents);
else
console.log(contents);
});
}
function generateSettings(source, config, settingsName, callback) {
// Check if build already exists.
var manifest = loadManifest(source);
manifest.hostname = "[%type%]-[%provider%]-[%region%]-[%index%]-[%env%]";
var oldSettings;
try {
oldSettings = require(source + "/settings/" + settingsName)(manifest);
} catch (e) {
return callback(e);
}
var buildConfig = require(source + "/configs/" + config).buildConfig({mode: settingsName});
var newSettings;
if (buildConfig.settingsInclude == "*") {
newSettings = oldSettings;
}
else {
buildConfig.settingsInclude = buildConfig.settingsInclude.concat(ALWAYS_INCLUDE_SETTINGS);
newSettings = buildConfig.settingsInclude.reduce(function(settings, name) {
settings[name] = oldSettings[name];
return settings;
}, {});
}
newSettings.node = oldSettings.node;
var contents =
"var hostname = require('c9/hostname');\n" +
"var reJSON = require('c9/json-with-re');\n" +
"var fill = require('simple-template').fill;\n" +
"module.exports = function() {\n" +
" var options = hostname.parse(hostname.get());\n" +
" options.root = __dirname + '/..';\n" +
" var template = " + reJSON.stringify(newSettings, 2).replace(new RegExp(source, "g"), "[%root%]") + ";\n" +
" return reJSON.parse(fill(JSON.stringify(template), options));\n" +
"};";
callback(null, contents);
}
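// For illustration: the generated module, when required and called at runtime, parses the
// local hostname into the [%type%]/[%provider%]/[%region%]/[%index%]/[%env%] fields used
// in the template above and fills them back into the filtered settings.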

View file

@@ -0,0 +1,52 @@
#!/usr/bin/env node
/*global describe it before after beforeEach afterEach */
"use strict";
"use server";
require("c9/inline-mocha")(module, null, { globals: ["define"]});
var assert = require("assert-diff");
var vm = require("vm");
var generateSettings = require("./generate_settings");
describe(__filename, function() {
it("should filter settings file", function(done) {
generateSettings(__dirname + "/../../..", "docker", "deploy", function(err, settings) {
assert(!err, err);
settings = eval(settings)();
assert(settings.docker);
assert(settings["docker-daemon"]);
assert(settings.aws);
assert(settings.sapi);
assert(settings.rabbitmq);
assert(!settings.c9);
assert(!settings.auth);
assert(!settings.worker);
assert(!settings.captcha);
assert(!settings.sendgrid);
assert(!settings.redis);
assert(!settings["redis-slave"]);
assert(!settings.sessionredis);
assert(!settings["sessionredis-slave"]);
assert(!settings.github);
assert(!settings.bitbucket);
assert(!settings.salesforce);
assert(!settings.google);
assert(!settings.c9_auth);
assert(!settings.services);
assert(!settings.mailer);
assert(!settings.zuora);
assert(!settings.pricing);
assert(!settings.catalog);
assert(!settings.minfraud);
assert(!settings.support);
done();
});
});
});

View file

@@ -0,0 +1,69 @@
#!/usr/bin/env node
"use strict";
var DEFAULT_SETTINGS = "deploy";
var optimist = require("optimist");
var loadManifest = require("c9/manifest").load;
module.exports = listPlugins;
if (!module.parent) {
main(process.argv.slice(2), function(err) {
if (err) {
console.error(err);
console.error("Stacktrace: ", err.stack);
process.exit(1);
}
});
}
function main(argv, callback) {
var options = optimist(argv)
.usage("Usage: $0 [CONFIG_NAME] [--help]")
.alias("s", "settings")
.default("settings", DEFAULT_SETTINGS)
.describe("settings", "Settings file to use")
.default("source", __dirname + "/../../..")
.describe("source", "Source directory")
.boolean("help")
.describe("help", "Show command line options.");
argv = options.argv;
if (argv.help) {
options.showHelp();
return callback();
}
if (argv._.length != 1) {
options.showHelp();
return callback();
}
var config = argv._[0];
var settingsName = argv.settings;
var source = argv.source;
listPlugins(source, config, settingsName).forEach(function(line) {
console.log(line);
});
}
function listPlugins(source, configName, settingsName) {
var manifest = loadManifest(source);
manifest.hostname = "[%type%]-[%provider%]-[%region%]-[%index%]-[%env%]";
var settings = require(source + "/settings/" + settingsName)(manifest);
var config = require(source + "/configs/" + configName)(settings, optimist([]));
var plugins = Object.keys(config.reduce(function(processedPlugins, plugin) {
var packagePath = plugin.packagePath || plugin;
if (packagePath.indexOf("./") === 0) {
var pluginDir = packagePath.slice(2, packagePath.indexOf("/", 2));
processedPlugins[pluginDir] = true;
}
return processedPlugins;
}, {}));
return plugins;
}

View file

@@ -0,0 +1,22 @@
#!/usr/bin/env node
/*global describe it before after beforeEach afterEach */
"use strict";
"use server";
require("c9/inline-mocha")(module, null, { globals: ["define"]});
var assert = require("assert-diff");
var listPlugins = require("./list_plugins");
describe(__filename, function() {
it("should filter node modules for docker", function() {
var list = listPlugins(__dirname + "/../../..", "docker", "deploy");
assert(list.indexOf("c9.docker") >= 0);
assert(list.indexOf("c9.mq") >= 0);
assert(list.indexOf("c9.db.redis") == -1);
});
});

View file

@@ -0,0 +1,36 @@
"use strict";
var request = require('request');
var DATADOG_API_KEY = '64e56d39dfdd7f2bbf06f09100d51a18';
var DATADOG_API_URL = 'https://app.datadoghq.com/api/v1/events';
module.exports = releaseEvent;
if (!module.parent) {
var argv = process.argv;
releaseEvent(argv[2], argv[3], argv[4], argv[5]);
}
function datadogEvent(msg, callback) {
request.post({
url: DATADOG_API_URL,
qs: { api_key: DATADOG_API_KEY },
json: msg
}, callback);
}
function releaseEvent(application, mode, version, pattern) {
datadogEvent({
title: 'Release: ' + application + ' version ' + version + ' to "' + pattern + '"',
tags: [
'release',
'application:' + application,
'mode:' + mode,
'version:' + version,
'pattern:' + pattern
]
}, function(err) {
if (err) console.error("Error posting release event to datadog: " + err.message);
});
}
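// Called from deploy.sh once per service as (argument values illustrative):
//   node release_event.js SERVICE SETTINGS VERSION SERVER_PATTERN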

197
b9/lib/package.sh 100644
View file

@@ -0,0 +1,197 @@
readonly B9_PACKAGE_GIT_CACHE=$C9_DIR
b9_package_usage() {
echo "Usage: $B9 package TREEISH [ARG...]"
echo
echo "Package and upload a version of Cloud 9"
echo
echo "Options:"
echo " --settings=[all|beta|deploy|onlinedev] (default: all)"
echo " --type=[newclient|docker] (default: newclient)"
echo " --no-cache"
exit 1
}
b9_package() {
[ "$1" == "--help" ] && b9_package_usage
local TREEISH=$1
local TYPE=newclient
local SETTINGS=all
local STORAGE=gcs
local USE_CACHE=1
[ -z "$TREEISH" ] && b9_package_usage
shift
local ARG
for ARG in "$@"; do
case $ARG in
--settings=*)
SETTINGS="${ARG#*=}"
shift
;;
--type=*)
TYPE="${ARG#*=}"
shift
;;
--docker)
STORAGE=docker
shift
;;
--no-cache)
USE_CACHE=0
shift
;;
*)
b9_package_usage
;;
esac
done
local VERSION
local WORKDIR
[ "$TYPE" == "newclient" ] && SETTINGS=all
if [ "$TYPE" == "docker" ] && [ "$SETTINGS" == "all" ]; then
echo "You must define settings when packaging the docker daemon" 1>&2
exit 1
fi
_b9_package_init_git_cache
VERSION=c9-${TYPE}-${SETTINGS}-$(_b9_get_version $TREEISH)
if [ "$USE_CACHE" == "1" ] && _b9_package_is_cached $STORAGE $VERSION; then
echo $VERSION
return
fi
WORKDIR=$(_d9_package_init_work_dir $VERSION)
_d9_package_sync_workdir $TYPE $WORKDIR $VERSION $SETTINGS
_d9_package_npm_install $WORKDIR
_d9_package_cleanup_workdir $WORKDIR
_d9_package_upload_${STORAGE} $WORKDIR $VERSION
echo $VERSION
}
_b9_package_init_git_cache() {
pushd $B9_PACKAGE_GIT_CACHE &> /dev/null
if [ ! -d .git ]; then
git clone git@github.com:c9/newclient.git .
fi
git fetch origin
popd &> /dev/null
}
_d9_package_init_work_dir() {
local VERSION=$1
local WORK_DIR=$TMP/${VERSION}
mkdir -p $WORK_DIR
echo $WORK_DIR
}
_b9_get_version() {
local TREEISH=$1
pushd $B9_PACKAGE_GIT_CACHE &> /dev/null
echo $(git show $TREEISH:package.json | jq -r .version)-$(git rev-parse --short=8 $TREEISH)
popd &> /dev/null
}
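# For illustration, version strings produced above have the shape
#   c9-<type>-<settings>-<package.json version>-<short hash>
# e.g. c9-newclient-all-3.1.2000-ab12cd34 (version and hash made up); the awk field
# splits used for BUILD_NAME, HASH and the docker TAG rely on this layout.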
_b9_package_is_cached() {
local STORAGE=$1
local VERSION=$2
case $STORAGE in
gcs)
_b9_package_is_cached_gcs $VERSION
;;
docker)
_b9_package_is_cached_docker $VERSION
;;
*)
echo "Invalid storage type: $STORAGE"
exit 1
;;
esac
}
_d9_package_upload() {
local STORAGE=$1
local WORKDIR=$2
local VERSION=$3
case $STORAGE in
gcs)
_d9_package_upload_gcs $WORKDIR $VERSION
;;
docker)
_d9_package_upload_docker $WORKDIR $VERSION
;;
*)
exit 1
;;
esac
}
_d9_package_sync_workdir() {
local TYPE=$1
local WORKDIR=$2
local VERSION=$3
local SETTINGS=$4
case $TYPE in
newclient)
_d9_package_sync_workdir_newclient $WORKDIR $VERSION $SETTINGS
;;
docker)
_d9_package_sync_workdir_docker $WORKDIR $VERSION $SETTINGS
;;
*)
exit 1
;;
esac
}
_d9_package_npm_install() {
local WORKDIR=$1
pushd $WORKDIR &> /dev/null
_b9_install_deps
popd &> /dev/null
}
_d9_package_cleanup_workdir() {
local WORKDIR=$1
local REVISION
[ -z "$WORKDIR" ] && return 1
pushd $WORKDIR &> /dev/null
_d9_package_patch_package_json
rm -rf .git build bin local
popd &> /dev/null
}
_d9_package_patch_package_json() {
[ ! -d .git ] && return 0
REVISION=$(git rev-parse HEAD)
mv package.json _package.json
cat _package.json | jq ".revision=\"$REVISION\"" > package.json
rm _package.json
}
_do_check_package() {
b9_package origin/master --type=newclient --no-cache
b9_package origin/master --type=newclient
b9_package origin/master --type=docker --settings=deploy --no-cache
b9_package origin/master --docker --no-cache
}

View file

@@ -0,0 +1,115 @@
_d9_package_sync_workdir_docker() {
local WORKDIR=$1
local VERSION=$2
local SETTINGS=$3
local SOURCE=$WORKDIR/source
pushd $WORKDIR &> /dev/null
_do_package_docker_init_source $WORKDIR $SOURCE $VERSION $SETTINGS
_do_package_docker_init_workdir
_do_package_docker_node_modules $WORKDIR $SOURCE $SETTINGS
_do_package_docker_generate_settings $WORKDIR $SOURCE $SETTINGS
_do_package_docker_include_files $WORKDIR $SOURCE $SETTINGS
_do_package_docker_copy_plugins $WORKDIR $SOURCE $SETTINGS
rm -rf $SOURCE
popd &> /dev/null
}
_do_package_docker_init_source() {
local WORKDIR=$1
local SOURCE=$2
local VERSION=$3
local SETTINGS=$4
rm -rf $WORKDIR
mkdir -p $SOURCE
_d9_package_sync_workdir_newclient $SOURCE $VERSION $SETTINGS
_d9_package_npm_install $SOURCE
}
_do_package_docker_init_workdir() {
mkdir -p plugins
mkdir -p node_modules
mkdir -p settings
mkdir -p configs
}
_do_package_docker_node_modules() {
local WORKDIR=$1
local SOURCE=$2
local SETTINGS=$3
local NODE_MODULES
local MODULE
node $B9_DIR/lib/js/filter_node_modules.js docker --targetFile=$WORKDIR/package.json --source=$SOURCE --settings=$SETTINGS
NODE_MODULES=$(cat $WORKDIR/package.json | jq -r '.dependencies | keys | @sh')
mkdir -p $WORKDIR/node_modules
for MODULE in $NODE_MODULES; do
MODULE=${MODULE:1:-1} # strip the single quotes added by jq's @sh
if [ -d $SOURCE/node_modules/$MODULE ]; then
cp -a $SOURCE/node_modules/$MODULE $WORKDIR/node_modules
fi
done
pushd $WORKDIR &> /dev/null
_b9_npm "$WORKDIR" install
popd &> /dev/null
}
_do_package_docker_generate_settings() {
local WORKDIR=$1
local SOURCE=$2
local SETTINGS=$3
node $B9_DIR/lib/js/generate_settings.js docker --targetFile=$WORKDIR/settings/$SETTINGS.js --source=$SOURCE --settings=$SETTINGS
}
_do_package_docker_include_files() {
local WORKDIR=$1
local SOURCE=$2
local SETTINGS=$3
local BUILD_CONFIG
local FILE_INCLUDE
local PATTERN
pushd $WORKDIR &> /dev/null
BUILD_CONFIG=$(node -e "console.log(JSON.stringify(require('$SOURCE/configs/docker').buildConfig({mode: '$SETTINGS'})))")
FILE_INCLUDE=$(echo $BUILD_CONFIG | jq -r '.fileInclude | @sh')
for PATTERN in $FILE_INCLUDE; do
PATTERN=${PATTERN:1:-1} # strip the single quotes added by jq's @sh
mkdir -p $(dirname $PATTERN)
cp -a -R $SOURCE/$PATTERN $(dirname $PATTERN)
done
for PATTERN in "server.js" "scripts/tail-log.sh" "configs/docker.js"; do
mkdir -p $(dirname $PATTERN)
cp -a -R $SOURCE/$PATTERN $(dirname $PATTERN) || :
done
popd &> /dev/null
}
_do_package_docker_copy_plugins() {
local WORKDIR=$1
local SOURCE=$2
local SETTINGS=$3
local PLUGINS
local PLUGIN
PLUGINS=$(node $B9_DIR/lib/js/list_plugins.js docker --source=$SOURCE --settings=$SETTINGS)
for PLUGIN in $PLUGINS; do
cp -a $SOURCE/plugins/$PLUGIN $WORKDIR/plugins
done
}

View file

@@ -0,0 +1,17 @@
_d9_package_sync_workdir_newclient() {
local WORKDIR=$1
local VERSION=$2
local HASH
HASH=$(echo $VERSION | awk -F- '{print $5}')
rm -rf $WORKDIR
mkdir -p $WORKDIR
pushd $WORKDIR &> /dev/null
rsync -qrtv --delete $B9_PACKAGE_GIT_CACHE/.git $WORKDIR/
git reset --hard
git checkout $HASH
popd &> /dev/null
}

View file

@@ -0,0 +1,74 @@
readonly B9_DOCKER_REGISTRY=gcr.io/c9.io/cloud9gce
readonly B9_DOCKER_BUCKET=gs://artifacts.cloud9gce.c9.io.a.appspot.com
_b9_dockerize_update_base() {
local TREEISH=origin/master
local CID
local VERSION
# build package
local TMPFILE=$(tempfile)
b9_package $TREEISH --type=newclient | tee $TMPFILE
VERSION=$(cat $TMPFILE | tail -n1)
rm $TMPFILE
# build base image
docker build --rm -t $B9_DOCKER_REGISTRY/c9:base $B9_DIR/containers/c9
# mount $TMP at the same path inside the container so the exec below can read the tarball
CID=$(docker run -d -v $TMP:$TMP $B9_DOCKER_REGISTRY/c9:base sleep 1h)
# copy package to base
docker exec $CID bash -c "
cd /home/ubuntu &&
tar xf $TMP/$VERSION.tar.xz
rm -rf $VERSION.tgz newclient
mv $VERSION newclient"
# commit image
docker stop $CID
docker commit $CID $B9_DOCKER_REGISTRY/c9:base
# push
gcloud docker push $B9_DOCKER_REGISTRY/c9:base
}
_b9_package_is_cached_docker() {
local VERSION=$1
local TAG
TAG=$(echo $VERSION | awk -F- '{printf "%s-%s", $4, $5}')
_b9_dockerize_has_tag c9 $TAG
}
_d9_package_upload_docker() {
local WORKDIR=$1
local VERSION=$2
local CID
local TAG
gcloud docker pull $B9_DOCKER_REGISTRY/c9:base
CID=$(docker run -d -v $WORKDIR:/home/ubuntu/$(basename $WORKDIR):ro $B9_DOCKER_REGISTRY/c9:base sleep 1h)
# copy package
docker exec $CID bash -c "
cd /home/ubuntu &&
rsync -qrt --delete --checksum /home/ubuntu/$(basename $WORKDIR)/* newclient"
# commit image
TAG=$(echo $VERSION | awk -F- '{printf "%s-%s", $4, $5}')
docker stop $CID
docker commit $CID $B9_DOCKER_REGISTRY/c9:$TAG
# push
gcloud docker push $B9_DOCKER_REGISTRY/c9:$TAG
}
_b9_dockerize_has_tag() {
local REPO=$1
local TAG=$2
gsutil ls $B9_DOCKER_BUCKET/containers/repositories/library/${REPO}/tag_${TAG}
}

View file

@@ -0,0 +1,36 @@
_b9_package_is_cached_gcs() {
local VERSION=$1
gsutil ls gs://cloud9_ci_cache/$(basename $VERSION).tar.xz &> /dev/null
}
_d9_package_upload_gcs() {
local WORKDIR=$1
local VERSION=$2
local TMP_TAR
local CACHE_FILE
CACHE_FILE=$(basename $WORKDIR)
pushd $WORKDIR/.. &> /dev/null
TMP_TAR=$(mktemp -d b9-package-XXXXXXXXXXXXX --tmpdir=$TMP)/$CACHE_FILE.tar.xz
tar -cJf $TMP_TAR $CACHE_FILE
gsutil cp $TMP_TAR gs://cloud9_ci_cache
mv $TMP_TAR $TMP/$(basename $CACHE_FILE.tar.xz)
popd &> /dev/null
}
_d9_package_download_gcs() {
local VERSION=$1
local CACHE_FILE=$TMP/${VERSION}.tar.xz
if [ -f "$CACHE_FILE" ]; then
echo $CACHE_FILE
return
fi
gsutil cp gs://cloud9_ci_cache/$(basename $CACHE_FILE) $TMP
echo $CACHE_FILE
}
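# Summary, for illustration: packages are cached as gs://cloud9_ci_cache/<VERSION>.tar.xz
# and mirrored locally under $TMP so repeat deploys skip the download.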

80
b9/lib/prepare.sh 100644
View file

@@ -0,0 +1,80 @@
readonly NPMCACHE=$TMP
b9_prepare_usage() {
echo "Usage: $B9 prepare [OPTIONS]"
echo
echo "Prepare checkout for testing"
echo
echo "Options:"
echo " --help show this help message"
exit 1
}
b9_prepare() {
for ARG in "$@"; do
case $ARG in
--help|-h)
b9_prepare_usage
;;
*)
b9_prepare_usage
;;
esac
done
pushd $C9_DIR &> /dev/null
# npm
rm -rf node_modules
git checkout -- node_modules
_b9_install_deps
git checkout -- node_modules
popd &> /dev/null
}
_b9_install_deps() {
if [ -f npm-shrinkwrap.json ]; then
_b9_setup_node_modules npm-shrinkwrap.json
fi
if [ -f plugins/c9.profile/npm-shrinkwrap.json ]; then
_b9_setup_node_modules plugins/c9.profile/npm-shrinkwrap.json
fi
}
_b9_setup_node_modules() {
local PACKAGE_FILE=$1
local PACKAGE_PATH=$(dirname $PACKAGE_FILE)
local PACKAGE_MD5=$(cat $PACKAGE_FILE | jq 'del(.version)' | md5sum | awk '{print $1}')
local CACHE_FILE="npm-${PACKAGE_MD5}.tar.xz"
if [ -e "$TMP/$CACHE_FILE" ] || gsutil cp gs://cloud9_ci_cache/$CACHE_FILE $TMP &> /dev/null; then
rm -rf $PACKAGE_PATH/node_modules
tar -xkf $TMP/$CACHE_FILE || (
rm $TMP/$CACHE_FILE &>/dev/null || true
_b9_compile_node_modules "$CACHE_FILE" "$PACKAGE_PATH"
)
else
_b9_compile_node_modules "$CACHE_FILE" "$PACKAGE_PATH"
fi
}
_b9_compile_node_modules() {
local CACHE_FILE=$1
local PACKAGE_PATH=$2
local NPM_CMD
local TMP_TAR
if ! _b9_npm "$(pwd)/$PACKAGE_PATH" install; then
rm -rf node_modules
git checkout node_modules
_b9_npm "$(pwd)/$PACKAGE_PATH" install
fi
TMP_TAR=$(mktemp -d b9-npm-XXXXXXXXXXXXX --tmpdir=$TMP)/$CACHE_FILE
tar -cJf $TMP_TAR $PACKAGE_PATH/node_modules
gsutil cp $TMP_TAR gs://cloud9_ci_cache
mv $TMP_TAR $TMP/$CACHE_FILE
}