Commit 38b9ebab authored by Aurélien Vermylen

Merge branch 'master' into aurelien-test-wendelin

parents 57b5d1f4 3df26781
......@@ -34,9 +34,9 @@ md5sum = 2202b18f269ad606d70e1864857ed93c
[apache]
# inspired by http://old.aclark.net/team/aclark/blog/a-lamp-buildout-for-wordpress-and-other-php-apps/
recipe = slapos.recipe.cmmi
version = 2.4.27
version = 2.4.29
url = https://archive.apache.org/dist/httpd/httpd-${:version}.tar.bz2
md5sum = 97b6bbfa83c866dbe20ef317e3afd108
md5sum = 0c599404ef6b69eee95bcd9fcd094407
pre-configure =
cp -ar ${apr:location}/apr-${apr:version} srclib/apr/ &&
cp -ar ${apr-util:location}/apr-util-${apr-util:version} srclib/apr-util
......
From 8089fa02122fed0a6394eba14bbedcb1d18e2384 Mon Sep 17 00:00:00 2001
From: Kevin Atkinson <kevina@gnu.org>
Date: Thu, 29 Dec 2016 00:50:31 -0500
Subject: Compile Fixes for GCC 7.
Closes #519.
---
modules/filter/tex.cpp | 2 +-
prog/check_funs.cpp | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/filter/tex.cpp b/modules/filter/tex.cpp
index a979539..19ab63c 100644
--- a/modules/filter/tex.cpp
+++ b/modules/filter/tex.cpp
@@ -174,7 +174,7 @@ namespace {
if (c == '{') {
- if (top.in_what == Parm || top.in_what == Opt || top.do_check == '\0')
+ if (top.in_what == Parm || top.in_what == Opt || *top.do_check == '\0')
push_command(Parm);
top.in_what = Parm;
diff --git a/prog/check_funs.cpp b/prog/check_funs.cpp
index db54f3d..89ee09d 100644
--- a/prog/check_funs.cpp
+++ b/prog/check_funs.cpp
@@ -647,7 +647,7 @@ static void print_truncate(FILE * out, const char * word, int width) {
}
}
if (i == width-1) {
- if (word == '\0')
+ if (*word == '\0')
put(out,' ');
else if (word[len] == '\0')
put(out, word, len);
--
2.15.0.rc0
......@@ -5,6 +5,7 @@ parts =
extends =
../ncurses/buildout.cfg
../patch/buildout.cfg
../perl/buildout.cfg
[aspell-dictionary-common]
......@@ -15,8 +16,11 @@ configure-command = ./configure --vars ASPELL=${aspell:location}/bin/aspell PREZ
recipe = slapos.recipe.cmmi
url = http://ftp.gnu.org/gnu/aspell/aspell-0.60.6.1.tar.gz
md5sum = e66a9c9af6a60dc46134fdacf6ce97d7
patch-options = -p1
patches =
${:_profile_base_location_}/Compile-Fixes-for-GCC-7.patch#0159a8841e1241153d2105d157fc8225
environment =
PATH=${perl:location}/bin:%(PATH)s
PATH=${patch:location}/bin:${perl:location}/bin:%(PATH)s
CPPFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
......
diff -N -u -r bazel.orig/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java bazel/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java
--- bazel.orig/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java 1980-01-01 00:00:00.000000000 +0900
+++ bazel/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java 2017-01-02 22:19:09.326924214 +0900
@@ -172,7 +172,7 @@
@Param(
name = "use_default_shell_env",
type = Boolean.class,
- defaultValue = "False",
+ defaultValue = "True",
named = true,
positional = false,
doc = "whether the action should use the built in shell environment or not"
diff -u -r a/compile.sh b/compile.sh
--- a/compile.sh 1980-01-01 00:00:00.000000000 +0900
+++ b/compile.sh 2017-10-24 10:57:21.469702614 +0900
@@ -49,6 +49,8 @@
# a chance of overriding this in case they want to do so.
: ${VERBOSE:=no}
+export LDFLAGS=$LDFLAGS
+
source scripts/bootstrap/buildenv.sh
mkdir -p output
diff -u -r a/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java b/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java
--- a/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java 1980-01-01 00:00:00.000000000 +0900
+++ b/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java 2017-10-18 17:44:51.975049512 +0900
@@ -165,7 +165,7 @@
@Param(
name = "use_default_shell_env",
type = Boolean.class,
- defaultValue = "False",
+ defaultValue = "True",
named = true,
positional = false,
doc = "whether the action should use the built in shell environment or not."
# Description:
# The Bazel launcher.
package(
default_visibility = ["//visibility:public"],
)
WIN_LINK_OPTS = [
"-DEFAULTLIB:advapi32.lib", # GetUserNameW
"-DEFAULTLIB:ole32.lib", # CoTaskMemFree
"-DEFAULTLIB:shell32.lib", # SHGetKnownFolderPath
"-DEFAULTLIB:ws2_32.lib", # grpc
]
cc_library(
name = "blaze_util",
srcs = [
"blaze_util.cc",
"global_variables.h",
"startup_options.h",
] + select({
"//src:darwin": [
"blaze_util_darwin.cc",
"blaze_util_posix.cc",
],
"//src:darwin_x86_64": [
"blaze_util_darwin.cc",
"blaze_util_posix.cc",
],
"//src:freebsd": [
"blaze_util_freebsd.cc",
"blaze_util_posix.cc",
],
"//src:windows": [
"blaze_util_windows.cc",
],
"//src:windows_msys": [
"blaze_util_windows.cc",
],
"//src:windows_msvc": [
"blaze_util_windows.cc",
],
"//conditions:default": [
"blaze_util_linux.cc",
"blaze_util_posix.cc",
],
}),
hdrs = [
"blaze_util.h",
"blaze_util_platform.h",
],
linkopts = select({
"//src:darwin": [
"-framework CoreFoundation",
],
"//src:darwin_x86_64": [
"-framework CoreFoundation",
],
"//src:freebsd": [
],
"//src:windows": WIN_LINK_OPTS,
"//src:windows_msvc": WIN_LINK_OPTS,
"//conditions:default": [
"-lrt",
"{{ linkopts }}",
],
}),
deps = [
"//src/main/cpp/util",
"//src/main/cpp/util:blaze_exit_code",
] + select({
"//src:windows": ["//src/main/native/windows:lib-file"],
"//src:windows_msys": ["//src/main/native/windows:lib-file"],
"//src:windows_msvc": ["//src/main/native/windows:lib-file"],
"//conditions:default": [],
}),
)
cc_binary(
name = "client",
srcs = [
"blaze.cc",
"blaze.h",
"global_variables.cc",
"global_variables.h",
"main.cc",
] + select({
"//src:windows": ["//src/main/native/windows:resources.o"],
"//src:windows_msvc": ["//src/main/native/windows:resources.o"],
"//conditions:default": [],
}),
copts = select({
"//src:windows": ["/wd4018"],
"//src:windows_msvc": ["/wd4018"],
"//conditions:default": ["-Wno-sign-compare"],
}),
linkopts = select({
"//src:darwin": [
],
"//src:darwin_x86_64": [
],
"//src:freebsd": [
"-lprocstat",
"-lm",
],
"//src:windows_msvc": [
],
"//conditions:default": [
"-lrt",
"-ldl",
],
}),
visibility = ["//src:__pkg__"],
deps = [
":blaze_util",
":option_processor",
":startup_options",
":workspace_layout",
"//src/main/cpp/util",
"//src/main/cpp/util:errors",
"//src/main/cpp/util:logging",
"//src/main/cpp/util:strings",
"//src/main/protobuf:command_server_cc_proto",
"//third_party/ijar:zip",
],
)
cc_library(
name = "option_processor",
srcs = ["option_processor.cc"],
hdrs = [
"option_processor.h",
"option_processor-internal.h",
],
visibility = [
"//src:__pkg__",
"//src/test/cpp:__pkg__",
],
deps = [
":blaze_util",
":startup_options",
":workspace_layout",
"//src/main/cpp/util",
"//src/main/cpp/util:blaze_exit_code",
"//src/main/cpp/util:logging",
],
)
cc_library(
name = "startup_options",
srcs = ["startup_options.cc"],
hdrs = ["startup_options.h"],
visibility = [
"//src:__pkg__",
"//src/test/cpp:__pkg__",
],
deps = [
":blaze_util",
":workspace_layout",
"//src/main/cpp/util",
"//src/main/cpp/util:blaze_exit_code",
"//src/main/cpp/util:errors",
],
)
cc_library(
name = "workspace_layout",
srcs = ["workspace_layout.cc"],
hdrs = ["workspace_layout.h"],
visibility = [
"//src:__pkg__",
"//src/test/cpp:__pkg__",
],
deps = [
":blaze_util",
"//src/main/cpp/util",
],
)
filegroup(
name = "srcs",
srcs = glob(["**"]) + ["//src/main/cpp/util:srcs"],
visibility = ["//src:__pkg__"],
)
package(default_visibility = ["//src:__subpackages__"])
cc_library(
name = "logging",
srcs = ["logging.cc"],
hdrs = ["logging.h"],
)
cc_library(
name = "process-tools",
srcs = ["process-tools.cc"],
hdrs = ["process-tools.h"],
deps = [":logging"],
)
cc_binary(
name = "process-wrapper",
srcs = select({
"//src:windows": ["process-wrapper-windows.cc"],
"//src:windows_msvc": ["process-wrapper-windows.cc"],
"//conditions:default": [
"process-wrapper.cc",
"process-wrapper.h",
"process-wrapper-legacy.cc",
"process-wrapper-legacy.h",
"process-wrapper-options.cc",
"process-wrapper-options.h",
],
}),
linkopts = ["-lm", "{{ linkopts }}"],
deps = select({
"//src:windows": [],
"//src:windows_msvc": [],
"//conditions:default": [
":process-tools",
":logging",
],
}),
)
cc_binary(
name = "build-runfiles",
srcs = select({
"//src:windows": ["build-runfiles-windows.cc"],
"//src:windows_msvc": ["build-runfiles-windows.cc"],
"//conditions:default": ["build-runfiles.cc"],
}),
linkopts = ["{{ linkopts }}"],
)
cc_binary(
name = "linux-sandbox",
srcs = select({
"//src:darwin": ["dummy-sandbox.c"],
"//src:darwin_x86_64": ["dummy-sandbox.c"],
"//src:freebsd": ["dummy-sandbox.c"],
"//src:windows": ["dummy-sandbox.c"],
"//src:windows_msys": ["dummy-sandbox.c"],
"//src:windows_msvc": ["dummy-sandbox.c"],
"//conditions:default": [
"linux-sandbox.cc",
"linux-sandbox.h",
"linux-sandbox-options.cc",
"linux-sandbox-options.h",
"linux-sandbox-pid1.cc",
"linux-sandbox-pid1.h",
],
}),
linkopts = ["-lm", "{{ linkopts }}"],
deps = select({
"//src:darwin": [],
"//src:darwin_x86_64": [],
"//src:freebsd": [],
"//src:windows": [],
"//src:windows_msys": [],
"//src:windows_msvc": [],
"//conditions:default": [
":logging",
":process-tools",
],
}),
)
filegroup(
name = "jdk-support",
srcs = [
"jdk.BUILD",
],
)
exports_files([
"build_interface_so",
])
filegroup(
name = "srcs",
srcs = glob(["**"]),
visibility = ["//src:__pkg__"],
)
# Description:
# singlejar C++ implementation.
package(default_visibility = ["//src:__subpackages__"])
JAR_TOOL_PATH_COPT_TPL = "-DJAR_TOOL_PATH=\\\"external/local_jdk/bin/jar%s\\\""
JAR_TOOL_PATH_COPTS = select({
"//src:windows": [JAR_TOOL_PATH_COPT_TPL % ".exe"],
"//src:windows_msvc": [JAR_TOOL_PATH_COPT_TPL % ".exe"],
"//src:windows_msys": [JAR_TOOL_PATH_COPT_TPL % ".exe"],
"//conditions:default": [JAR_TOOL_PATH_COPT_TPL % ""],
})
filegroup(
name = "srcs",
srcs = glob(["**"]),
visibility = ["//src:__pkg__"],
)
filegroup(
name = "embedded_tools",
srcs = [
"BUILD",
"combiners.cc",
"combiners.h",
"diag.h",
"input_jar.cc",
"input_jar.h",
"mapped_file.h",
"options.cc",
"options.h",
"output_jar.cc",
"output_jar.h",
"singlejar_main.cc",
"token_stream.h",
"transient_bytes.h",
"zip_headers.h",
"zlib_interface.h",
],
visibility = ["//visibility:public"],
)
cc_binary(
name = "singlejar",
srcs = [
"singlejar_main.cc",
],
linkopts = select({
"//src:freebsd": ["-lm"],
"//conditions:default": ["{{ linkopts }}"],
}),
linkstatic = 1,
visibility = ["//visibility:public"],
deps = [
"options",
"output_jar",
"//third_party/zlib",
],
)
cc_test(
name = "combiners_test",
size = "large",
srcs = [
"combiners_test.cc",
":zip_headers",
":zlib_interface",
],
deps = [
":combiners",
":input_jar",
"//third_party:gtest",
"//third_party/zlib",
],
)
cc_test(
name = "input_jar_empty_jar_test",
srcs = [
"input_jar_empty_jar_test.cc",
],
data = [
"data/empty.zip",
],
deps = [
":input_jar",
":test_util",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_preambled_test",
srcs = [
"input_jar_preambled_test.cc",
],
data = [
":test1",
],
deps = [
":input_jar",
":test_util",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_scan_jartool_test",
size = "large",
srcs = [
"input_jar_scan_entries_test.h",
"input_jar_scan_jartool_test.cc",
],
copts = JAR_TOOL_PATH_COPTS,
data = [
"@local_jdk//:jar",
"@local_jdk//:jdk",
],
# Timing out, see https://github.com/bazelbuild/bazel/issues/1555
tags = ["manual"],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_scan_ziptool_test",
size = "large",
srcs = [
"input_jar_scan_entries_test.h",
"input_jar_scan_ziptool_test.cc",
],
# Timing out, see https://github.com/bazelbuild/bazel/issues/1555
tags = ["manual"],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
],
)
cc_test(
name = "input_jar_bad_jar_test",
srcs = [
"input_jar_bad_jar_test.cc",
],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
],
)
cc_test(
name = "options_test",
srcs = [
"options.h",
"options_test.cc",
],
deps = [
":options",
":test_util",
":token_stream",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
sh_test(
name = "output_jar_bash_test",
srcs = ["output_jar_shell_test.sh"],
args = [
"src/test/shell",
"$(location :singlejar)",
],
data = [
":singlejar",
],
deps = ["//src/test/shell:bashunit"],
)
cc_test(
name = "output_jar_simple_test",
srcs = [
"output_jar_simple_test.cc",
],
copts = JAR_TOOL_PATH_COPTS,
data = [
":data1",
":data2",
":stored_jar",
":test1",
":test2",
"@local_jdk//:jar",
"@local_jdk//:jdk-default",
],
deps = [
":input_jar",
":options",
":output_jar",
":test_util",
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_test(
name = "token_stream_test",
srcs = [
"token_stream_test.cc",
],
deps = [
":test_util",
":token_stream",
"//third_party:gtest",
],
)
cc_test(
name = "transient_bytes_test",
size = "large",
srcs = [
"transient_bytes_test.cc",
":transient_bytes",
":zlib_interface",
],
# Timing out, see https://github.com/bazelbuild/bazel/issues/1555
tags = ["manual"],
deps = [
":input_jar",
":test_util",
"//third_party:gtest",
"//third_party/zlib",
],
)
cc_test(
name = "zip_headers_test",
size = "small",
srcs = [
"zip_headers_test.cc",
":zip_headers",
],
deps = ["//third_party:gtest"],
)
cc_test(
name = "zlib_interface_test",
srcs = [
"zlib_interface_test.cc",
":zlib_interface",
],
deps = [
":test_util",
"//third_party:gtest",
"//third_party/zlib",
],
)
sh_test(
name = "zip64_test",
srcs = ["zip64_test.sh"],
args = [
"src/test/shell",
"$(location :singlejar)",
"$(location @local_jdk//:jar)",
],
data = [
":singlejar",
"//tools/defaults:jdk",
"@local_jdk//:jar",
],
deps = ["//src/test/shell:bashunit"],
)
cc_library(
name = "combiners",
srcs = [
"combiners.cc",
":transient_bytes",
":zip_headers",
],
hdrs = ["combiners.h"],
deps = ["//third_party/zlib"],
)
cc_library(
name = "input_jar",
srcs = [
"diag.h",
"input_jar.cc",
"mapped_file.h",
],
hdrs = [
"input_jar.h",
"zip_headers.h",
],
)
cc_library(
name = "options",
srcs = [
"diag.h",
"options.cc",
"options.h",
],
hdrs = ["options.h"],
deps = [
":token_stream",
],
)
cc_library(
name = "output_jar",
srcs = [
"diag.h",
"mapped_file.h",
"output_jar.cc",
"output_jar.h",
":zip_headers",
],
hdrs = ["output_jar.h"],
deps = [
":combiners",
":input_jar",
":options",
"//src/main/cpp/util",
"//third_party/zlib",
],
)
cc_library(
name = "test_util",
srcs = ["test_util.cc"],
hdrs = ["test_util.h"],
deps = [
"//src/main/cpp/util",
"//third_party:gtest",
],
)
cc_library(
name = "token_stream",
srcs = ["diag.h"],
hdrs = ["token_stream.h"],
)
filegroup(
name = "transient_bytes",
srcs = [
"diag.h",
"transient_bytes.h",
"zlib_interface.h",
":zip_headers",
],
)
filegroup(
name = "zip_headers",
srcs = ["zip_headers.h"],
)
filegroup(
name = "zlib_interface",
srcs = [
"diag.h",
"zlib_interface.h",
],
)
java_library(
name = "test1",
resources = [
"options.cc",
"zip_headers.h",
"zlib_interface.h",
],
)
java_library(
name = "test2",
resources = [
"token_stream.h",
"transient_bytes.h",
],
)
java_library(
name = "data1",
resources = [
"data/extra_file1",
"data/extra_file2",
],
)
java_library(
name = "data2",
resources = [
"data/extra_file1",
"data/extra_file3",
],
)
genrule(
name = "stored_jar",
srcs = [
"output_jar.cc",
"//tools/defaults:jdk",
],
outs = ["stored.jar"],
cmd = "$(location @local_jdk//:jar) -0cf \"$@\" $(location :output_jar.cc)",
tools = ["@local_jdk//:jar"],
)
build --verbose_failures
......@@ -24,36 +24,74 @@ template = ${:_profile_base_location_}/${:filename}.in
rendered = ${:location}/${:filename}
cpp_path = ${gcc:location}/bin/cpp
gcc_path = ${gcc:location}/bin/gcc
binutils_path = ${binutils:location}/bin
gconv_path = ${gcc:location}/bin/gconv
include_path = ${gcc:location}/include
gcc_lib64_path = ${gcc:location}/lib64
context =
key cpp_path template-bazel-crosstool:cpp_path
key gcc_path template-bazel-crosstool:gcc_path
key binutils_path template-bazel-crosstool:binutils_path
key gconv_path template-bazel-crosstool:gconv_path
key include_path template-bazel-crosstool:include_path
key gcc_lib64_path template-bazel-crosstool:gcc_lib64_path
[template-bazel-src-main-cpp-build]
recipe = slapos.recipe.template:jinja2
location = ${buildout:parts-directory}/${:_buildout_section_name_}
mode = 640
filename = bazel_src_main_cpp_BUILD
template = ${:_profile_base_location_}/${:filename}.in
rendered = ${:location}/${:filename}
linkopts = -Wl,-rpath,${gcc:location}/lib64
context =
key linkopts template-bazel-src-main-cpp-build:linkopts
[template-bazel-src-main-tools-build]
<= template-bazel-src-main-cpp-build
filename = bazel_src_main_tools_BUILD
[template-bazel-src-tools-singlejar-build]
<= template-bazel-src-main-cpp-build
filename = bazel_src_tools_singlejar_BUILD
[bazel]
recipe = slapos.recipe.build
url = https://github.com/bazelbuild/bazel/releases/download/0.4.3/bazel-0.4.3-dist.zip
md5sum = cbd53f6f59915506da8998dab2098921
url = https://github.com/bazelbuild/bazel/releases/download/0.6.1/bazel-0.6.1-dist.zip
md5sum = 8c5c827e33d3ff74c263c1299810b485
patch-binary = ${patch:location}/bin/patch
patch-file-path = ${:_profile_base_location_}/bazel-0.4.3.patch
patch-file-path = ${:_profile_base_location_}/bazel-0.6.1.patch
bazel-crosstool-modified-file-path = ${template-bazel-crosstool:rendered}
bazel-src-main-cpp-build-path = ${template-bazel-src-main-cpp-build:rendered}
bazel-src-main-tools-build-path = ${template-bazel-src-main-tools-build:rendered}
bazel-src-tools-singlejar-build-path = ${template-bazel-src-tools-singlejar-build:rendered}
unzip-bin = ${unzip:location}/bin
zip-bin = ${zip:location}/bin
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
gcc-lib64 = ${gcc:location}/lib64
java_home = ${zulu:location}
bazelrc = ${:_profile_base_location_}/bazelrc
script =
extract_dir = self.extract(self.download(self.options['url'], self.options['md5sum']))
crosstool_path = os.path.join(extract_dir, 'tools', 'cpp', 'CROSSTOOL')
os.chmod(crosstool_path, 0644)
shutil.copy(self.options['bazel-crosstool-modified-file-path'],
crosstool_path)
target_path = extract_dir+'/src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleImplementationFunctions.java'
src_main_cpp_build_path = os.path.join(extract_dir, 'src', 'main', 'cpp', 'BUILD')
os.chmod(src_main_cpp_build_path, 0644)
shutil.copy(self.options['bazel-src-main-cpp-build-path'],
src_main_cpp_build_path)
src_main_tools_build_path = os.path.join(extract_dir, 'src', 'main', 'tools', 'BUILD')
os.chmod(src_main_tools_build_path, 0644)
shutil.copy(self.options['bazel-src-main-tools-build-path'],
src_main_tools_build_path)
src_tools_singlejar_build_path = os.path.join(extract_dir, 'src', 'tools', 'singlejar', 'BUILD')
os.chmod(src_tools_singlejar_build_path, 0644)
shutil.copy(self.options['bazel-src-tools-singlejar-build-path'],
src_tools_singlejar_build_path)
target_path = extract_dir+'/src/main/java/com/google/devtools/build/lib/analysis/skylark/SkylarkRuleImplementationFunctions.java'
os.chmod(target_path, 0644)
call([self.buildout['bazel']['patch-binary'], '-p1', '-d', extract_dir, '-i', self.buildout['bazel']['patch-file-path']])
path = ':'.join((
......@@ -65,13 +103,15 @@ script =
env = {'JAVA_HOME':self.options['java_home'],
'PATH':path,
'LD_LIBRARY_PATH':':'.join((
self.options['gcc-lib'],
self.options['gcc-lib64'],
self.options['gcc-lib'],
self.options['gcc-lib64'],
os.environ.get('LD_LIBRARY_PATH', '')
)),
'LDFLAGS':'-Wl,-rpath='+self.options['gcc-lib64'],
'CC':self.options['gcc-bin']+'/gcc',
'CXX':self.options['gcc-bin']+'/g++',
'BAZELRC':self.options['bazelrc'],
'VERBOSE':'yes',
}
bin_dir = os.path.join(self.options['location'], 'bin')
os.makedirs(bin_dir)
......
......@@ -11,9 +11,9 @@ parts =
[ca-certificates]
recipe = slapos.recipe.cmmi
url = http://http.debian.net/debian/pool/main/c/ca-certificates/ca-certificates_20161130+nmu1.tar.xz
url = http://http.debian.net/debian/pool/main/c/ca-certificates/ca-certificates_20170717.tar.xz
patch-binary = ${patch:location}/bin/patch
md5sum = a09e8b63126188fd0ed77f6fbaf5d35f
md5sum = 55a6bb6b98afb16b3cde8e3ad1e262eb
patches =
${:_profile_base_location_}/ca-certificates-any-python.patch#087b5e860c7a4b8ff6656c95c5835ee2
${:_profile_base_location_}/ca-certificates-sbin-dir.patch#0b4e7d82ce768823c01954ee41ef177b
......
......@@ -40,7 +40,7 @@ interpreter = chainer-python
scripts = chainer-python
[versions]
chainer = 1.22.0
filelock = 2.0.7
chainer = 3.0.0
filelock = 2.0.12
nose = 1.3.7
six = 1.10.0
six = 1.11.0
......@@ -6,14 +6,15 @@ extends =
../openssl/buildout.cfg
../perl/buildout.cfg
../pkgconfig/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
parts =
curl
[curl]
recipe = slapos.recipe.cmmi
url = http://curl.haxx.se/download/curl-7.54.1.tar.bz2
md5sum = 6b6eb722f512e7a24855ff084f54fe55
url = http://curl.haxx.se/download/curl-7.56.0.tar.xz
md5sum = 18ebc36c5dc9317d4a0b5db94a4e12ad
configure-options =
--disable-static
--disable-ldap
......@@ -44,6 +45,6 @@ configure-options =
--without-nghttp2
environment =
PATH=${perl:location}/bin:${pkgconfig:location}/bin:%(PATH)s
PATH=${perl:location}/bin:${pkgconfig:location}/bin:${xz-utils:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig
LDFLAGS=-Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${openssl:location}/lib
# ethtool - query or control network driver and hardware settings
# https://www.kernel.org/pub/software/network/ethtool
[buildout]
parts = ethtool
[ethtool]
recipe = slapos.recipe.cmmi
url = https://www.kernel.org/pub/software/network/ethtool/ethtool-4.11.tar.xz
md5sum = 16d38f4ebe23e44f96f7d8b38ed3652c
......@@ -19,7 +19,6 @@ configure-options =
--without-libidn
--without-x
--with-drivers=FILES
--with-fontpath=${ghostscript-fonts:location}
# it seems that parallel build sometimes fails for ghostscript.
make-options = -j1
environment =
......@@ -35,9 +34,3 @@ environment =
<= ghostscript-common
url = https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs920/ghostscript-9.20.tar.xz
md5sum = 8f3d383d48da22345937b66b01ab2960
[ghostscript-fonts]
recipe = slapos.recipe.build:download-unpacked
strip-top-level-dir = false
url = http://downloads.ghostscript.com/public/fonts/urw-base35-v1.10.zip
md5sum = 66e8bbd8228519d5dba82b9433a61bb0
......@@ -6,6 +6,7 @@ extends =
../perl/buildout.cfg
../xz-utils/buildout.cfg
../zlib/buildout.cfg
../python-2.7/buildout.cfg
parts =
glib
......
[buildout]
extends =
../bzip2/buildout.cfg
../file/buildout.cfg
../gnutls/buildout.cfg
../sqlite3/buildout.cfg
parts = gnupg
[npth]
<= gpg-common
version = 1.5
md5sum = 9ba2dc4302d2f32c66737c43ed191b1b
configure-options-extra =
--disable-tests
[libassuan]
<= with-gpg-error
version = 2.4.3
md5sum = 8e01a7c72d3e5d154481230668e6eb5a
[libksba]
<= with-gpg-error
version = 1.3.5
md5sum = 8302a3e263a7c630aa7dea7d341f07a2
[gnupg]
<= libgcrypt
version = 2.2.1
md5sum = f781efae8756f6cf5d500aad8e4b33e2
configure-options +=
--disable-ldap
--disable-card-support
--disable-photo-viewers
--with-bzip2=${bzip2:location}
--with-ksba-prefix=${libksba:location}
--with-libassuan-prefix=${libassuan:location}
--with-libgcrypt-prefix=${libgcrypt:location}
--with-npth-prefix=${npth:location}
--with-readline=${readline:location}
environment =
PATH=${pkgconfig:location}/bin:${gettext:location}/bin:%(PATH)s
PKG_CONFIG_PATH=${gnutls:pkg-config-path}:${gnutls:location}/lib/pkgconfig:${sqlite3:location}/lib/pkgconfig
CPPFLAGS=-I${file:location}/include -I${libgcrypt:location}/include
LDFLAGS=-L${file:location}/lib -Wl,-rpath=${zlib:location}/lib -Wl,-rpath=${readline:location}/lib -Wl,-rpath=${bzip2:location}/lib -Wl,-rpath=${sqlite3:location}/lib -Wl,-rpath=${gettext:location}/lib -Wl,-rpath=${file:location}/lib -Wl,-rpath=${libgpg-error:location}/lib -Wl,-rpath -Wl,${gnutls:location}/lib -Wl,-rpath=${libksba:location}/lib -Wl,-rpath=${libassuan:location}/lib -Wl,-rpath=${libgcrypt:location}/lib -Wl,-rpath=${npth:location}/lib
......@@ -14,7 +14,6 @@ url = http://www.gnupg.org/ftp/gcrypt/${:_buildout_section_name_}/${:_buildout_s
configure-options = ${:configure-options-extra}
--enable-option-checking=fatal
--disable-dependency-tracking
--disable-doc
environment = ${:environment-extra}
PATH=${gettext:location}/bin:%(PATH)s
environment-extra =
......@@ -24,22 +23,30 @@ environment-extra =
version = 1.27
md5sum = 5217ef3e76a7275a2a3b569a12ddc989
configure-options-extra =
--disable-doc
--disable-tests
[libgcrypt]
[with-gpg-error]
<= gpg-common
version = 1.8.1
md5sum = b21817f9d850064d2177285f1073ec55
configure-options-extra =
configure-options-extra = ${:configure-options-extra2}
--with-gpg-error-prefix=${libgpg-error:location}
configure-options-extra2 =
environment-extra =
LDFLAGS=-Wl,-rpath=${libgpg-error:location}/lib
[libgcrypt]
<= with-gpg-error
version = 1.8.1
md5sum = b21817f9d850064d2177285f1073ec55
configure-options-extra2 =
--disable-doc
[gnutls]
<= gpg-common
url = http://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.15.tar.xz
md5sum = bcdcbc65c50a7499617ad9f4d0058de9
configure-options-extra =
--disable-doc
--disable-static
--disable-libdane
--disable-guile
......
# Go language - https://golang.org/
[buildout]
parts = golang
extends =
../findutils/buildout.cfg
../git/buildout.cfg
[golang]
<= golang18
parts = gowork
# ---- Go builds itself ----
[golang-common]
recipe = slapos.recipe.cmmi
......@@ -12,42 +14,108 @@ configure-command = :
location = ${buildout:parts-directory}/${:_buildout_section_name_}
make-binary =
make-targets= cd src && ./all.bash && cp -alf .. ${:location}
# some testdata files have an issue with slapos.extension.strip.
post-install = ${findutils:location}/bin/find ${:location}/src -type d -name testdata -exec rm -rf {} \; || true
environment =
GOROOT_FINAL=${:location}
${:environment-extra}
[golang14-repository]
<= git-repository
repository = https://github.com/golang/go.git
revision = go1.4.3-16-g4d5426a
[golang14]
<= golang-common
url = https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz
md5sum = 76e42c8152e8560ded880a6d1d1f53cb
path = ${golang14-repository:location}
environment-extra =
make-targets= cd src && ./make.bash && cp -alf .. ${:location}
[golang15]
[golang18]
<= golang-common
url = https://storage.googleapis.com/golang/go1.5.4.src.tar.gz
md5sum = a04d570515c46e4935c63605cbd3a04e
url = https://storage.googleapis.com/golang/go1.8.3.src.tar.gz
md5sum = 64e9380e07bba907e26a00cf5fcbe77e
# go1.5 needs go1.4 to bootstrap
# go1.8 needs go1.4 to bootstrap
environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang16]
[golang19]
<= golang-common
url = https://storage.googleapis.com/golang/go1.6.3.src.tar.gz
md5sum = bf3fce6ccaadd310159c9e874220e2a2
url = https://storage.googleapis.com/golang/go1.9.2.src.tar.gz
md5sum = 44105c865a1a810464df79233a05a568
# go1.6 needs go1.4 to bootstrap
# go1.9 needs go1.4 to bootstrap
environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
[golang18]
<= golang-common
url = https://storage.googleapis.com/golang/go1.8.src.tar.gz
md5sum = 7743960c968760437b6e39093cfe6f67
# go1.8 needs go1.4 to bootstrap
environment-extra =
GOROOT_BOOTSTRAP=${golang14:location}
# ---- infrastructure to build Go workspaces / projects ----
# gowork is a top-level section representing workspace
#
# users should add an `install` field to [gowork] listing the packages they want
# installed (their dependencies are then installed automatically too), e.g.
#
# [gowork]
# install =
# lab.nexedi.com/kirr/neo/go/... \
# github.com/pkg/profile \
# golang.org/x/perf/cmd/benchstat
[gowork]
directory = ${buildout:directory}/go.work
src = ${:directory}/src
bin = ${:directory}/bin
depends = ${gowork.goinstall:recipe}
# go version used for the workspace (possible to override in applications)
golang = ${golang19:location}
# everything is done by dependent parts
recipe = plone.recipe.command
command = :
# env.sh for compiling and running go programs
[gowork]
env.sh = ${gowork-env.sh:output}
[gowork-env.sh]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/goenv.sh.in
output = ${gowork:directory}/env.sh
depends = ${gowork.mkdir:recipe}
md5sum = a9a265135931b3da53f4392870748264
[gowork.mkdir]
# NOTE do not use slapos.cookbook:mkdirectory here - if anything in software (not instance)
# uses slapos.cookbook:* in recipe - slapos.cookbook will get compiled against system
# libxml/libxslt or fail to bootstrap at all if those are not present.
recipe = plone.recipe.command
command = mkdir -p ${gowork:directory}
update-command = ${:command}
stop-on-error = true
# install go packages
# clients should set the list of packages to install in gowork:install ("..." requests installing everything);
# an illustrative example follows this section
[gowork.goinstall]
recipe = plone.recipe.command
command = bash -c ". ${gowork:env.sh} && go install -v ${gowork:install}"
update-command = ${:command}
stop-on-error = true
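As a concrete illustration of the comments above (an editor's sketch, not part of this commit; the application profile shown is hypothetical), a software profile extends this file, fills in gowork:install and may override the Go toolchain:

[buildout]
extends = ../golang/buildout.cfg

[gowork]
# use go1.8 instead of the default go1.9 selected above
golang = ${golang18:location}
install =
    github.com/pkg/profile
    golang.org/x/perf/cmd/benchstat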
[git-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
# a go package should:
# 1) <= go-git-package
# 2) provide go.importpath
# 3) provide repository (which is not the same as importpath in general case)
#
# the list of go packages for a go workspace state can be generated automatically
# with the help of the go-pkg-snapshot tool; a sketch of such a generated section
# follows [go-git-package] below.
[go-git-package]
<= git-repository
location = ${gowork:src}/${:go.importpath}
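For illustration, a pinned package section of the kind the gowork-snapshot script below generates might look like this (the package, repository URL and revision are hypothetical examples, not pins introduced by this commit):

[go_github.com_pkg_profile]
<= go-git-package
go.importpath = github.com/pkg/profile
repository = https://github.com/pkg/profile
revision = v1.2.1-0-gabcdef1234

The script also emits a matching depends_gitfetch entry under [gowork.goinstall], e.g. ${go_github.com_pkg_profile:recipe}, so the repository is cloned before `go install` runs.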
# env.sh for a Go workspace
# Usage: env.sh [/path/to/env.sh]
# ---- 8< ---- (buildout substitution here)
# PATH so that go & friends work out of the box
export PATH=${gowork:golang}/bin:${git:location}/bin:${buildout:bin-directory}:$PATH
X=${gowork:directory}
# ---- 8< ----
export GOPATH=$X:$GOPATH
export PATH=$X/bin:$PATH
export PS1="(`basename $X`) $PS1"
# strip trailing : from $GOPATH
GOPATH=$${GOPATH%:}
#!/bin/bash
# gowork-snapshot - find out installed go packages and produce buildout code to install them pinned
# FIXME currently works only with cwd=gowork/src
echo "# Code generated by gowork-snapshot; DO NOT EDIT."
# list installed go git repositories.
#
# this gives something like:
# github.com/cznic/strutil https://github.com/cznic/strutil 529a34b1c1
# golang.org/x/net https://go.googlesource.com/net 1087133bc4
# ...
gogit_list() {
find . -name .git | sort | \
while read repo; do
importpath=${repo#./}
importpath=${importpath%/.git}
echo -ne "${importpath}\t"
echo -ne "`git_upstream_url $repo`\t"
git -C $repo describe --long --always --abbrev=10
done
}
# git_upstream_url <repo> - show current branch upstream URL
git_upstream_url() {
repo=$1
head="`git -C $repo symbolic-ref --short HEAD`" # current branch - e.g. "t"
remote="`git -C $repo config --get branch.$head.remote`" # upstream name, e.g. "kirr"
url="`git -C $repo config --get remote.$remote.url`" # upstream URL
echo "$url"
}
# buildout_safe <name> - canonicalize name to be allowed to be used in buildout section name
# e.g. aaa/bbb -> aaa_bbb
#
# XXX can't use e.g. "go!lab.nexedi.com/kirr/neo" because buildout disallows
# "!" or "/" in section name references.
buildout_safe() {
# see _simple regex in slapos.buildout
echo -n "$1" | tr --complement -- "-a-zA-Z0-9 ._" _
}
echo
echo "# list of go git repositories to fetch"
echo "[gowork.goinstall]"
echo "depends_gitfetch ="
gogit_list | \
while read pkg _ _; do
echo " \${go_`buildout_safe $pkg`:recipe}"
done
echo
gogit_list | \
while read pkg url rev; do
echo
echo "[go_`buildout_safe $pkg`]"
echo "<= go-git-package"
echo "go.importpath = $pkg"
echo "repository = $url"
echo "revision = $rev"
done
......@@ -12,8 +12,8 @@ parts = haproxy
[haproxy]
recipe = slapos.recipe.cmmi
url = http://www.haproxy.org/download/1.7/src/haproxy-1.7.5.tar.gz
md5sum = ed84c80cb97852d2aa3161ed16c48a1c
url = http://www.haproxy.org/download/1.7/src/haproxy-1.7.9.tar.gz
md5sum = a2bbbdd45ffe18d99cdcf26aa992f92d
configure-command = true
# If the system is running on Linux 2.6, we use "linux26" as the TARGET,
# otherwise use "generic".
......
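The haproxy hunk above is truncated right after that comment; purely as a hypothetical sketch of the TARGET switch it describes (not the profile's actual code, and the kernel-version test is simplified), the selection could be written as a shell test inside make-options:

make-options =
  TARGET="$(uname -sr | grep -q '^Linux 2\.6' && echo linux26 || echo generic)"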
......@@ -89,7 +89,7 @@ input = inline:
recipe = slapos.recipe.cmmi
path = ${helloweb-repository:location}/go/
go = ${golang18:location}/bin/go
go = ${golang19:location}/bin/go
configure-command = :
make-binary =
make-targets= cd ${:path} &&
......
......@@ -54,7 +54,6 @@ configure-options =
--without-wmf
--with-bzlib=${bzip2:location}
--with-zlib=${zlib:location}
--with-gs-font-dir=${ghostscript-fonts:location}
--with-frozenpaths
patch-options = -p1
patches =
......
# ioping - simple disk I/O latency measuring tool
# https://github.com/koct9i/ioping
[buildout]
extends =
../git/buildout.cfg
parts =
ioping
[ioping-repository]
recipe = slapos.recipe.build:gitclone
git-executable = ${git:location}/bin/git
# NOTE we use a slightly patched ioping version which shows not only the average latency but also its distribution
# repository = https://github.com/koct9i/ioping.git
repository = https://lab.nexedi.com/kirr/ioping.git
revision = v1.0-9-g34c97f7636
[ioping]
recipe = slapos.recipe.cmmi
path = ${ioping-repository:location}
configure-command = :
make-binary =
# XXX without the explicit location setting below, PREFIX=${:location} does not work
location= ${buildout:parts-directory}/${:_buildout_section_name_}
make-targets= make install PREFIX=${:location}
......@@ -25,7 +25,7 @@ interpreter = keras-python
scripts = keras-python
[versions]
Keras = 2.0.1
tensorflow = 1.0.1
Keras = 2.1.0
tensorflow = 1.4.0
h5py = 2.7.0rc2
Cython = 0.25.2
......@@ -18,15 +18,15 @@ environment =
[libpng12]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.2.57.tar.xz
md5sum = 307052e5e8af97b82b17b64fb1b3677a
url = http://download.sourceforge.net/libpng/libpng-1.2.58.tar.xz
md5sum = 1fe68fa3cdab99dbcfd2a6b4de95645f
[libpng15]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.5.28.tar.xz
md5sum = 847aa2a1b231c07466d7f4167537424a
url = http://download.sourceforge.net/libpng/libpng-1.5.29.tar.xz
md5sum = b9e5452ee9681c313638efedb16c12a6
[libpng]
<= libpng-common
url = http://download.sourceforge.net/libpng/libpng-1.6.28.tar.xz
md5sum = 425354f86c392318d31aedca71019372
url = http://download.sourceforge.net/libpng/libpng-1.6.32.tar.xz
md5sum = e01be057a9369183c959b793a685ad15
# LMbench - Tools for Performance Analysis
# http://www.bitmover.com/lmbench/
[buildout]
extends =
../git/buildout.cfg
../golang/buildout.cfg
parts =
lmbench
[lmbench-repository]
<= git-repository
# NOTE we use a slightly patched lmbench version with fixes so that lat_tcp errors do not go unnoticed, plus other addons
# repository = https://svn.code.sf.net/p/lmbench/code
repository = https://lab.nexedi.com/kirr/lmbench.git
revision = 9b108b6ff3
[lmbench]
recipe = slapos.recipe.cmmi
path = ${lmbench-repository:location}
configure-command = :
make-binary =
# XXX without the explicit location setting below, BASE=${:location} does not work
location= ${buildout:parts-directory}/${:_buildout_section_name_}
make-targets= cd ${lmbench-repository:location}/lmbench3/src
&& make install BASE=${:location}
&& bash -c ". ${gowork:env.sh} && go build -o ${:location}/bin/lat_tcp_go lat_tcp.go"
......@@ -20,14 +20,15 @@ extends =
# The following lines are only for mariarocks.cfg
# to be extended last without touching 'parts'.
../gcc/buildout.cfg
../zstd/buildout.cfg
parts =
mariadb
[mariadb]
recipe = slapos.recipe.cmmi
url = https://downloads.mariadb.org/f/mariadb-10.1.26/source/mariadb-10.1.26.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
md5sum = bb88afb72434c6d567c742896dd50d41
url = https://downloads.mariadb.org/f/mariadb-10.1.28/source/mariadb-10.1.28.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
md5sum = 38acd5b44c56791701d80fddf088ef38
patch-options = -p0
patches =
${:_profile_base_location_}/mariadb_10.1.21_create_system_tables__no_test.patch#3c76aa9564a162f13aced7c0a3f783b3
......@@ -61,12 +62,14 @@ configure-options =
-DCMAKE_C_FLAGS="${:CMAKE_CFLAGS}"
-DCMAKE_CXX_FLAGS="${:CMAKE_CFLAGS}"
-DCMAKE_INSTALL_RPATH=${:CMAKE_LIBRARY_PATH}
CMAKE_CFLAGS = -I${bzip2:location}/include -I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include
CMAKE_CFLAGS = -I${bzip2:location}/include -I${jemalloc:location}/include -I${libaio:location}/include -I${libxml2:location}/include -I${ncurses:location}/include -I${openssl:location}/include -I${pcre:location}/include -I${readline5:location}/include -I${xz-utils:location}/include -I${zlib:location}/include ${:extra_cflags}
CMAKE_LIBRARY_PATH = ${bzip2:location}/lib:${jemalloc:location}/lib:${libaio:location}/lib:${libxml2:location}/lib:${ncurses:location}/lib:${openssl:location}/lib:${pcre:location}/lib:${readline5:location}/lib:${xz-utils:location}/lib:${zlib:location}/lib${:extra_library_path}
extra_cflags =
extra_include_path =
extra_library_path =
environment =
CMAKE_PROGRAM_PATH=${cmake:location}/bin
CMAKE_INCLUDE_PATH=${bzip2:location}/include:${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${xz-utils:location}/include:${zlib:location}/include
CMAKE_INCLUDE_PATH=${bzip2:location}/include:${libaio:location}/include:${libaio:location}/include:${libxml2:location}/include:${ncurses:location}/include:${openssl:location}/include:${pcre:location}/include:${readline5:location}/include:${xz-utils:location}/include:${zlib:location}/include${:extra_include_path}
CMAKE_LIBRARY_PATH=${:CMAKE_LIBRARY_PATH}
LDFLAGS=-L${bzip2:location}/lib -L${jemalloc:location}/lib -L${libaio:location}/lib -L${pcre:location}/lib -L${xz-utils:location}/lib -L${zlib:location}/lib
PATH=${patch:location}/bin:%(PATH)s
......
# Do not extend any file that touches buildout:parts.
[mariadb]
url = https://downloads.mariadb.org/f/mariadb-10.2.8/source/mariadb-10.2.8.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
md5sum = f93cbd5bfde3c0d082994764ff7db580
url = https://downloads.mariadb.org/f/mariadb-10.2.9/source/mariadb-10.2.9.tar.gz/from/http%3A//fr.mirror.babylon.network/mariadb/?serve
md5sum = c59999bd182ddeb3db3d55250aecd8f8
stable-patches =
configure-options +=
-DPLUGIN_DAEMON_EXAMPLE=NO
......@@ -10,4 +10,6 @@ configure-options +=
-DPLUGIN_MROONGA=NO
-DCMAKE_C_COMPILER=${gcc:location}/bin/gcc
-DCMAKE_CXX_COMPILER=${gcc:location}/bin/g++
extra_library_path = :${gcc:location}/lib:${gcc:location}/lib64
extra_cflags = -I${zstd:location}/include
extra_include_path = :${zstd:location}/include
extra_library_path = :${zstd:location}/lib:${gcc:location}/lib:${gcc:location}/lib64
[buildout]
extends =
../pkgconfig/buildout.cfg
../protobuf/buildout.cfg
../ncurses/buildout.cfg
../perl/buildout.cfg
../zlib/buildout.cfg
../openssl/buildout.cfg
[mosh]
recipe = slapos.recipe.cmmi
url = https://mosh.org/mosh-1.3.0.tar.gz
md5sum = d961276995936953bf2d5a794068b076
configure-options =
--with-curses=${ncurses:location}
environment =
PATH=${perl:location}/bin:${pkgconfig:location}/bin:${protobuf-cpp:location}/bin:%(PATH)s
CXXFLAGS =-I${zlib:location}/include
LDFLAGS=-L${zlib:location}/lib -Wl,-rpath=${zlib:location}/lib -L${protobuf-cpp:location}/lib -Wl,-rpath=${protobuf-cpp:location}/lib -L${openssl:location}/lib -Wl,-rpath=${openssl:location}/lib -L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
PKG_CONFIG_PATH=${protobuf-cpp:location}/lib/pkgconfig:${openssl:location}/lib/pkgconfig:${ncurses:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
......@@ -27,4 +27,13 @@ environment =
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--exec-prefix=${buildout:parts-directory}/${:_buildout_section_name_}
--with-privsep-path=${buildout:parts-directory}/${:_buildout_section_name_}/var/empty
\ No newline at end of file
--with-privsep-path=${buildout:parts-directory}/${:_buildout_section_name_}/var/empty
[openssh-output]
# Shared binary location to ease migration
recipe = plone.recipe.command
stop-on-error = true
update-command = ${:command}
command = ${coreutils-output:test} -x ${:ssh} -a -x ${:keygen}
ssh = ${openssh:location}/bin/ssh
keygen = ${openssh:location}/bin/ssh-keygen
\ No newline at end of file
......@@ -16,8 +16,8 @@ parts =
[openssl]
recipe = slapos.recipe.cmmi
url = https://www.openssl.org/source/openssl-1.0.2k.tar.gz
md5sum = f965fc0bf01bf882b31314b61391ae65
url = https://www.openssl.org/source/openssl-1.0.2m.tar.gz
md5sum = 10e9e37f492094b9ef296f68f24a7666
location = ${buildout:parts-directory}/${:_buildout_section_name_}
# 'prefix' option to override --openssldir/--prefix (which is useful
# when combined with INSTALL_PREFIX). Used by slapos.package.git/obs
......
......@@ -13,8 +13,8 @@ recipe = slapos.recipe.cmmi
depends =
${perl-DBI:location}
${perl-Devel-CheckLib:location}
url = http://www.cpan.org/modules/by-module/DBD/DBD-mysql-4.042.tar.gz
md5sum = a144bd950b55af68835d44bc4ea6e5aa
url = http://www.cpan.org/modules/by-module/DBD/DBD-mysql-4.043.tar.gz
md5sum = 4a00dd7f1c057931147c65dfc4901c36
patches =
${:_profile_base_location_}/DBD-mysql-4.027.rpathsupport.patch#a932982b7725e6621cfce3a3d7917e03
${:_profile_base_location_}/DBD-mysql-4.042.mariadb.patch#5864d36d19c4a05034b3a4873f7c659a
......
......@@ -7,9 +7,9 @@ parts =
[perl]
recipe = slapos.recipe.cmmi
version = 5.26.0
version = 5.26.1
url = http://www.cpan.org/src/5.0/perl-${:version}.tar.bz2
md5sum = 8d34cb5a4eccd66e7a6a80e62b7b4aec
md5sum = 467cd0c43514b9b5e397c8b385581f53
siteprefix = ${buildout:parts-directory}/site_${:_buildout_section_name_}
patch-options = -p1
patches =
......
......@@ -35,5 +35,5 @@ environment =
[postgresql92]
<= postgresql-common
url = http://ftp.postgresql.org/pub/source/v9.2.22/postgresql-9.2.22.tar.bz2
md5sum = c5d3fb5229baf9e94ee2287980c55321
url = http://ftp.postgresql.org/pub/source/v9.2.23/postgresql-9.2.23.tar.bz2
md5sum = c972e32b7f17dbc652d2462b7690d116
......@@ -3,6 +3,15 @@ parts = protobuf
[protobuf]
recipe = slapos.recipe.cmmi
url = https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-python-3.1.0.tar.gz
md5sum = 7a227a21379a2ea08cc5d7ba1fb1ba5b
url = https://github.com/google/protobuf/releases/download/v3.4.0/protobuf-python-3.4.0.tar.gz
md5sum = 0820cc2e56d71aef8e99794fcbd184cd
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[protobuf-cpp]
recipe = slapos.recipe.cmmi
url = https://github.com/google/protobuf/releases/download/v3.4.0/protobuf-cpp-3.4.0.tar.gz
md5sum = 6d59dad503bea5ad420fd09ddad84481
configure-command =
./autogen.sh
./configure --prefix=${buildout:parts-directory}/${:_buildout_section_name_}
......@@ -28,9 +28,9 @@ python = python2.7
[python2.7]
recipe = slapos.recipe.cmmi
package_version = 2.7.13
package_version = 2.7.14
package_version_suffix =
md5sum = 53b43534153bb2a0363f08bae8b9d990
md5sum = 1f6db41ad91d9eb0a6f0c769b8613c5b
# This is actually the default setting for prefix, but we can't use it in
# other settings in this part if we don't set it explicitly here.
......
[buildout]
extends =
../../stack/slapos.cfg
../gcc/buildout.cfg
../cython/buildout.cfg
../scipy/buildout.cfg
parts =
python-cocoapi-build-install-egg
[python-cocoapi-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/cocodataset/cocoapi.git
tag = master
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --quiet -b ${:tag} ${:repository} ${:location}; cd ${:location}; ${:patch-binary} -p1 -d . < ${:_profile_base_location_}/setup.py.patch ) || (rm -fr ${:location}; exit 1)
[python-cocoapi-build-interpreter]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
setuptools
${cython:egg}
${scipy:egg}
${numpy:egg}
interpreter = python-cocoapi-build-interpreter
scripts = python-cocoapi-build-interpreter
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[python-cocoapi-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
workdir = ${python-cocoapi-repository:location}/PythonAPI
python-bin = ${buildout:bin-directory}/${python-cocoapi-build-interpreter:interpreter}
gcc-location = ${gcc:location}
script =
os.makedirs(location)
workdir = self.options['workdir']
python_bin = self.options['python-bin']
gcc_location = self.options['gcc-location']
env = {'PATH':':'.join([gcc_location+'/bin',
os.environ['PATH']]),
'CC':gcc_location+'/bin/gcc',
'CXX':gcc_location+'/bin/g++',
}
call([python_bin, 'setup.py', 'build_ext'], cwd=workdir, env=env)
call([python_bin, 'setup.py', 'bdist_egg'], cwd=workdir, env=env)
[python-cocoapi-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
python-cocoapi-repository-path = ${python-cocoapi-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-python-cocoapi-build = ${python-cocoapi-build:location}
egg = pycocotools
script =
os.makedirs(location)
workdir = self.options['python-cocoapi-repository-path']+'/PythonAPI'
egg_name = 'pycocotools-2.0-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
diff --git a/PythonAPI/setup.py b/PythonAPI/setup.py
index eb3d508..d619b3c 100644
--- a/PythonAPI/setup.py
+++ b/PythonAPI/setup.py
@@ -1,4 +1,5 @@
-from distutils.core import setup
+#from distutils.core import setup
+from setuptools import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
@@ -21,4 +22,4 @@ setup(name='pycocotools',
version='2.0',
ext_modules=
cythonize(ext_modules)
- )
\ No newline at end of file
+ )
......@@ -2,7 +2,6 @@
extends =
../mariadb/buildout.cfg
../openssl/buildout.cfg
../patch/buildout.cfg
../zlib/buildout.cfg
parts =
......@@ -14,11 +13,6 @@ PATH =${mariadb:location}/bin:%(PATH)s
[python-mysqlclient]
recipe = zc.recipe.egg:custom
egg = mysqlclient
patches =
https://github.com/PyMySQL/mysqlclient-python/commit/1693848c9f6ca863868d94d63499830f7f4f3a1f.diff#a493a91f9263243eb331fcab9901b8b0
${:_profile_base_location_}/mysqlclient-1.3.10-mariadb-10.2.8.patch#807b694fcd7ade4da4bcca321b1a29d2
patch-options = -p1
patch-binary = ${patch:location}/bin/patch
environment = python-mysqlclient-env
library-dirs =
${zlib:location}/lib/
......
diff -ur mysqlclient-1.3.10.orig/_mysql.c mysqlclient-1.3.10/_mysql.c
--- mysqlclient-1.3.10.orig/_mysql.c 2017-01-04 13:47:08.000000000 +0100
+++ mysqlclient-1.3.10/_mysql.c 2017-08-21 10:53:21.014929937 +0200
@@ -1060,7 +1060,7 @@
if (self && PyModule_Check((PyObject*)self))
self = NULL;
if (self && self->open) {
-#if MYSQL_VERSION_ID >= 50707 && !defined(MARIADB_BASE_VERSION)
+#if MYSQL_VERSION_ID >= 50707 && (!defined(MARIADB_BASE_VERSION) && !defined(MARIADB_VERSION_ID))
len = mysql_real_escape_string_quote(&(self->connection), out, in, size, '\'');
#else
len = mysql_real_escape_string(&(self->connection), out, in, size);
@@ -1118,7 +1118,7 @@
out = PyBytes_AS_STRING(str);
check_server_init(NULL);
if (self && self->open) {
-#if MYSQL_VERSION_ID >= 50707 && !defined(MARIADB_BASE_VERSION)
+#if MYSQL_VERSION_ID >= 50707 && (!defined(MARIADB_BASE_VERSION) && !defined(MARIADB_VERSION_ID))
len = mysql_real_escape_string_quote(&(self->connection), out+1, in, size, '\'');
#else
len = mysql_real_escape_string(&(self->connection), out+1, in, size);
diff -ur mysqlclient-1.3.10.orig/setup_posix.py mysqlclient-1.3.10/setup_posix.py
--- mysqlclient-1.3.10.orig/setup_posix.py 2016-07-26 10:12:24.000000000 +0200
+++ mysqlclient-1.3.10/setup_posix.py 2017-08-21 10:54:59.643071374 +0200
@@ -63,7 +63,7 @@
if extra_compile_args[i] == '-arch':
extra_link_args += ['-arch', extra_compile_args[i + 1]]
- include_dirs = [dequote(i[2:])
+ include_dirs = [dequote(os.path.isdir(i[2:]+'/server') and i[2:]+'/server' or i[2:])
for i in mysql_config('include') if i.startswith('-I')]
if static:
[buildout]
extends =
../../stack/slapos.cfg
../gcc/buildout.cfg
../openblas/buildout.cfg
../cmake/buildout.cfg
../python-cffi/buildout.cfg
../python-PyYAML/buildout.cfg
../python-cocoapi/buildout.cfg
../pillow/buildout.cfg
../scipy/buildout.cfg
../matplotlib/buildout.cfg
../unzip/buildout.cfg
parts =
pytorch-egg
[pytorch-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/pytorch/pytorch
tag = master
commit = 4af66c43045a317b477918c503d105f565b4a66b
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recursive --quiet -b ${:tag} ${:repository} ${:location}; cd ${:location}; ${:git-binary} checkout ${:commit}; ${:patch-binary} -p1 -d . < ${:_profile_base_location_}/pytorch.4af66c4.patch ) || (rm -fr ${:location}; exit 1)
[pytorch-build-interpreter]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
import sys
sys.path.append('.')
sys.path.append('${pytorch-repository:location}/torch/lib/ATen')
eggs =
setuptools
${scipy:egg}
${numpy:egg}
${python-cffi:egg}
${python-PyYAML:egg}
interpreter = pytorch-build-interpreter
scripts = pytorch-build-interpreter
location = ${buildout:parts-directory}/${:_buildout_section_name_}
[pytorch-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
workdir = ${pytorch-repository:location}
python-bin = ${buildout:bin-directory}/${pytorch-build-interpreter:interpreter}
no-cuda = 0
cmake-bin = ${cmake:location}/bin
git-bin = ${git:location}/bin
binutils-location = ${binutils:location}
gcc-location = ${gcc:location}
openblas-location = ${openblas:location}
script =
os.makedirs(location)
workdir = self.options['workdir']
python_bin = self.options['python-bin']
binutils_location = self.options['binutils-location']
gcc_location = self.options['gcc-location']
openblas_location = self.options['openblas-location']
env = {'PYTHONPATH':workdir,
'PATH':':'.join([self.options['cmake-bin'],
self.options['git-bin'],
binutils_location+'/bin',
gcc_location+'/bin',
os.environ['PATH']]),
'CMAKE_INCLUDE_PATH':':'.join([gcc_location+'/include',
binutils_location+'/include',
openblas_location+'/include',
]),
'CMAKE_LIBRARY_PATH':':'.join([gcc_location+'/lib',
gcc_location+'/lib64',
binutils_location+'/lib',
openblas_location+'/lib',
]),
'CC':gcc_location+'/bin/gcc',
'CXX':gcc_location+'/bin/g++',
'PYTORCH_PYTHON':python_bin,
'SLAPOS_COMPILE_ARGS':' '.join(['-Wl,-rpath,'+gcc_location+'/lib',
'-Wl,-rpath,'+gcc_location+'/lib64',
'-Wl,-rpath,'+binutils_location+'/lib',
'-Wl,-rpath,'+openblas_location+'/lib',
])
}
if self.options.get('no-cuda') == '1':
env['NO_CUDA'] = '1'
import os.path
call([python_bin, 'setup.py', 'build'], cwd=workdir, env=env)
call([python_bin, 'setup.py', 'bdist_egg'], cwd=workdir, env=env)
[pytorch-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
pytorch-repository-path = ${pytorch-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-pytorch-build = ${pytorch-build:location}
egg = torch
script =
os.makedirs(location)
workdir = self.options['pytorch-repository-path']
egg_name = 'torch-0.2.0+4af66c4-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[pytorch-egg]
recipe = zc.recipe.egg
initialization =
import scipy.spatial.ckdtree # load our own libstdc++ explicitly at the very beginning
eggs =
${scipy:egg}
${numpy:egg}
${python-PyYAML:egg}
${pytorch-build-install-egg:egg}
${pillow-python:egg}
${python-cocoapi-build-install-egg:egg}
${matplotlib:egg}
six
torchvision
interpreter = pytorch-python
scripts = pytorch-python
[versions]
torchvision = 0.1.6
diff --git a/setup.py b/setup.py
index 1d9a765..a50e9cb 100644
--- a/setup.py
+++ b/setup.py
@@ -90,7 +90,7 @@ def build_libs(libs):
assert lib in dep_libs, 'invalid lib: {}'.format(lib)
build_libs_cmd = ['bash', 'torch/lib/build_libs.sh']
my_env = os.environ.copy()
- my_env["PYTORCH_PYTHON"] = sys.executable
+ #my_env["PYTORCH_PYTHON"] = sys.executable
if WITH_SYSTEM_NCCL:
my_env["NCCL_ROOT_DIR"] = NCCL_ROOT_DIR
if WITH_CUDA:
diff --git a/torch/lib/build_libs.sh b/torch/lib/build_libs.sh
index af7020e..a91bd05 100755
--- a/torch/lib/build_libs.sh
+++ b/torch/lib/build_libs.sh
@@ -24,7 +24,7 @@ C_FLAGS=" -DTH_INDEX_BASE=0 -I$INSTALL_DIR/include \
-I$INSTALL_DIR/include/THS -I$INSTALL_DIR/include/THCS \
-I$INSTALL_DIR/include/THPP -I$INSTALL_DIR/include/THNN \
-I$INSTALL_DIR/include/THCUNN"
-LDFLAGS="-L$INSTALL_DIR/lib "
+LDFLAGS="-L$INSTALL_DIR/lib $SLAPOS_COMPILE_ARGS"
LD_POSTFIX=".so.1"
LD_POSTFIX_UNVERSIONED=".so"
if [[ $(uname) == 'Darwin' ]]; then
......@@ -22,6 +22,13 @@ rpath =
# and pin the egg in the [versions] section of the stack or SR.
find-links = http://pkgs.fedoraproject.org/repo/pkgs/rdiff-backup/rdiff-backup-1.0.5.tar.gz/fa2a165fa07a94be52c52e3545bc7758/rdiff-backup-1.0.5.tar.gz
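As a minimal illustration of the pinning mentioned in the (truncated) comment above, the consuming stack or software release would add something like the following; the version string is only an example taken from the find-links URL, not a pin made by this commit:

[versions]
rdiff-backup = 1.0.5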
[rdiff-backup-build-1.3.4]
<= rdiff-backup-build
# use our own version
find-links = http://www.nexedi.org/static/packages/source/rdiff-backup-1.3.4nxd2.tar.gz
patches =
${:_profile_base_location_}/rdiff-backup-1.3.4-librsync-1.0.0.patch#31fafc8bc4a00f002f52008a9f3b671f
[rdiff-backup]
# Scripts only generation part for rdiff-backup
recipe = zc.recipe.egg
......
Patch by Roman Tereshonkov and Kari Hautio for rdiff-backup <= 1.2.8 to avoid a build failure with
librsync >= 1.0.0 (which is a security bugfix release). The discussion and solution finding can be
found at https://bugs.launchpad.net/duplicity/+bug/1416344 (for duplicity).
--- rdiff-backup-1.3.4/rdiff_backup/_librsyncmodule.c 2009-03-16 15:36:21.000000000 +0100
+++ rdiff-backup-1.3.4/rdiff_backup/_librsyncmodule.c.librsync-1.0.0 2015-03-02 00:54:24.000000000 +0100
@@ -59,8 +59,13 @@
if (sm == NULL) return NULL;
sm->x_attr = NULL;
+#ifdef RS_DEFAULT_STRONG_LEN
sm->sig_job = rs_sig_begin((size_t)blocklen,
(size_t)RS_DEFAULT_STRONG_LEN);
+#else
+ sm->sig_job = rs_sig_begin((size_t)blocklen,
+ (size_t)8, RS_MD4_SIG_MAGIC);
+#endif
return (PyObject*)sm;
}
......@@ -4,6 +4,12 @@
# /opt/slapos folder, adapt this script as you please.
#
# Be careful not to run this script where the script is already installed.
#
# Before running this script, ensure dependencies are installed; on Debian, you can
# use the command below:
#
# apt-get install python gcc g++ make uml-utilities bridge-utils patch wget
#
# Use sudo or become superuser and create the slapos directory (you can pick a different directory)
mkdir -p /opt/slapos/log/
......
diff --git a/tensorboard/pip_package/build_pip_package.sh b/tensorboard/pip_package/build_pip_package.sh
index b386d59..f03b056 100755
--- a/tensorboard/pip_package/build_pip_package.sh
+++ b/tensorboard/pip_package/build_pip_package.sh
@@ -26,6 +26,7 @@ function main() {
DEST=$1
TMPDIR=$(mktemp -d -t tmp.XXXXXXXXXX)
RUNFILES="bazel-bin/tensorboard/pip_package/build_pip_package.runfiles/org_tensorflow_tensorboard"
+ WORKDIR=$(pwd)
echo $(date) : "=== Using tmpdir: ${TMPDIR}"
@@ -45,8 +46,8 @@ function main() {
rm -f MANIFEST
echo $(date) : "=== Building wheel"
echo $(pwd)
- python setup.py bdist_wheel >/dev/null
- python3 setup.py bdist_wheel >/dev/null
+ PYTHONPATH=${WORKDIR}/${RUNFILES} $PYTHON_BIN_PATH setup.py bdist_egg >/dev/null
+ #python3 setup.py bdist_wheel >/dev/null
mkdir -p ${DEST}
cp dist/* ${DEST}
popd
[buildout]
extends =
../zip/buildout.cfg
../bazel/buildout.cfg
parts =
slapos-cookbook-develop
slapos-cookbook
tensorboard-build-install-egg
[tensorboard-repository]
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/tensorflow/tensorboard
tag = 0.4
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/0.4.patch ) || (rm -fr ${:location}; exit 1)
[tensorboard-build]
recipe = slapos.recipe.build
location = ${buildout:parts-directory}/${:_buildout_section_name_}
workdir = ${tensorboard-repository:location}
gcc-bin = ${gcc:location}/bin
gcc-lib = ${gcc:location}/lib
gcc-lib64 = ${gcc:location}/lib64
numpy-python-command = ${buildout:bin-directory}/${numpy-egg:interpreter}
python27-lib = ${python2.7:location}/lib
java_home_bin = ${bazel:java_home}/bin
bazel-bin = ${bazel:location}/bin
script =
os.makedirs(location)
workdir = self.options['workdir']
env = {'PATH':':'.join([self.options['gcc-bin'],
self.options['java_home_bin'],
self.options['bazel-bin'],
os.environ['PATH']]),
'COMPILER_PATH':':'.join([self.options['gcc-bin'],
os.environ.get('COMPILER_PATH') or '']),
'LIBRARY_PATH':':'.join([self.options['gcc-lib'],
self.options['gcc-lib64'],
os.environ.get('LIBRARY_PATH') or '']),
'PYTHON_BIN_PATH':self.options['numpy-python-command'],
'PYTHON_LIB_PATH':self.options['python27-lib'],
}
import os.path
env['LD_LIBRARY_PATH'] = env['LIBRARY_PATH']
bazel_command = ['bazel', 'build', '--spawn_strategy=standalone', '--verbose_failures', '--sandbox_debug', '//tensorboard/pip_package:build_pip_package']
call(bazel_command, cwd=workdir, env=env)
[tensorboard-build-install-egg]
recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
tensorboard-repository-path = ${tensorboard-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-tensorboard-build = ${tensorboard-build:location}
egg = tensorflow-tensorboard
bazel-bin = ${bazel:location}/bin
java_home_bin = ${bazel:java_home}/bin
numpy-python-command = ${buildout:bin-directory}/${numpy-egg:interpreter}
script =
os.makedirs(location)
workdir = self.options['tensorboard-repository-path']
egg_name = 'tensorflow_tensorboard-0.4.0rc3-py2.7.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
env = {'PATH':':'.join([self.options['bazel-bin'],
self.options['java_home_bin'],
os.environ['PATH']]),
'PYTHON_BIN_PATH':self.options['numpy-python-command'],
}
call(['tensorboard/pip_package/build_pip_package.sh', dist_dir], cwd=workdir, env=env)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[versions]
......@@ -5,6 +5,7 @@ extends =
../zip/buildout.cfg
../bazel/buildout.cfg
../protobuf-python/buildout.cfg
../tensorboard/buildout.cfg
parts =
slapos-cookbook-develop
slapos-cookbook
......@@ -26,11 +27,11 @@ location = ${buildout:parts-directory}/${:_buildout_section_name_}
recipe = plone.recipe.command
stop-on-error = true
repository = https://github.com/tensorflow/tensorflow
tag = v1.0.1
tag = v1.4.0
git-binary = ${git:location}/bin/git
patch-binary = ${patch:location}/bin/patch
location = ${buildout:parts-directory}/${:_buildout_section_name_}
command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-v1.0.1.patch ) || (rm -fr ${:location}; exit 1)
command = export HOME=${:location}; (${:git-binary} clone --recurse-submodules --quiet -b ${:tag} ${:repository} ${:location}; cd ${buildout:parts-directory} ; ${:patch-binary} -p1 -d ${:_buildout_section_name_} < ${:_profile_base_location_}/tensorflow-r1.4.patch ) || (rm -fr ${:location}; exit 1)
[cuda]
tf_need_cuda = 1
......@@ -72,6 +73,10 @@ script =
'TF_NEED_GCP':'0',
'TF_NEED_HDFS':'0',
'TF_NEED_OPENCL':'0',
'TF_NEED_MKL':'0',
'TF_NEED_VERBS':'0',
'TF_CUDA_CLANG':'0',
'TF_NEED_MPI':'0',
'TF_NEED_CUDA':self.buildout['cuda']['tf_need_cuda'],
##### FOR CUDA #####
'GCC_HOST_COMPILER_PATH':os.path.join(self.options['gcc-bin'], 'gcc'),
......@@ -98,18 +103,17 @@ recipe = slapos.recipe.build
unzip-binary = ${unzip:location}/bin/unzip
tensorflow-repository-path = ${tensorflow-repository:location}
location = ${buildout:parts-directory}/${:_buildout_section_name_}
need-tensorboard-build = ${tensorboard-build:location}
need-tensorflow-build = ${tensorflow-build:location}
need-protobuf-python = ${protobuf-python:egg}
egg = tensorflow
script =
os.makedirs(location)
workdir = self.options['tensorflow-repository-path']
egg_name = 'tensorflow-1.0.1-py2.7-linux-x86_64.egg'
egg_name = 'tensorflow-1.4.0-py2.7-linux-x86_64.egg'
dist_dir = os.path.join(workdir, 'dist')
dest_dir = os.path.join(self.buildout['buildout']['eggs-directory'], egg_name)
call(['bazel-bin/tensorflow/tools/pip_package/build_pip_package', dist_dir], cwd=workdir)
call([self.options['unzip-binary'], '-o', os.path.join(dist_dir, egg_name), '-d', dest_dir])
[versions]
protobuf = 3.1.0.post1
wheel = 0.30.0a0
diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
index e2bee21..1df90c1 100755
index cbf06a9..226a423 100755
--- a/tensorflow/tools/pip_package/build_pip_package.sh
+++ b/tensorflow/tools/pip_package/build_pip_package.sh
@@ -136,7 +136,7 @@ function main() {
@@ -162,7 +162,7 @@ function main() {
pushd ${TMPDIR}
rm -f MANIFEST
echo $(date) : "=== Building wheel"
- "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${GPU_FLAG} >/dev/null
+ "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${GPU_FLAG} >/dev/null
- "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${PKG_NAME_FLAG} >/dev/null
+ "${PYTHON_BIN_PATH:-python}" setup.py bdist_egg ${PKG_NAME_FLAG} >/dev/null
mkdir -p ${DEST}
cp dist/* ${DEST}
popd
[buildout]
extends =
../libevent/buildout.cfg
../ncurses/buildout.cfg
parts +=
tmux
[tmux]
recipe = slapos.recipe.cmmi
url = https://github.com/tmux/tmux/releases/download/2.0/tmux-2.0.tar.gz
md5sum = 9fb6b443392c3978da5d599f1e814eaa
environment =
CFLAGS=-I${ncurses:location}/include -I${libevent:location}/include/
LDFLAGS=-L${ncurses:location}/lib/ -L${libevent:location}/lib/ -Wl,-rpath=${ncurses:location}/lib/ -Wl,-rpath=${libevent:location}/lib/
configure-options =
--prefix=${buildout:parts-directory}/${:_buildout_section_name_}
......@@ -4,12 +4,13 @@ parts =
extends =
../ncurses/buildout.cfg
../gettext/buildout.cfg
[vim]
recipe = slapos.recipe.cmmi
version = 7.4
url = http://ftp.vim.org/pub/vim/unix/vim-7.4.tar.bz2
md5sum = 607e135c559be642f210094ad023dc65
url = ftp://ftp.vim.org/pub/vim/unix/vim-8.0.586.tar.bz2
md5sum = b35e794140c196ff59b492b56c1e73db
environment=
CFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib/ -Wl,-rpath=${ncurses:location}/lib/
\ No newline at end of file
CPPFLAGS=-I${ncurses:location}/include
LDFLAGS=-L${ncurses:location}/lib -Wl,-rpath=${ncurses:location}/lib
LD_LIBRARY_PATH=${ncurses:location}/lib:${gettext:location}/lib
......@@ -4,5 +4,5 @@ parts =
[zlib]
recipe = slapos.recipe.cmmi
url = http://downloads.sourceforge.net/project/libpng/zlib/1.2.8/zlib-1.2.8.tar.gz
md5sum = 44d667c142d7cda120332623eab69f40
url = http://downloads.sourceforge.net/project/libpng/zlib/1.2.11/zlib-1.2.11.tar.gz
md5sum = 1c9f62f0778697a09d36121ead88e08e
[buildout]
parts =
zstd
[zstd]
recipe = slapos.recipe.cmmi
location = ${buildout:parts-directory}/${:_buildout_section_name_}
url = https://github.com/facebook/zstd/archive/v1.3.1.tar.gz
md5sum = e849ceef2f090240f690c13fba6ca70b
configure-command = :
make-options = PREFIX=${:location}
......@@ -55,11 +55,12 @@ setup(name=name,
packages=find_packages(),
include_package_data=True,
install_requires=[
'enum34', # for inotify-simple
'jsonschema',
'hexagonit.recipe.download',
'netaddr', # to manipulate on IP addresses
'setuptools', # namespaces
'inotifyx', # to watch filesystem changes (used in lockfile)
'inotify_simple',
'lock_file', #another lockfile implementation for multiprocess
'slapos.core', # uses internally
'zc.buildout', # plays with buildout
......@@ -133,7 +134,6 @@ setup(name=name,
'lamp.static = slapos.recipe.lamp:Static',
'libcloud = slapos.recipe.libcloud:Recipe',
'libcloudrequest = slapos.recipe.libcloudrequest:Recipe',
'lockfile = slapos.recipe.lockfile:Recipe',
'logrotate = slapos.recipe.logrotate:Recipe',
'logrotate.d = slapos.recipe.logrotate:Part',
'memcached = slapos.recipe.memcached:Recipe',
......
......@@ -80,7 +80,7 @@ class Recipe(GenericBaseRecipe):
mysql_script_list = []
# user defined functions
udf_registration = ""
udf_registration = "DROP FUNCTION IF EXISTS last_insert_grn_id;\nDROP FUNCTION IF EXISTS mroonga_snippet;\nDROP FUNCTION IF EXISTS mroonga_command;\n"
mroonga = self.options.get('mroonga', 'ha_mroonga.so')
if mroonga:
udf_registration += "CREATE FUNCTION last_insert_grn_id RETURNS " \
......
......@@ -3,47 +3,31 @@ import os
import signal
import subprocess
import time
import inotifyx
from collections import defaultdict
from inotify_simple import INotify, flags
def _wait_files_creation(file_list):
# Etablish a list of directory and subfiles
directories = dict()
for dirname, filename in [os.path.split(f) for f in file_list]:
directories.setdefault(dirname, dict())
directories[dirname][filename] = False
# Establish a list of directories and subfiles,
# and test existence before watching, so that we don't miss an event.
directories = defaultdict(dict)
for f in file_list:
dirname, filename = os.path.split(f)
directories[dirname][filename] = os.path.lexists(f)
def all_files_exists():
return all([all(files.values()) for files in directories.values()])
return all(all(files.itervalues()) for files in directories.itervalues())
fd = inotifyx.init()
try:
# Watch every directories where the file are
watchdescriptors = dict()
for dirname in directories.keys():
wd = inotifyx.add_watch(fd,
dirname,
inotifyx.IN_CREATE | inotifyx.IN_DELETE | inotifyx.IN_MOVE)
watchdescriptors[wd] = dirname
# Set to True the file wich exists
for dirname, filename in [os.path.split(f) for f in file_list]:
directories[dirname][filename] = os.path.exists(os.path.join(dirname,
filename))
# Let's wait for every file creation
while not all_files_exists():
events_list = inotifyx.get_events(fd)
for event in events_list:
dirname = watchdescriptors[event.wd]
if event.name in directories[dirname]:
# One of watched file was created or deleted
if event.mask & inotifyx.IN_DELETE:
directories[dirname][event.name] = False
else:
directories[dirname][event.name] = True
with INotify() as inotify:
watchdescriptors = {inotify.add_watch(dirname,
flags.CREATE | flags.DELETE | flags.MOVED_TO | flags.MOVED_FROM
): dirname
for dirname in directories}
finally:
os.close(fd)
while not all_files_exists():
for event in inotify.read():
directory = directories[watchdescriptors[event.wd]]
if event.name in directory:
directory[event.name] = event.mask & (flags.CREATE | flags.MOVED_TO)
def execute(args):
"""Portable execution with process replacement"""
......@@ -83,8 +67,8 @@ def generic_exec(args):
os.execve(exec_list[0], exec_list + sys.argv[1:], exec_env)
def sig_handler(signal, frame):
print 'Received signal %r, killing children and exiting' % signal
def sig_handler(sig, frame):
print 'Received signal %r, killing children and exiting' % sig
if child_pg is not None:
os.killpg(child_pg, signal.SIGHUP)
os.killpg(child_pg, signal.SIGTERM)
......@@ -97,6 +81,7 @@ signal.signal(signal.SIGTERM, sig_handler)
def execute_with_signal_translation(args):
"""Run process as children and translate from SIGTERM to another signal"""
global child_pg
child = subprocess.Popen(args, close_fds=True, preexec_fn=os.setsid)
child_pg = child.pid
try:
......
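For reference, a minimal standalone sketch of the inotify_simple API that the rewritten _wait_files_creation above relies on; the watched directory and the 'ready' file below are illustrative assumptions, not what the recipe actually uses:

# Minimal standalone sketch of the inotify_simple API used above.
# The watched directory and the 'ready' file are illustrative only.
import os
import tempfile
from inotify_simple import INotify, flags

watched_dir = tempfile.mkdtemp()
with INotify() as inotify:
    inotify.add_watch(watched_dir, flags.CREATE | flags.MOVED_TO)
    open(os.path.join(watched_dir, 'ready'), 'w').close()  # triggers a CREATE event
    for event in inotify.read(timeout=1000):                # timeout is in milliseconds
        print(event.name, bool(event.mask & flags.CREATE))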
......@@ -25,8 +25,7 @@
#
##############################################################################
import os
import inotifyx
from inotify_simple import INotify, flags
def subfiles(directory):
"""Return the list of subfiles of a directory, and wait for the newly created
......@@ -34,18 +33,12 @@ def subfiles(directory):
CAUTION : *DONT TRY TO CONVERT THE RESULT OF THIS FUNCTION INTO A LIST !
ALWAYS ITERATE OVER IT !!!*"""
watchfd = inotifyx.init()
inotifyx.add_watch(watchfd, directory, inotifyx.IN_CREATE)
try:
subfiles = set(os.listdir(directory))
subfiles |= set([file_.name for file_ in inotifyx.get_events(watchfd, 0)])
with INotify() as inotify:
inotify.add_watch(directory, flags.CLOSE_WRITE | flags.MOVED_TO)
names = os.listdir(directory)
while True:
for file_ in subfiles:
yield os.path.join(directory, file_)
subfiles = [file_.name for file_ in inotifyx.get_events(watchfd)]
finally:
os.close(watchfd)
for name in names:
yield os.path.join(directory, name)
names = (event.name for event in inotify.read())
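A short usage sketch for the subfiles() generator above (the spool directory and the handle() callback are hypothetical placeholders); since the generator blocks on inotify waiting for new files, it must be iterated lazily, never materialized with list():

# Hypothetical usage of the subfiles() generator defined above; handle() and
# the spool directory are placeholders. list(subfiles(...)) would never return,
# because the generator keeps waiting for newly created files.
for path in subfiles('/srv/spool'):
    handle(path)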
##############################################################################
#
# Copyright (c) 2010 Vifib SARL and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import sys
import subprocess
import inotifyx
from slapos.recipe.librecipe import GenericBaseRecipe
class LockFile(object):
class LockException(Exception):
pass
def __init__(self, filename, wait=True, exit=False):
self.filename = filename
if wait:
self.callback = lambda: self.waitDeletion()
elif not exit:
self.callback = lambda: self.raiseException()
else:
self.callback = lambda: sys.exit(1)
def raiseException(self):
raise LockFile.LockException("Not able to lock the file")
def waitDeletion(self):
inotify_fd = inotifyx.init()
try:
inotifyx.add_watch(inotify_fd, self.filename, inotifyx.IN_DELETE)
inotifyx.get_events(inotify_fd)
except IOError: # add_watch failed
pass
finally:
os.close(inotify_fd)
self.__enter__()
def __enter__(self):
try:
# Atomic file acquisition
self._fd = os.open(self.filename, os.O_CREAT | os.O_EXCL)
except OSError:
self.callback()
def __exit__(self, exc_type, exc_value, traceback):
os.close(self._fd)
os.unlink(self.filename)
def locked_run(args):
with LockFile(args['filename'], wait=args['wait'], exit=True):
subprocess.check_call([args['binary']])
class Recipe(GenericBaseRecipe):
def install(self):
wrapper = self.createPythonScript(self.options['wrapper'],
__name__ + '.locked_run',
dict(
filename=self.options['lock-file'],
wait=self.optionIsTrue('wait', False),
binary=self.options['binary'],
)
)
return [wrapper]
......@@ -92,6 +92,8 @@ class Storage(NeoBaseRecipe):
engine = self.options.get('engine')
if engine: # old versions of NEO don't support -e
r += '-e', engine
if self.options.get('dedup'):
r.append('--dedup')
if self.options.get('disable-drop-partitions'):
r.append('--disable-drop-partitions')
return r
......
import os, shutil, tempfile, threading, unittest
from slapos.recipe.librecipe import execute, inotify
class TestInotify(unittest.TestCase):
def setUp(self):
self.tmp = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmp)
def test_subfiles(self):
p = lambda x: os.path.join(self.tmp, x)
def create(name, text):
a = open(p(name), 'w')
a.write(text)
a.flush()
return a
def check(name, text):
path = next(notified)
self.assertEqual(path, p(name))
with open(path) as f:
self.assertEqual(f.read(), text)
a = create('first', 'blah')
a.write('...')
notified = inotify.subfiles(self.tmp)
check('first', 'blah')
os.link(p(a.name), p('a hard link')) # ignored
b = create('other', 'hello')
b.close()
check('other', 'hello')
c = create('last', '!!!')
a.close()
check('first', 'blah...')
os.rename(p(a.name), p(b.name))
check('other', 'blah...')
c.close()
check('last', '!!!')
def test_wait_files_creation(self):
file_list = (
'foo',
'bar',
'hello/world',
'hello/world!',
'a/b/c',
)
create = lambda x: open(x, 'w').close()
p = lambda x: os.path.join(self.tmp, x)
P = lambda x: p(file_list[x])
create(P(1))
os.mkdir(p('hello'))
os.makedirs(p('a/b'))
t = threading.Thread(target=execute._wait_files_creation,
args=(map(p, file_list),))
t.daemon = True
t.start()
def check():
t.join(.2)
self.assertTrue(t.is_alive())
check()
for x in P(3), p('a/b/d'), P(0):
create(x)
check()
os.rename(P(3), P(2))
os.rename(p('a/b/d'), P(4))
check()
os.remove(P(1))
for x in P(3), P(1):
create(x)
t.join(10)
self.assertFalse(t.is_alive())
......@@ -3,6 +3,7 @@ import json
import mock
import os
import unittest
import tempfile
from collections import defaultdict
from slapos.recipe import slapconfiguration
from slapos import format as slapformat
......@@ -12,8 +13,7 @@ class SlapConfigurationTest(unittest.TestCase):
def setUp(self):
"""Prepare files on filesystem."""
self.instance_root = "/tmp/instance_test_resourcefile"
os.mkdir(self.instance_root)
self.instance_root = tempfile.mkdtemp()
# create testing resource file
self.resource_file = os.path.join(self.instance_root, slapformat.Partition.resource_file)
self.resource = {
......
......@@ -50,8 +50,8 @@ gitdb = 0.6.4
pycrypto = 2.6.1
pycurl = 7.43.0
slapos.recipe.download = 1.0
slapos.recipe.template = 3.0
slapos.toolbox = 0.71
slapos.recipe.template = 4.1
slapos.toolbox = 0.73
smmap = 0.9.0
# Required by:
......
......@@ -39,7 +39,7 @@ md5sum = 665e83d660c9b779249b2179d7ce4b4e
[template-apache-frontend-configuration]
filename = templates/apache.conf.in
md5sum = 82cdb4ab02fec36285b9c1ce502f82f0
md5sum = a56045e7b53ff00ab34d2a8f911fc1a1
[template-custom-slave-list]
filename = templates/apache-custom-slave-list.cfg.in
......
......@@ -10,8 +10,8 @@ gitdb = 0.6.4
plone.recipe.command = 1.1
pycrypto = 2.6.1
rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 3.0
slapos.toolbox = 0.71
slapos.recipe.template = 4.1
slapos.toolbox = 0.73
smmap = 0.9.0
numpy = 1.11.2
pyasn1 = 0.2.3
......
......@@ -166,8 +166,8 @@ SSLProxyCheckPeerCN off
SSLProxyCheckPeerExpire off
include {{frontend_configuration.get('log-access-configuration')}}
include {{ slave_configuration_directory }}/*.conf
include {{ slave_with_cache_configuration_directory }}/*.conf
includeoptional {{ slave_configuration_directory }}/*.conf
includeoptional {{ slave_with_cache_configuration_directory }}/*.conf
ErrorDocument 404 /notfound.html
RewriteRule (.*) /notfound.html [R=404,L]
......@@ -27,6 +27,7 @@ statistic = $${:srv}/statistic
backupscript = $${:etc}/backup
www = $${:srv}/www
home = $${:etc}/home
promises = $${:etc}/promise
ssl = $${:etc}/ssl
ssh = $${:home}/.ssh
......@@ -49,12 +50,13 @@ logfile = $${directory:log}/crond.log
{% set frequency = slave_instance.get('frequency', '') -%}
{% set hostname = slave_instance.get('hostname', '') -%}
{% set connection = slave_instance.get('connection', '') -%}
{% set connection_port = slave_instance.get('connection_port', '22') -%}
{% set include = slave_instance.get('include', '') -%}
{% set include_string = "' --include='".join(include.split(' ')) -%}
{% set exclude = slave_instance.get('exclude', '') -%}
{% set exclude_string = '' -%}
{% set sudo = slave_instance.get('sudo', 'False') -%}
{% set remote_schema = 'rdiff-backup --server --restrict-read-only / -- "$@"' -%}
{% set remote_schema = slave_instance.get('remote_rdiff_path', 'rdiff-backup') + ' --server --restrict-read-only / -- "$@"' -%}
{% if (exclude != '') -%}
{% set exclude_string = "' --exclude='".join(exclude.split(' ')) -%}
......@@ -73,23 +75,19 @@ directory = $${directory:backup}/$${:_buildout_section_name_}
[{{ slave_reference }}-backup-private_key]
recipe = plone.recipe.command
stop-on-error = false
command = ${dropbear-output:keygen} -t $${:type} -s 2048 -f $${:key}
stop-on-error = true
command = ${coreutils-output:rm} -f $${:key} $${:public_key} && ${openssh-output:keygen} -t $${:type} -b 2048 -f $${:key} -q -N ""
key = $${directory:ssh}/$${:_buildout_section_name_}
public_key = $${:key}.pub
location = $${:public_key}
type = rsa
[{{ slave_reference }}-backup-public_key]
recipe = plone.recipe.command
stop-on-error = true
command = ${coreutils-output:rm} -f $${:key} && ${dropbear-output:keygen} -y -f {{ '$${' ~ slave_reference }}-backup-private_key:key} | ${grep-output:grep} {{ '$${' ~ slave_reference }}-backup-private_key:type} > $${:key}
key = {{ '$${' ~ slave_reference }}-backup-private_key:key}.pub
location = $${:key}
# Insert as a beginning part, to ensure that all public keys are generated before trying to publish. This reduces the number of slapgrid-cp runs.
{% do part_list.insert(0, "%s-backup-public_key" % slave_reference) -%}
{% do part_list.insert(0, "%s-backup-private_key" % slave_reference) -%}
[{{ slave_reference }}-backup-read-public_key]
recipe = slapos.cookbook:readline
storage-path = {{ '$${' ~ slave_reference }}-backup-public_key:key}
storage-path = {{ '$${' ~ slave_reference }}-backup-private_key:public_key}
# Publish slave {{ slave_reference }} information
[{{ slave_reference }}-backup-publish]
......@@ -107,6 +105,7 @@ mode = 0700
datadirectory = {{ '$${' ~ slave_reference }}-backup-directory:directory}
sshkey = {{ '$${' ~ slave_reference }}-backup-private_key:key}
connection = {{ connection }}
connection_port = {{ connection_port }}
hostname = {{ hostname }}
include = {{ include_string }}
exclude_string = {{ exclude_string }}
......@@ -169,6 +168,12 @@ mode = 0700
virtual-depends =
$${nginx-configuration:ip}
[nginx-listen-promise]
recipe = slapos.cookbook:check_port_listening
hostname = $${nginx-configuration:ip}
port = $${nginx-configuration:port}
path = $${directory:promises}/nginx_listen
[nginx-configuration]
recipe = slapos.recipe.template
url = ${template-nginx-configuration:output}
......@@ -187,6 +192,7 @@ ssl_crt = $${directory:ssl}/nginx.crt
parts =
dcron-service
nginx-service
nginx-listen-promise
activate-crontab-file
publish-global-rss
{% for part in part_list -%}
......
......@@ -10,7 +10,7 @@ extends =
# ../../component/git/buildout.cfg
# ../../component/subversion/buildout.cfg
../../component/rsync/buildout.cfg
../../component/dropbear/buildout.cfg
../../component/openssh/buildout.cfg
../../component/grep/buildout.cfg
../../component/findutils/buildout.cfg
# ../../stack/flask.cfg
......@@ -67,7 +67,7 @@ mode = 0644
[template-backup-script]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/template-backup-script.sh.in
md5sum = 47b20031db3b575651d8515d5add23e6
md5sum = fa79e0307e12e2f5b1f2adbd261995fc
output = ${buildout:directory}/template-backup-script.sh.in
mode = 0644
......@@ -105,7 +105,7 @@ mode = 0644
[template-pullrdiffbackup]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance-pullrdiffbackup.cfg.in
md5sum = 061b98d001b501c9e1beb424e8802d3d
md5sum = a2fb7b0cdd944be99da4122eb6f07749
output = ${buildout:directory}/template-pullrdiffbackup.cfg
mode = 0644
......@@ -116,8 +116,14 @@ md5sum = 42021b325159dff29e4bd4e33b8ff2f3
output = ${buildout:directory}/template.cfg
mode = 0644
[rdiff-backup]
eggs =
${rdiff-backup-build-1.3.4:egg}
[versions]
rdiff-backup = 1.0.5+SlapOSPatched001
# 1.3.4nxd2 is an invalid version string, thus the patched version string is not
# '1.3.4nxd2+SlapOSPatched001' but '1.3.4nxd2-SlapOSPatched001'.
rdiff-backup = 1.3.4nxd2-SlapOSPatched001
gunicorn = 19.1.1
plone.recipe.command = 1.1
slapos.recipe.template = 2.4.2
......
......@@ -18,7 +18,7 @@ ${rdiff-backup-output:rdiff-backup} \
$${:exclude_string} \
--include='$${:include}' \
--exclude='**' \
--remote-schema '${dropbear-output:ssh} -T -y -i $${:sshkey} %s $${:remote_schema}' \
--remote-schema '${openssh-output:ssh} -6 -q -T -y -o "StrictHostKeyChecking no" -i $${:sshkey} -p $${:connection_port} %s $${:remote_schema}' \
$${:connection}::/ ./
RESULT=$?
......
#!{{runTestSuite_py}}
from __future__ import print_function
import argparse, os, re, subprocess, sys
from time import gmtime, strftime, time
from erp5.util import taskdistribution
from erp5.util.testsuite import SubprocessError, TestSuite
from zc.buildout.buildout import Buildout
slapos_buildout = {{repr(slapos_buildout)}}
test_dict = {
'zc.buildout': slapos_buildout,
'zc.recipe.egg': os.path.join(slapos_buildout, 'zc.recipe.egg_'),
}
class DummyTestResult:
class DummyTestResultLine:
def stop(self, **kw):
pass
done = 0
def __init__(self, test_name_list):
self.test_name_list = test_name_list
def start(self):
test_result_line = self.DummyTestResultLine()
try:
test_result_line.name = self.test_name_list[self.done]
except IndexError:
return
self.done += 1
return test_result_line
class BuildoutTestSuite(TestSuite):
RUN_RE = re.compile(
r'Ran (?P<all_tests>\d+) tests with'
' (?P<failures>\d+) failures,'
' (?P<errors>\d+) errors and'
' (?P<skips>\d+) skipped in')
def run(self, test):
start = time()
try:
status_dict = self.spawn(os.path.join('bin', 'zope-testrunner'),
'--test-path', os.path.join(test_dict[test], 'src'))
except SubprocessError, e:
status_dict = e.status_dict
end = time()
status_dict.update(
date = strftime("%Y/%m/%d %H:%M:%S", gmtime(end)),
duration = end - start)
search = self.RUN_RE.search(status_dict['stdout'])
if search:
groupdict = search.groupdict()
status_dict.update(
test_count = int(groupdict['all_tests']),
error_count = int(groupdict['errors']),
failure_count = int(groupdict['failures']),
skip_count = int(groupdict['skips']))
return status_dict
def main():
os.environ['TEMP'] = {{repr(temp_directory)}}
parser = argparse.ArgumentParser(description='Run a test suite.')
parser.add_argument('--test_suite', help='The test suite name')
parser.add_argument('--test_suite_title', help='The test suite title')
parser.add_argument('--test_node_title', help='The test node title')
parser.add_argument('--project_title', help='The project title')
parser.add_argument('--revision', help='The revision to test',
default='dummy_revision')
parser.add_argument('--master_url',
help='The Url of Master controling many suites')
args = parser.parse_args()
test_title = args.test_suite_title or args.test_suite
if args.master_url:
tool = taskdistribution.TaskDistributionTool(args.master_url)
test_result = tool.createTestResult(args.revision,
list(test_dict),
args.test_node_title,
test_title=test_title,
project_title=args.project_title)
if test_result is None:
return
else:
test_result = DummyTestResult(list(test_dict))
fd = os.open('buildout.cfg', os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0666)
try:
os.write(fd, """\
[buildout]
extends = %s
develop =%s
parts = test
newest = false
[versions]
%s
[bootstrap]
recipe = zc.recipe.egg
eggs = zc.buildout
[test]
recipe = zc.recipe.egg
eggs +=
zope.testrunner
scripts =
zope-testrunner
""" % (os.path.join(slapos_buildout, 'buildout.cfg'),
''.join('\n ' + x for x in test_dict.itervalues()),
'\n'.join(x + ' =' for x in test_dict)))
finally:
os.close(fd)
Buildout('buildout.cfg', {}).install(['bootstrap'])
subprocess.check_call((os.path.join('bin', 'buildout'),))
test_suite = BuildoutTestSuite(1)
while 1:
test_result_line = test_result.start()
if not test_result_line:
break
test_result_line.stop(**test_suite.run(test_result_line.name))
if __name__ == "__main__":
main()
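For illustration, the RUN_RE pattern above extracts the counters from a zope.testrunner summary line; the sample line below is an assumption based on that output format, not captured output:

# Illustrative only: a zope.testrunner-style summary line (assumed format)
# parsed with the same pattern as RUN_RE in the script above.
import re

RUN_RE = re.compile(
    r'Ran (?P<all_tests>\d+) tests with'
    r' (?P<failures>\d+) failures,'
    r' (?P<errors>\d+) errors and'
    r' (?P<skips>\d+) skipped in')

line = 'Ran 42 tests with 1 failures, 0 errors and 2 skipped in 3.141 seconds.'
print(RUN_RE.search(line).groupdict())
# -> {'all_tests': '42', 'failures': '1', 'errors': '0', 'skips': '2'}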
[buildout]
extends = software.cfg
[versions]
setuptools = 36.6.0
[buildout]
extends =
../../stack/slapos.cfg
parts =
slapos-cookbook
template
[slapos.buildout-repository]
recipe = slapos.recipe.build:gitclone
repository = https://lab.nexedi.com/nexedi/slapos.buildout.git
git-executable = ${git:location}/bin/git
[runTestSuite_py]
recipe = zc.recipe.egg
eggs = erp5.util
zc.buildout
scripts = ${:interpreter}
interpreter = ${:_buildout_section_name_}
[template]
recipe = slapos.recipe.template:jinja2
# XXX: "template.cfg" is hardcoded in the instantiation recipe
rendered = ${buildout:directory}/template.cfg
template =
inline:
[buildout]
eggs-directory = ${buildout:eggs-directory}
develop-eggs-directory = ${buildout:develop-eggs-directory}
parts = runTestSuite
[slap-configuration]
recipe = slapos.cookbook:slapconfiguration.serialised
computer = $${slap-connection:computer-id}
partition = $${slap-connection:partition-id}
url = $${slap-connection:server-url}
key = $${slap-connection:key-file}
cert = $${slap-connection:cert-file}
[directory]
recipe = slapos.cookbook:mkdirectory
bin = $${buildout:directory}/bin
tmp = $${buildout:directory}/tmp
[slapos.buildout-repository]
recipe = slapos.recipe.build:gitclone
repository = ${slapos.buildout-repository:location}
git-executable = ${git:location}/bin/git
shared = true
[runTestSuite]
recipe = slapos.recipe.template:jinja2
rendered = $${directory:bin}/$${:_buildout_section_name_}
template = ${:_profile_base_location_}/$${:_buildout_section_name_}.in
mode = 0755
context =
key slapparameter_dict slap-configuration:configuration
key slapos_buildout slapos.buildout-repository:location
key temp_directory directory:tmp
raw runTestSuite_py ${buildout:bin-directory}/${runTestSuite_py:interpreter}
[buildout]
extends =
../../stack/caddy/buildout.cfg
\ No newline at end of file
......@@ -50,13 +50,13 @@
"crl-life-period": {
"title": "CRL life time period",
"description": "Number of individual certificate validity periods during which the CRL is valid. Default: 1/50.0",
"type": "float",
"type": "number",
"default": 0.2
},
"ca-life-period": {
"title": "CA Certificate life period",
"description": "Number of individual certificate validity periods during which the CA certificate is valid. Default: 10",
"type": "float",
"type": "number",
"default": 10
},
"crt-keep-time": {
......
......@@ -40,4 +40,4 @@ cns.recipe.symlink = 0.2.3
collective.recipe.environment = 0.2.0
erp5.util = 0.4.49
plone.recipe.command = 1.1
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
......@@ -48,12 +48,12 @@ scipy = 0.13.3
simpy = 3.0.5
zope.dottedname = 4.1.0
tablib = 0.10.0
mysqlclient = 1.3.10+SlapOSPatched002
mysqlclient = 1.3.12
# indirect dependancies
cp.recipe.cmd = 0.5
plone.recipe.command = 1.1
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
zope.exceptions = 4.0.7
zope.testing = 4.1.3
zc.recipe.testrunner = 2.0.0
......
......@@ -17,6 +17,14 @@
"default": "erp5",
"type": "string"
},
"bt5": {
"description": "Business Template to install at automatic site creation. By default, all configurators are installed.",
"type": "string"
},
"id-store-interval": {
"description": "Set Store Interval of default SQL Non Continuous Increasing Id Generator at automatic site creation. If unset, the value from the erp5_core Business Template is not touched.",
"type": "integer"
},
"timezone": {
"description": "Zope's timezone. Possible values are determined by host's libc, and typically come from a separate package (tzdata, ...)",
"default": "UTC",
......
......@@ -27,10 +27,9 @@ eggs =
zc.buildout
slapos.libnetworkcache
slapos.core
supervisor
jsonschema
hexagonit.recipe.download
netaddr
inotifyx
lock_file
pytz
erp5.util
......@@ -60,7 +59,7 @@ mode = 0644
[versions]
PyXML = 0.8.5
erp5.util = 0.4.49
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
ipython = 5.3.0
apache-libcloud = 2.1.0
gitdb2 = 2.0.2
......
......@@ -55,10 +55,20 @@ if [[ ! -s "$DEPLOYMENT_SCRIPT" ]] ; then
echo "exit 1" > $DEPLOYMENT_SCRIPT
fi
function add_log ()
{
LOG_FILE=$1
for f in /opt/slapos/log/slapos-node-{software,instance}.log ; do
echo "Tail of '$f':" >> $LOG_FILE
tail -n 500 $f >> $LOG_FILE
done
}
function upload ()
{
try=$1
LOG_FILE=$2
add_log $LOG_FILE
t=`date '+%Y%m%d%H%S'`
mv $LOG_FILE ${LOG_FILE}.$t
# just to be sure flush all disk operations before uploading
......
......@@ -55,7 +55,7 @@ output = ${buildout:directory}/template-original.kvm.cfg
[deploy-script-controller-script]
filename = deploy-script-controller
location = ${:_profile_base_location_}/${:filename}
md5sum = d2b92f45257a52e5a7ff5c311d46d4ae
md5sum = 31aadc895acf9fc2fc6e1cbe815339c6
# configuration
waittime = 360
tries = 80
......
......@@ -14,4 +14,4 @@ md5sum = efd3b712a2294207f265a9c45648d5cf
mode = 0644
[versions]
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
......@@ -19,5 +19,5 @@ context =
[fluentd]
gems +=
fluent-plugin-wendelin==0.1
fluent-plugin-wendelin==0.3
fluent-plugin-bin
......@@ -14,8 +14,19 @@ passwd = root1234
environment =
GITLAB_ROOT_PASSWORD=${root-password:passwd}
[backend-info]
# host = ${instance-parameter:ipv4-random}
[service-postgresql]
pgdata-directory = ${directory:srv}/pg
[gitlab-workhorse-dir]
recipe = slapos.cookbook:mkdirectory
srv = ${directory:srv}/glab-wh
[gitlab-workhorse]
srv = ${gitlab-workhorse-dir:srv}
socket = ${gitlab-workhorse:srv}/wh.socket
[unicorn]
socket = ${:srv}/unc.socket
[publish-instance-info]
password = ${root-password:passwd}
......
......@@ -27,7 +27,7 @@ extends =
parts =
ruby2.1
golang16
golang19
git
postgresql92
redis28
......@@ -211,7 +211,7 @@ make-targets= cd ${git2go-repository:location}
&& cp -a ${git-backup-repository:location}/contrib/gitlab-backup ${gopath:bin}
environment =
PKG_CONFIG_PATH=${openssl:location}/lib/pkgconfig:${zlib:location}/lib/pkgconfig
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang16:location}/bin:${buildout:bin-directory}:%(PATH)s
PATH=${cmake:location}/bin:${pkgconfig:location}/bin:${git:location}/bin:${golang19:location}/bin:${buildout:bin-directory}:%(PATH)s
GOPATH=${gopath:directory}
[xnice-repository]
......@@ -245,7 +245,7 @@ configure-command = :
make-targets= ${:_buildout_section_name_}
environment =
PATH=${golang16:location}/bin:%(PATH)s
PATH=${golang19:location}/bin:%(PATH)s
###############################
......@@ -336,7 +336,7 @@ md5sum = 319d7dbe3ad9b260c1e292cfc0d13b11
[instance-gitlab-test.cfg.in]
<= download-file
md5sum = cc8065104458af311c2ffa9ae20235a6
md5sum = a4ad76856db98e508af7e773d9ff78f9
[macrolib.cfg.in]
<= download-file
......@@ -381,5 +381,5 @@ cns.recipe.symlink = 0.2.3
docutils = 0.12
plone.recipe.command = 1.1
rubygemsrecipe = 0.2.2+slapos001
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
z3c.recipe.scripts = 1.0.1
......@@ -72,7 +72,7 @@ async = 0.6.1
gitdb = 0.5.4
pycrypto = 2.6
rdiff-backup = 1.0.5+SlapOSPatched001
slapos.recipe.template = 2.4.2
slapos.recipe.template = 4.1
slapos.toolbox = 0.40.4
smmap = 0.8.2
plone.recipe.command = 1.1
......
......@@ -48,4 +48,4 @@ md5sum = 8cde04bfd0c0e9bd56744b988275cfd8
PyRSS2Gen = 1.1
cns.recipe.symlink = 0.2.3
plone.recipe.command = 1.1
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
......@@ -113,5 +113,5 @@ mode = 0644
[versions]
erp5.util = 0.4.49
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
selenium = 2.53.1
......@@ -85,7 +85,7 @@ pyzmq = 16.0.2
scikit-learn = 0.18.1
seaborn = 0.7.1
simplegeneric = 0.8.1
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
statsmodels = 0.8.0
terminado = 0.6
tornado = 4.4.2
......
......@@ -89,7 +89,7 @@ command =
[template]
recipe = slapos.recipe.template
url = ${:_profile_base_location_}/instance.cfg.in
md5sum = f40a938400e789361c95d5a9246bf0ef
md5sum = bf5ef731c0d8da0267a4939882b4eeee
output = ${buildout:directory}/template.cfg
mode = 0644
......@@ -98,7 +98,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm.cfg.jinja2
mode = 644
md5sum = a849d4a6060fdb4e9e86917fb77ef153
md5sum = e2b8f86bdc12c86e7d959b55c6d54f6d
download-only = true
on-update = true
......@@ -107,7 +107,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-cluster.cfg.jinja2.in
mode = 644
md5sum = d9745bc9bd1d22a640a628c005f88ffb
md5sum = 05b6004e8c7a94de14f247affcef4971
download-only = true
on-update = true
......@@ -143,7 +143,7 @@ recipe = hexagonit.recipe.download
ignore-existing = true
url = ${:_profile_base_location_}/instance-kvm-export.cfg.jinja2
mode = 644
md5sum = 13387d37bbf430f1d2b827c8f1acc804
md5sum = fbad91193be6ebde5fc4c05a38a55e7b
download-only = true
on-update = true
......
......@@ -157,12 +157,6 @@
"description": "Text content which will be written in a file data of cluster http server. All VM will be able to download that file via the static URL of cluster HTTP server: https://10.0.2.101/FOLDER_HASH/data.",
"type": "string"
},
"enable-monitor": {
"title": "Enable Monitoring on this cluster",
"description": "Deploy monitor instance to this kvm instance. It help to check instance status, log and promise results.",
"type": "boolean",
"default": true
},
"monitor-interface-url": {
"title": "Monitor Web Interface URL",
"description": "Give Url of HTML web interface that will be used to render this monitor instance.",
......@@ -341,6 +335,20 @@
"default": "qcow2",
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"]
},
"wipe-disk-ondestroy": {
"title": "Wipe disks when destroy the VM",
"description": "Say if disks should be wiped by writing new data over every single bit before delete them. This option is used to securely delete VM disks",
"type": "boolean",
"default": false
},
"wipe-disk-iterations": {
"title": "Wipe disk iterations",
"description": "Number of disk overwrite iterations with random data. Default is 1. WARNING: Increase this value will slow down partition destruction and increase IO.",
"type": "integer",
"default": 1,
"minimum": 1,
"maximum": 5
},
"use-tap": {
"title": "Enable QEMU TAP network interface",
"description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
......
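The wipe-disk options above boil down to overwriting the disk image with random data a configurable number of times before deletion. A rough sketch of that idea, assuming a plain file path (the software release itself delegates this to a securedelete wrapper, shown further below in instance-kvm.cfg.jinja2):

# Rough sketch of the wipe idea only; the actual release calls a dedicated
# 'securedelete' wrapper with --check-pid-file and the disk/file list.
import os

def wipe(path, iterations=1, chunk=1 << 20):
    size = os.path.getsize(path)
    with open(path, 'r+b') as f:
        for _ in range(iterations):
            f.seek(0)
            remaining = size
            while remaining:
                n = min(chunk, remaining)
                f.write(os.urandom(n))   # overwrite with random data
                remaining -= n
            f.flush()
            os.fsync(f.fileno())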
......@@ -9,7 +9,13 @@
{% set kvm_instance_dict = {} -%}
{% set kvm_hostname_list = [] -%}
{% set monitor_url_list = [] -%}
{% set enable_monitoring = slapparameter_dict.get('enable-monitor', True) -%}
{% macro setconfig(name, value) -%}
{# will set config-name = value only if value is not empty -#}
{% if value and value != '' -%}
config-{{ name }} = {{ dumps(value) }}
{% endif -%}
{% endmacro -%}
[request-common]
recipe = slapos.cookbook:request
......@@ -37,36 +43,38 @@ state = stopped
config-frontend-instance-name = {{ instance_name ~ ' VNC Frontend' }}
config-frontend-software-type = {{ dumps(frontend_dict.get('software-type', 'frontend')) }}
config-frontend-software-url = {{ dumps(frontend_dict.get('software-url', 'http://git.erp5.org/gitweb/slapos.git/blob_plain/refs/tags/slapos-0.92:/software/kvm/software.cfg')) }}
config-frontend-instance-guid = {{ dumps(frontend_dict.get('instance-guid', '')) }}
{{ setconfig('frontend-instance-guid', kvm_parameter_dict.get('instance-guid', '')) }}
config-name = {{ instance_name }}
{% if slapparameter_dict.get('authorized-keys', []) -%}
config-authorized-key = {{ dumps(slapparameter_dict.get('authorized-keys') | join('\n')) }}
{% endif -%}
config-nbd-port = {{ dumps(kvm_parameter_dict.get('nbd-port', 1024)) }}
config-nbd-host = {{ dumps(kvm_parameter_dict.get('nbd-host', '')) }}
config-nbd2-port = {{ dumps(kvm_parameter_dict.get('nbd-port2', 1024)) }}
config-nbd2-host = {{ dumps(kvm_parameter_dict.get('nbd-host2', '')) }}
config-ram-size = {{ dumps(kvm_parameter_dict.get('ram-size', 1024)) }}
config-disk-size = {{ dumps(kvm_parameter_dict.get('disk-size', 10)) }}
config-disk-type = {{ dumps(kvm_parameter_dict.get('disk-type', 'virtio')) }}
config-cpu-count = {{ dumps(kvm_parameter_dict.get('cpu-count', 1)) }}
config-cpu-options = {{ dumps(kvm_parameter_dict.get('cpu-options', '')) }}
config-numa = {{ dumps(kvm_parameter_dict.get('numa', '')) }}
config-disk-cache = {{ dumps(kvm_parameter_dict.get('disk-cache', '')) }}
config-disk-aio = {{ dumps(kvm_parameter_dict.get('disk-aio', '')) }}
{{ setconfig('numa', kvm_parameter_dict.get('numa', '')) }}
{{ setconfig('machine-options', kvm_parameter_dict.get('machine-options', '')) }}
{{ setconfig('cpu-options', kvm_parameter_dict.get('cpu-options', '')) }}
{{ setconfig('nbd-host', kvm_parameter_dict.get('nbd-host', '')) }}
{{ setconfig('host2', kvm_parameter_dict.get('host2', '')) }}
config-auto-ballooning = {{ dumps(kvm_parameter_dict.get('auto-ballooning', True)) }}
config-machine-options = {{ dumps(kvm_parameter_dict.get('machine-options', '')) }}
config-cpu-model = {{ dumps(kvm_parameter_dict.get('cpu-model', '')) }}
{{ setconfig('disk-cache', kvm_parameter_dict.get('disk-cache', '')) }}
{{ setconfig('disk-aio', kvm_parameter_dict.get('disk-aio', '')) }}
{{ setconfig('cpu-model', kvm_parameter_dict.get('cpu-model', '')) }}
{{ setconfig('disk-cache', kvm_parameter_dict.get('disk-cache', '')) }}
{% set nat_rules_list = kvm_parameter_dict.get('nat-rules', []) -%}
config-nat-rules = {{ nat_rules_list | join(' ') }}
{{ setconfig('nat-rules', ' '.join(nat_rules_list)) }}
config-publish-nat-url = True
config-use-nat = {{ use_nat }}
config-use-tap = {{ dumps(kvm_parameter_dict.get('use-tap', True)) }}
config-nat-restrict-mode = {{ dumps(kvm_parameter_dict.get('nat-restrict-mode', False)) }}
config-enable-vhost = {{ dumps(kvm_parameter_dict.get('enable-vhost', False)) }}
config-virtual-hard-drive-url = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-url', '')) }}
config-virtual-hard-drive-md5sum = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-md5sum', '')) }}
{{ setconfig('virtual-hard-drive-url', kvm_parameter_dict.get('virtual-hard-drive-url', '')) }}
{{ setconfig('virtual-hard-drive-md5sum', kvm_parameter_dict.get('virtual-hard-drive-md5sum', '')) }}
config-virtual-hard-drive-gzipped = {{ dumps(kvm_parameter_dict.get('virtual-hard-drive-gzipped', False)) }}
config-hard-drive-url-check-certificate = {{ dumps(kvm_parameter_dict.get('hard-drive-url-check-certificate', True)) }}
config-external-disk-number = {{ dumps(kvm_parameter_dict.get('external-disk-number', 0)) }}
......@@ -74,15 +82,18 @@ config-external-disk-size = {{ dumps(kvm_parameter_dict.get('external-disk-size'
config-external-disk-format = {{ dumps(kvm_parameter_dict.get('external-disk-format', 'qcow2')) }}
config-enable-http-server = {{ dumps(kvm_parameter_dict.get('enable-http-server', True)) }}
config-httpd-port = {{ dumps(kvm_parameter_dict.get('httpd-port', 8081)) }}
{% if kvm_parameter_dict.get('data-to-vm', '') -%}
config-data-to-vm = {{ dumps(kvm_parameter_dict.get('data-to-vm', '')) }}
{% endif -%}
{{ setconfig('data-to-vm', kvm_parameter_dict.get('data-to-vm', '')) }}
config-disable-ansible-promise = {{ dumps(kvm_parameter_dict.get('disable-ansible-promise', False)) }}
config-enable-monitor = {{ enable_monitoring }}
config-monitor-cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
config-monitor-username = ${monitor-instance-parameter:username}
config-monitor-password = ${monitor-htpasswd:passwd}
# Enable disk wipe options
{% if kvm_parameter_dict.get('wipe-disk-ondestroy', False) -%}
config-wipe-disk-ondestroy = True
config-wipe-disk-iterations = {{ dumps(kvm_parameter_dict.get('wipe-disk-iterations', 1)) }}
{% endif -%}
# Enable simple http server on ipv6 so all VMs will access it
config-document-host = ${apache-conf:ip}
config-document-port = ${apache-conf:port}
......@@ -109,13 +120,11 @@ return =
{{ ' ' }}nat-rule-url-{{ port }}
{% endfor -%}
{% endif -%}
{% if enable_monitoring -%}
{{ ' ' }}monitor-base-url
{% do monitor_url_list.append('${' ~ section ~ ':connection-monitor-base-url}') -%}
{% endif -%}
{% if str(kvm_parameter_dict.get('use-tap', 'True')).lower() == 'true' -%}
{{ ' ' }}tap-ipv4
{% do monitor_url_list.append('${' ~ section ~ ':connection-monitor-base-url}') -%}
{% do publish_dict.__setitem__('lan-' ~ instance_name, '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% do kvm_hostname_list.append(instance_name ~ ' ' ~ '${' ~ section ~ ':connection-tap-ipv4}') -%}
{% endif -%}
......@@ -151,7 +160,7 @@ name = Frontend {{ name }}
software-type = {{ slave_frontend_stype }}
slave = true
config-url = {{ url }}
config-custom_domain = {{ dumps(frontend_parameter_dict.get('domain', '')) }}
{{ setconfig('custom_domain', kvm_parameter_dict.get('domain', '')) }}
config-enable_cache = {{ dumps(frontend_parameter_dict.get('enable-cache', False)) }}
config-https-only = {{ dumps(frontend_parameter_dict.get('https-only', False)) }}
{% if frontend_parameter_dict.get('type', '') -%}
......@@ -245,18 +254,14 @@ recipe = slapos.cookbook:publish
{% for name, value in publish_dict.items() -%}
{{ name }} = {{ value }}
{% endfor %}
{% if enable_monitoring -%}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% do part_list.append('monitor-base') -%}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password}
{% endif -%}
{% do part_list.append('monitor-base') -%}
[buildout]
extends =
{{ template_httpd_cfg }}
{% if enable_monitoring -%}
{{ ' ' ~ template_monitor }}
{% endif -%}
parts =
httpd
......
......@@ -34,17 +34,17 @@ context =
# Extends publish section with resilient parameters
[publish-connection-information]
<= resilient-publish-connection-parameter
monitor-base-url = ${monitor-publish:monitor-base-url}
monitor-setup-url = ${monitor-publish:monitor-setup-url}
{% if str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
[monitor-instance-parameter]
monitor-httpd-port = 8026
monitor-title = {{ slapparameter_dict.get('name', 'KVM Standalone') }}
cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
{% if slapparameter_dict.get('monitor-username', '') -%}
{% if slapparameter_dict.get('monitor-username', '') -%}
username = {{ slapparameter_dict['monitor-username'] }}
{% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%}
{% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%}
password = {{ slapparameter_dict['monitor-password'] }}
{% endif -%}
{% endif -%}
......@@ -159,6 +159,21 @@
"enum": ["qcow2", "raw", "vdi", "vmdk", "cloop", "qed"]
},
"wipe-disk-ondestroy": {
"title": "Wipe disks when destroy the VM",
"description": "Say if disks should be wiped by writing new data over every single bit before delete them. This option is used to securely delete VM disks",
"type": "boolean",
"default": false
},
"wipe-disk-iterations": {
"title": "Wipe disk iterations",
"description": "Number of disk overwrite iterations with random data. Default is 1. WARNING: Increase this value will slow down partition destruction and increase IO.",
"type": "integer",
"default": 1,
"minimum": 1,
"maximum": 5
},
"use-tap": {
"title": "Use QEMU TAP network interface",
"description": "Use QEMU TAP network interface, might require a bridge on SlapOS Node.",
......@@ -188,12 +203,6 @@
"type": "boolean",
"default": false
},
"enable-monitor": {
"title": "Deploy monitoring tools",
"description": "Deploy monitor instance to this kvm instance. It help to check instance status, log and promise results.",
"type": "boolean",
"default": true
},
"monitor-interface-url": {
"title": "Monitor Web Interface URL",
"description": "Give Url of HTML web interface that will be used to render this monitor instance.",
......
{% set enable_http = slapparameter_dict.get('enable-http-server', 'False').lower() -%}
{% set use_tap = slapparameter_dict.get('use-tap', 'False').lower() -%}
{% set use_nat = slapparameter_dict.get('use-nat', 'True').lower() -%}
{% set wipe_disk = slapparameter_dict.get('wipe-disk-ondestroy', 'False').lower() -%}
{% set nat_restrict = slapparameter_dict.get('nat-restrict-mode', 'False').lower() -%}
{% set name = slapparameter_dict.get('name', 'localhost') -%}
{% set disable_ansible_promise = slapparameter_dict.get('disable-ansible-promise', 'True').lower() -%}
......@@ -9,7 +10,6 @@
{% set frontend_software_type = 'default' -%}
{% set extends_list = [] -%}
{% set part_list = [] -%}
{% set monitor = str(slapparameter_dict.get('enable-monitor', True)).lower() == 'true' -%}
{% set bootstrap_url = '' -%}
{% set bootstrap_url_md5sum = '' -%}
......@@ -26,10 +26,9 @@
{% set nat_rule_list = '' %}
{% endif -%}
{% if monitor -%}
{% do extends_list.append(template_monitor) -%}
{% endif -%}
{% do extends_list.append(logrotate_cfg) -%}
[directory]
recipe = slapos.cookbook:mkdirectory
etc = ${buildout:directory}/etc
......@@ -42,6 +41,7 @@ services = ${:etc}/service
promises = ${:etc}/promise
novnc-conf = ${:etc}/novnc
run = ${:var}/run
prerm = ${:etc}/prerm
ca-dir = ${:srv}/ssl
public = ${:srv}/public/
cron-entries = ${:etc}/cron.d
......@@ -219,6 +219,19 @@ input = inline:#!/bin/sh
output = ${directory:promises}/kvm-disk-image-corruption
mode = 700
{% if wipe_disk == 'true' -%}
{% do part_list.append('wipe-disk-wrapper') -%}
{% set wipe_file_list = '${kvm-parameter-dict:disk-path}' -%}
{% if storage_dict -%}
{% set wipe_file_list = '${kvm-parameter-dict:disk-path}' ~ ' ' ~ '/* '.join(storage_dict.values()) ~ '/*' -%}
{% endif -%}
[wipe-disk-wrapper]
recipe = slapos.cookbook:wrapper
wrapper-path = ${directory:prerm}/slapos_wipe_qemu_disk
command-line =
{{ wipe_disk_wrapper }} -n {{ slapparameter_dict.get('wipe-disk-iterations', 1) }} -suz --check-pid-file ${kvm-parameter-dict:pid-file-path} --file {{ wipe_file_list }}
{% endif -%}
[kvm-started-promise]
recipe = slapos.recipe.template:jinja2
template = {{ qemu_start_promise_tpl }}
......@@ -343,22 +356,20 @@ hostname = ${httpd:host}
port = ${httpd:port}
{% endif %}
{% if monitor -%}
[monitor-instance-parameter]
monitor-httpd-port = 8026
monitor-title = {{ slapparameter_dict.get('name', 'KVM Standalone') }}
cors-domains = {{ slapparameter_dict.get('monitor-cors-domains', 'monitor.app.officejs.com') }}
{% if slapparameter_dict.get('monitor-username', '') -%}
{% if slapparameter_dict.get('monitor-username', '') -%}
username = {{ slapparameter_dict['monitor-username'] }}
{% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%}
{% endif -%}
{% if slapparameter_dict.get('monitor-password', '') -%}
password = {{ slapparameter_dict['monitor-password'] }}
{% endif -%}
{% endif -%}
interface-url = {{ slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') }}
[publish-connection-information]
<= monitor-publish
recipe = slapos.cookbook:publish
ipv6 = ${slap-network-information:global-ipv6}
backend-url = https://[${novnc-instance:ip}]:${novnc-instance:port}/vnc_auto.html?host=[${novnc-instance:ip}]&port=${novnc-instance:port}&encrypt=1&password=${kvm-controller-parameter-dict:vnc-passwd}
......@@ -392,13 +403,6 @@ tap-ipv4 = ${slap-network-information:tap-ipv4}
7_info = Get the public key file in your VM with the command: wget {{ kvm_http }}/authorized_keys
{% endif %}
{% endif %}
{% if monitor -%}
monitor-base-url = ${monitor-publish-parameters:monitor-base-url}
{% set monitor_interface_url = slapparameter_dict.get('monitor-interface-url', 'https://monitor.app.officejs.com') -%}
{% if monitor_interface_url -%}
monitor-setup-url = {{ monitor_interface_url }}/#page=settings_configurator&url=${monitor-publish-parameters:monitor-url}&username=${monitor-publish-parameters:monitor-user}&password=${monitor-publish-parameters:monitor-password}
{% endif -%}
{% endif -%}
{% if use_tap == 'true' and tap_network_dict.has_key('ipv4') -%}
1_info = Use these configurations below to configure interface {{ iface }} in your VM.
......@@ -631,10 +635,8 @@ parts =
cron
cron-entry-logrotate
frontend-promise
{% if monitor -%}
# monitor parts
monitor-base
{% endif -%}
# Complete parts with sections
{{ part_list | join('\n ') }}
......
......@@ -103,6 +103,7 @@ context =
raw template_kvm_run ${template-kvm-run:location}/${template-kvm-run:filename}
raw template_monitor ${monitor2-template:rendered}
raw websockify_executable_location ${buildout:directory}/bin/websockify
raw wipe_disk_wrapper ${buildout:directory}/bin/securedelete
template-parts-destination = ${template-parts:target}
template-replicated-destination = ${template-replicated:target}
import-list = file parts :template-parts-destination
......
......@@ -5,13 +5,13 @@ extends = common.cfg
# XXX - use websockify = 0.5.1 for compatibility with kvm frontend
websockify = 0.5.1
slapos.toolbox = 0.71
slapos.toolbox = 0.73
erp5.util = 0.4.49
apache-libcloud = 1.1.0
collective.recipe.environment = 0.2.0
gitdb = 0.6.4
pycurl = 7.43.0
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
smmap = 0.9.0
# websockify = 0.8.0
......
......@@ -17,7 +17,7 @@ parts +=
versions = versions
[versions]
slapos.recipe.template = 3.0
slapos.recipe.template = 4.1
[template-instance]
......