diff --git a/src/codegen/src/main/scala/CodeGen.scala b/src/codegen/src/main/scala/CodeGen.scala
index 08bf53400e..be6273bb87 100644
--- a/src/codegen/src/main/scala/CodeGen.scala
+++ b/src/codegen/src/main/scala/CodeGen.scala
@@ -47,15 +47,14 @@ object CodeGen {
| outputDir: $outputDir
| toZipDir: $toZipDir
| pyTestDir: $pyTestDir
- | docDir: $docDir""".stripMargin)
+ | pyDocDir: $pyDocDir""".stripMargin)
val roots = // note: excludes the toplevel project
if (!rootsFile.exists) sys.error(s"Could not find roots file at $rootsFile")
else readFile(rootsFile, _.getLines.toList).filter(_ != ".")
println("Creating temp folders")
toZipDir.mkdirs
pyTestDir.mkdirs
- docDir.mkdirs
- inDocDir.mkdirs
+ pyDocDir.mkdirs
println("Copy jar files to output directory")
copyAllFilesFromRoots(srcDir, roots, jarRelPath,
(Pattern.quote("-" + mmlVer + ".jar") + "$").r,
diff --git a/src/codegen/src/main/scala/Config.scala b/src/codegen/src/main/scala/Config.scala
index 343e6d4259..5648d6ed30 100644
--- a/src/codegen/src/main/scala/Config.scala
+++ b/src/codegen/src/main/scala/Config.scala
@@ -15,8 +15,7 @@ object Config {
val toZipDir = new File(srcDir, "src/main/resources/mmlspark")
val zipFile = new File(outputDir, "mmlspark.zip")
val pyTestDir = new File(topDir, "TestResults/generated_pytests")
- val docDir = new File(topDir, "BuildArtifacts/docs")
- val inDocDir = new File(docDir, "source")
+ val pyDocDir = new File(topDir, "BuildArtifacts/pydocsrc")
val jarRelPath = "target/scala-" + sys.env("SCALA_VERSION")
val pyRelPath = "src/main/python"
val mmlVer = sys.env.getOrElse("MML_VERSION",
diff --git a/src/codegen/src/main/scala/DocGen.scala b/src/codegen/src/main/scala/DocGen.scala
index 83ba09e795..304710a694 100644
--- a/src/codegen/src/main/scala/DocGen.scala
+++ b/src/codegen/src/main/scala/DocGen.scala
@@ -43,11 +43,11 @@ object DocGen {
val pattern = "^[A-Z]\\w*[.]py$".r
val moduleString = allFiles(toZipDir, (f => pattern.findFirstIn(f.getName) != None))
.map(f => s" ${getBaseName(f.getName)}\n").mkString("")
- writeFile(new File(inDocDir, "modules.rst"), rstFileLines(moduleString))
+ writeFile(new File(pyDocDir, "modules.rst"), rstFileLines(moduleString))
// Generate .rst file for each PySpark wrapper - for documentation generation
allFiles(toZipDir, (f => pattern.findFirstIn(f.getName) != None))
- .foreach{x => writeFile(new File(inDocDir, getBaseName(x.getName) + ".rst"),
+ .foreach{x => writeFile(new File(pyDocDir, getBaseName(x.getName) + ".rst"),
contentsString(getBaseName(x.getName)))
}
}
diff --git a/src/project/build.scala b/src/project/build.scala
index 3827134773..83e3419e5a 100644
--- a/src/project/build.scala
+++ b/src/project/build.scala
@@ -46,6 +46,7 @@ object Extras {
def testsDir = file(env("TEST_RESULTS", "../TestResults"))
def mavenDir = artifactsDir / "packages" / "m2"
def docsDir = artifactsDir / "docs" / "scala"
+ val topDocHtml = file(".") / "project" / "top-doc.html"
def scalacOpts = Seq(
"-encoding", "UTF-8",
@@ -114,6 +115,7 @@ object Extras {
dependencyOverrides in ThisBuild ++= overrideLibs,
scalacOptions in ThisBuild ++= scalacOpts,
scalacOptions in (Compile, doc) += "-groups",
+ scalacOptions in (Compile, doc) ++= Seq("-doc-root-content", topDocHtml.getPath()),
// Don't run tests in parallel, and fork subprocesses for them
parallelExecution in (ThisBuild, Test) := false,
fork in (ThisBuild, Test) := true,
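Note: `-doc-root-content` points scaladoc at an HTML file to use as the content of the root documentation page, so the setting above is roughly equivalent to the following direct invocation (paths illustrative, not the build's actual command line):

    scaladoc -groups -doc-root-content src/project/top-doc.html \
      -d BuildArtifacts/docs/scala $(find src -name '*.scala')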
diff --git a/src/project/top-doc.html b/src/project/top-doc.html
new file mode 100644
index 0000000000..ae96f72f63
--- /dev/null
+++ b/src/project/top-doc.html
@@ -0,0 +1,4 @@
+These are the API documentation pages for the Scala side of
+MMLSpark.
+
+See also the PySpark-level API documentation.
diff --git a/tools/config.sh b/tools/config.sh
index b062a09e66..06f4e37c47 100644
--- a/tools/config.sh
+++ b/tools/config.sh
@@ -140,7 +140,7 @@ deftag extended
deftag e2e extended
deftag linuxonly
# Tag definitions for $PUBLISH
-map deftag storage maven pip demo docker
+map deftag storage maven pip docs demo docker
defvar -p SRCDIR "$BASEDIR/src"
defvar -p BUILD_ARTIFACTS "$BASEDIR/BuildArtifacts"
@@ -192,7 +192,8 @@ Conda.setup() {
_ ./bin/conda install --name "root" --no-update-deps --no-deps --yes \
--quiet --file "mmlspark-packages.spec"
if [[ "$BUILDMODE" != "runtime" ]]; then
- ./bin/pip install "xmlrunner" "wheel"
+ # xmlrunner: tests; wheel: pip builds; sphinx*, recommonmark: pydoc builds
+ ./bin/pip install "xmlrunner" "wheel" "sphinx" "sphinx_rtd_theme" "recommonmark"
else
show section "Minimizing conda directory"
collect_log=2 _ ./bin/conda uninstall -y tk
@@ -231,7 +232,9 @@ CNTK.init() {
defvar STORAGE_CONTAINER "buildartifacts"
defvar STORAGE_URL "$(_main_url "$STORAGE_CONTAINER")"
-# Container for maven/pip packages
+# Container for docs and maven/pip packages
+defvar DOCS_CONTAINER "docs"
+defvar DOCS_URL "$(_main_url "$DOCS_CONTAINER")"
defvar MAVEN_CONTAINER "maven"
defvar -x MAVEN_URL "$(_main_url "$MAVEN_CONTAINER")"
defvar -d MAVEN_PACKAGE "com.microsoft.ml.spark:mmlspark_$SCALA_VERSION:<{MML_VERSION}>"
diff --git a/tools/docker/build-env b/tools/docker/build-env
index afa0607fe6..237aa91ad2 100755
--- a/tools/docker/build-env
+++ b/tools/docker/build-env
@@ -18,8 +18,9 @@ echo 'PS1='\''\u:\w\$ '\' >> "/etc/skel/.bashrc"
useradd -c "Microsoft ML for Apache Spark" -U -d "$HOME" -m "$USER"
cd "$HOME"
+# avoid running git
/mkenv/src/runme BUILDMODE=runtime INSTALLER_CACHE_DIR=/mkenv/cache \
- MML_VERSION="???" MML_BUILD_INFO="???"
+ MML_VERSION="???" MML_BUILD_INFO="???" MML_LATEST="???"
chown -R "$USER:$USER" "$HOME"
cd /home
diff --git a/tools/misc/container-gc b/tools/misc/container-gc
index f8d508bdf5..c9dbbe2bc6 100755
--- a/tools/misc/container-gc
+++ b/tools/misc/container-gc
@@ -4,7 +4,7 @@
. "$(dirname "${BASH_SOURCE[0]}")/../../runme"
-types=(S M P)
+types=(S M P D)
declare -A S=([container]="$STORAGE_CONTAINER"
[path]=""
[suffix]="/")
@@ -15,6 +15,10 @@ declare -A P=([container]="$PIP_CONTAINER"
[path]=""
[prefix]="mmlspark-"
[suffix]="-py2.py3-none-any.whl")
+declare -A D=([container]="$DOCS_CONTAINER"
+ [path]=""
+ [suffix]=""
+ [listsuffix]="/")
set -e
shopt -s nullglob
@@ -71,9 +75,13 @@ get_versions_for() {
local IFS=$'\n\r'
Xs=($(IFS=""; azls "/${X[container]}/${X[path]}${X[path]:+/}" | \
while read -r l; do
- l="${l#${X[prefix]}}"; l="${l%${X[suffix]}}"; echo "$l"
+ # [listsuffix] can override [suffix] for this listing
+ l="${l#${X[prefix]}}"; l="${l%${X[listsuffix]:-${X[suffix]}}}"
+ # ignore things that don't look like a version (eg, in docs)
+ if [[ "$l" = *[0-9].[0-9]* ]]; then echo "$l"; fi
done | sort -V))
IFS=" "
+ all+=("${Xs[@]}")
X[vers_]=" ${Xs[*]} "
}
@@ -226,9 +234,9 @@ do_requests() {
done
}
-types_="${types[*]}"; types_="${types_// /}"
+types_="${types[*]}"; types_="${types_// /}"; all=()
map get_versions_for "${types[@]}"
-all=($(printf '%s\n' "${Ss[@]}" "${Ms[@]}" "${Ps[@]}" | sort -V -u))
+all=($(printf '%s\n' "${all[@]}" | sort -V -u))
echo "Versions found: ${#all[@]}"
diff --git a/tools/pip/generate-pip.sh b/tools/pip/generate-pip
similarity index 100%
rename from tools/pip/generate-pip.sh
rename to tools/pip/generate-pip
diff --git a/tools/pydocs/build b/tools/pydocs/build
new file mode 100755
index 0000000000..aa9dae0b2a
--- /dev/null
+++ b/tools/pydocs/build
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Copyright (C) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See LICENSE in project root for information.
+
+. "$(dirname "${BASH_SOURCE[0]}")/../../runme" "$@"
+main() (
+
+show section "Building Python Documentation"
+
+here="$(cd $(dirname "${BASH_SOURCE[0]}"); pwd)"
+pydocsrc="$BUILD_ARTIFACTS/pydocsrc"
+
+cd "$BUILD_ARTIFACTS/docs"
+_rm "pyspark"; _md "pyspark"
+
+show - "Copying source files"
+srcs=( "$here/src"/* )
+for f in "${srcs[@]}"; do t="$pydocsrc/$(basename "$f")"
+ if [[ -e "$t" ]]; then failwith "target file exists: $t"; else cp "$f" "$t"; fi
+done
+
+PATH+=":$CONDA_HOME/bin"
+PYTHONPATH="$BUILD_ARTIFACTS/sdk/mmlspark.zip:$PYTHONPATH"
+PYTHONPATH="$BASEDIR/src/src/main/resources/mmlspark:$PYTHONPATH"
+
+_ -a sphinx-build -D version="${MML_VERSION%%+*}" -D release="$MML_VERSION" \
+ "$pydocsrc" "pyspark"
+ret=$?
+
+for f in "${srcs[@]}"; do rm -f "$pydocsrc/$(basename "$f")"; done
+
+if (($ret)); then failwith "documentation build failure"; fi
+
+)
+__ main "$@"
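Note: sphinx is given two version strings, the short `version` and the full `release`; `%%+*` strips everything from the first `+`. With a hypothetical version string:

    MML_VERSION="0.9.1+gabc1234"
    echo "${MML_VERSION%%+*}"   # -> 0.9.1            (used for -D version=...)
    echo "$MML_VERSION"         # -> 0.9.1+gabc1234   (used for -D release=...)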
diff --git a/tools/pydocs/publish b/tools/pydocs/publish
new file mode 100755
index 0000000000..cbf0e79321
--- /dev/null
+++ b/tools/pydocs/publish
@@ -0,0 +1,115 @@
+#!/usr/bin/env bash
+# Copyright (C) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See LICENSE in project root for information.
+
+# publish [dirname | --top]
+# Copies the built documentation from $BUILD_ARTIFACTS/docs to the docs
+# container, under the given directory name (defaults to $MML_VERSION); use
+# `--top` to copy it to the toplevel so it becomes the default docs content.
+
+. "$(dirname "${BASH_SOURCE[0]}")/../../runme" "$@"
+main() (
+
+target="$1"; shift
+if [[ -z "$target" ]]; then target="$MML_VERSION"; fi
+if [[ "x$target" = "x--top" ]]; then
+ show section "Publishing docs to toplevel"; target=""
+else
+ show section "Publishing docs to $target"
+fi
+
+default="index.html"
+mime_types=(
+ # note: glob patterns are matched against the whole copied path, so they
+ # should always start with a `*`
+ "*.html;text/html"
+ "*.png;image/png"
+ "*.gif;image/gif"
+ "*.svg;image/svg+xml"
+ "*.js;application/javascript"
+ "*.css;text/css"
+ "*.ttf;application/font-sfnt"
+ "*.woff;application/font-woff"
+ "*.eot;application/vnd.ms-fontobject"
+ "*.txt;text/plain"
+ "*.doctree;application/octet-stream"
+ "*/.buildinfo;application/octet-stream"
+ "*/objects.inv;application/octet-stream"
+ "*/environment.pickle;application/octet-stream"
+)
+
+cd "$BUILD_ARTIFACTS/docs"
+restore=$(shopt -p dotglob globstar); shopt -s dotglob globstar
+files=( ** ); eval "$restore"
+
+# Make $default files when missing
+fst=1
+for f in . "${files[@]}"; do
+ x="$f/$default"
+ if [[ ! -d "$f" || -e "$x" ]]; then continue; fi
+ if ((fst)); then show - "creating default $default files"; fst=0; fi
+ show - " $x"
+  { echo "<html><body><ul>"
+    for y in "$f"/*; do
+      u="$(basename "$y")"; if [[ -d "$y" ]]; then u+="/"; fi
+      if [[ "$u" = "$default" ]]; then continue; fi
+      echo "<li><a href=\"$u\">$u</a></li>"
+    done
+    echo "</ul></body></html>"
+  } > "$x"
+  files+=("$x")
+done
+
+len=${#files[@]}
+
+# copy all files with a proper type
+for mt in "${mime_types[@]}"; do
+ glob="${mt%;*}"; type="${mt##*;}"
+ show - "Copying $glob as $type"
+  for ((i=0; i<len; i++)); do
+    f="${files[i]:-}"
+    if [[ -z "$f" || "./$f" != $glob ]]; then continue; fi
+    collect_log=1 _ azblob upload \
+      --container "$DOCS_CONTAINER" --content-type "$type" \
+      --file "$f" --name "${target:+$target/}${f#./}" \
+      2> /dev/null
+    unset "files[i]"
+  done
+done
+
+tmp="$(mktemp)"
+for ((i=0; i<len; i++)); do
+  f="${files[i]:-}"
+  if [[ -z "$f" || ! -d "$f" ]]; then continue; fi
+  # create "foo" for redirections to "foo/"
+  webdir="${target:+$target/}$f"; redirect="$(basename "$f")/"
+  show command "... html redirect to $redirect ... > $(qstr "$tmp")"
+  { echo "<html><head>"
+    echo "<meta http-equiv=\"refresh\" content=\"0; url=$redirect\" />"
+    echo "</head><body>"
+    echo "<a href=\"$redirect\">Moved here</a>"
+    echo "</body></html>"
+  } > "$tmp"
+  collect_log=1 _ azblob upload \
+    --container "$DOCS_CONTAINER" --content-type "text/html" \
+    --file "$tmp" --name "$webdir" \
+    2> /dev/null
+  unset "files[i]"
+done
+rm -f "$tmp"
+
+if ((${#files[@]} > 0)); then
+ echo "Leftovers: ${#files[@]}"
+ printf ' %s\n' "${files[@]}"
+ failwith "the above paths were not in any known patterns"
+fi
+
+)
+__ main "$@"
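Note: patterns here are matched against the whole copied path with a `./` prefix, which is why they all start with `*` and why `*/`-anchored patterns also catch toplevel dotfiles. A quick bash illustration, plus the two intended invocations (see build.sh below):

    [[ "./.buildinfo" = */.buildinfo ]] && echo match            # toplevel dotfile
    [[ "./pyspark/objects.inv" = */objects.inv ]] && echo match  # nested file

    tools/pydocs/publish          # upload under $MML_VERSION/
    tools/pydocs/publish --top    # upload to the toplevel (default docs)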
diff --git a/tools/pydocs/src/conf.py b/tools/pydocs/src/conf.py
index fb4dd3cc3f..6de7e3b60a 100644
--- a/tools/pydocs/src/conf.py
+++ b/tools/pydocs/src/conf.py
@@ -1,24 +1,12 @@
+# Copyright (C) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See LICENSE in project root for information.
+
+# Based on code generated by `sphinx-autogen`.
# This file is execfile()d with the current directory set to its
# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-import os
-import sys
-sys.path.insert(0, os.path.abspath("../../../BuildArtifacts/sdk/mmlspark.zip"))
-sys.path.insert(1, os.path.abspath("../pyspark"))
# -- General configuration ------------------------------------------------
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = "1.0"
-
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named "sphinx.ext.*") or your custom
# ones.
@@ -27,24 +15,15 @@
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
- "sphinxcontrib.scaladomain",
"sphinx.ext.napoleon"]
# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-# source_suffix = [".rst", ".md"]
+# templates_path = ["_templates"]
from recommonmark.parser import CommonMarkParser
-source_parsers = {
- ".md": CommonMarkParser,
-}
-
source_suffix = [".rst", ".md"]
+source_parsers = { ".md": CommonMarkParser }
# The master toctree document.
master_doc = "index"
@@ -57,11 +36,8 @@
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
-#
-# The short X.Y version.
-version = "1.0"
-# The full version, including alpha/beta/rc tags.
-release = "1.0.0"
+# version = "1.0" # The short X.Y version.
+# release = "1.0.0" # The full version, including alpha/beta/rc tags.
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -96,13 +72,13 @@
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+# html_static_path = ["_static"]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
-htmlhelp_basename = "AzureMLdoc"
+htmlhelp_basename = "MMLSparkdoc"
# -- Options for manual page output ---------------------------------------
@@ -133,7 +109,7 @@
try:
from unittest.mock import MagicMock # python >= 3.3
except ImportError:
- from mock import Mock as MagicMock # older
+ from mock import Mock as MagicMock # older
class Mock(MagicMock):
@classmethod
@@ -142,3 +118,19 @@ def __getattr__(cls, name):
MOCK_MODULES = ["numpy", "pandas"]
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
+
+# -- Setup AutoStructify --------------------------------------------------
+# Use this if we ever want to use markdown pages instead of rst pages.
+# (note: currently, this requires pip-installing "sphinx==1.5.6" because of an
+# obscure bug, see rtfd/recommonmark#73 and sphinx-doc/sphinx#3800)
+# from recommonmark.transform import AutoStructify
+# def mmlspark_doc_resolver(path):
+# return path # github_doc_root + url
+# def setup(app):
+# app.add_config_value("recommonmark_config", {
+# "url_resolver": mmlspark_doc_resolver,
+# "auto_toc_tree_section": "Contents",
+# "enable_eval_rst": True,
+# "enable_auto_doc_ref": True,
+# }, True)
+# app.add_transform(AutoStructify)
diff --git a/tools/pydocs/src/index.rst b/tools/pydocs/src/index.rst
index 864de2ecae..bdfc53c348 100644
--- a/tools/pydocs/src/index.rst
+++ b/tools/pydocs/src/index.rst
@@ -1,10 +1,7 @@
-.. MicrosoftMLSpark documentation master file, created by
- sphinx-quickstart on Fri Apr 7 16:40:35 2017.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
+.. MicrosoftMLSpark documentation master file
-Welcome to Microsoft Machine Learning for Apache Spark documentation!
-=====================================================================
+Microsoft Machine Learning for Apache Spark
+===========================================
Contents:
diff --git a/tools/pydocs/src/scala.rst b/tools/pydocs/src/scala.rst
index 31d1d6a753..9fa4724749 100644
--- a/tools/pydocs/src/scala.rst
+++ b/tools/pydocs/src/scala.rst
@@ -1,5 +1,4 @@
Scala Library
=============
-The API documentation for the Microsoft ML for Apache Spark can be found in
-`Scala Library `_
+See the `Scala API documentation for MMLSpark <../scala/>`_.
diff --git a/tools/runme/build.sh b/tools/runme/build.sh
index 29d34b83ef..4595907aca 100644
--- a/tools/runme/build.sh
+++ b/tools/runme/build.sh
@@ -22,8 +22,8 @@ _add_to_description() { # fmt arg...
_publish_description() {
# note: this does not depend on "should publish storage"
if [[ ! -e "$BUILD_ARTIFACTS/Build.md" ]]; then return; fi
- _ az storage blob upload --account-name "$MAIN_STORAGE" -c "$STORAGE_CONTAINER" \
- -f "$BUILD_ARTIFACTS/Build.md" -n "$MML_VERSION/Build.md"
+ _ azblob upload -c "$STORAGE_CONTAINER" \
+ -f "$BUILD_ARTIFACTS/Build.md" -n "$MML_VERSION/Build.md"
}
_postprocess_sbt_log() {
@@ -61,6 +61,7 @@ _postprocess_sbt_log() {
_prepare_build_artifacts() {
show section "Preparing Build"
_rm "$BUILD_ARTIFACTS" "$TEST_RESULTS"
+ _reset_build_info
_ mkdir -p "$BUILD_ARTIFACTS/sdk" "$TEST_RESULTS"
_ cp -a "$BASEDIR/LICENSE" "$BUILD_ARTIFACTS"
_ cp -a "$BASEDIR/LICENSE" "$BUILD_ARTIFACTS/sdk"
@@ -106,10 +107,9 @@ _sbt_build() {
cd "$owd"
}
-_upload_to_storage() { # name, pkgdir, container
+_upload_package_to_storage() { # name, pkgdir, container
show section "Publishing $1 Package"
- _ az storage blob upload-batch --account-name "$MAIN_STORAGE" \
- --source "$BUILD_ARTIFACTS/packages/$2" --destination "$3"
+ _ azblob upload-batch --source "$BUILD_ARTIFACTS/packages/$2" --destination "$3"
case "$1" in
( "Maven" )
_add_to_description '* **Maven** package uploaded, use `%s` and `%s`.\n' \
@@ -177,11 +177,31 @@ _publish_to_demo_cluster() {
_add_to_description '* Demo cluster updated.\n'
}
+_publish_docs() {
+ @ "../pydocs/publish"
+ _add_to_description '* Documentation [uploaded](%s).\n' "$DOCS_URL/$MML_VERSION"
+ if [[ "$MML_LATEST" = "yes" ]]; then
+ # there is no api for copying to a different path, so re-do the whole thing,
+ # but first, delete any paths that are not included in the new contents
+ local d f
+ for d in "scala" "pyspark"; do
+ __ azblob list --container-name "$DOCS_CONTAINER" --prefix "$d/" -o tsv | cut -f 3
+ done | while read -r f; do
+ if [[ -e "$BUILD_ARTIFACTS/docs/$f" ]]; then continue; fi
+ echo -n "deleting $f..."
+ if collect_log=1 __ azblob delete --container-name "$DOCS_CONTAINER" -n "$f" > /dev/null
+ then echo " done"; else echo " failed"; failwith "deletion of $f failed"; fi
+ done
+ @ "../pydocs/publish" --top
+ _add_to_description '* Also copied as [toplevel documentation](%s).\n' "$DOCS_URL"
+ fi
+}
+
_publish_to_dockerhub() {
@ "../docker/build-docker"
local itag="mmlspark:latest" otag otags
otag="microsoft/mmlspark:$MML_VERSION"; otag="${otag//+/_}"; otags=("$otag")
- if [[ "$MML_VERSION" = *([0-9.]) ]]; then otags+=( "microsoft/mmlspark:latest" ); fi
+ if [[ "$MML_LATEST" = "yes" ]]; then otags+=( "microsoft/mmlspark:latest" ); fi
show section "Pushing to Dockerhub as ${otags[*]}"
show - "Image info:"
local info="$(docker images "$itag")"
@@ -234,8 +254,7 @@ _upload_artifacts_to_storage() {
txt="${txt//<=<=fill-in-url=>=>/$STORAGE_URL/$MML_VERSION}"
echo "$txt" > "$tmp/$(basename "$f")"
done
- _ az storage blob upload-batch --account-name "$MAIN_STORAGE" \
- --source "$tmp" --destination "$STORAGE_CONTAINER/$MML_VERSION"
+ _ azblob upload-batch --source "$tmp" --destination "$STORAGE_CONTAINER/$MML_VERSION"
_rm "$tmp"
_add_to_description \
'* **HDInsight**: Copy the link to %s to setup this build on a cluster.\n' \
@@ -251,16 +270,21 @@ _full_build() {
_sbt_build
_ ln -sf "$(realpath --relative-to="$HOME/bin" "$TOOLSDIR/bin/mml-exec")" \
"$HOME/bin"
+ @ "../pydocs/build"
+ @ "../pip/generate-pip"
if [[ "$PUBLISH" != "none" ]]; then
_ az account show > /dev/null # fail if not logged-in to azure
fi
- should publish maven && _upload_to_storage "Maven" "m2" "$MAVEN_CONTAINER"
+ # basic publish steps that happen before testing
+ should publish maven && _upload_package_to_storage "Maven" "m2" "$MAVEN_CONTAINER"
+ should publish pip && _upload_package_to_storage "PIP" "pip" "$PIP_CONTAINER"
+ should publish storage && _upload_artifacts_to_storage
+ # tests
should test python && @ "../pytests/auto-tests"
should test python && @ "../pytests/notebook-tests"
- should publish pip && @ "../pip/generate-pip.sh"
- should publish pip && _upload_to_storage "PIP" "pip" "$PIP_CONTAINER"
- should publish storage && _upload_artifacts_to_storage
should test e2e && _e2e_tests
+ # publish steps that should happen only for successful tests
+ should publish docs && _publish_docs
should publish demo && _publish_to_demo_cluster
should publish docker && _publish_to_dockerhub
_upload_artifacts_to_VSTS
diff --git a/tools/runme/utils.sh b/tools/runme/utils.sh
index 6dd081ceb6..f95d1bc0b0 100644
--- a/tools/runme/utils.sh
+++ b/tools/runme/utils.sh
@@ -341,6 +341,13 @@ get_runtime_hash() {
echo "${hash%% *}"
}
+# ---< azblob verb arg... >-----------------------------------------------------
+# Same as "az storage blob verb --account-name $MAIN_STORAGE arg..."
+azblob() {
+ local verb="$1"; shift
+ az storage blob "$verb" --account-name "$MAIN_STORAGE" "$@"
+}
+
# ------------------------------------------------------------------------------
# Internal functions follow
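Note: the wrapper simply injects the account name after the verb, so existing `az storage blob ...` call sites translate mechanically; e.g. (values illustrative):

    azblob upload -c "$STORAGE_CONTAINER" -f "Build.md" -n "v1/Build.md"
    # runs: az storage blob upload --account-name "$MAIN_STORAGE" \
    #         -c "$STORAGE_CONTAINER" -f "Build.md" -n "v1/Build.md"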
@@ -369,11 +376,12 @@ _parse_TESTS() { _parse_tags TESTS _test_info; }
_parse_PUBLISH() { _parse_tags PUBLISH _publish_info; }
# Defines $MML_VERSION and $MML_BUILD_INFO
+_used_preexisting_version=0
_set_build_info() {
- local info version
+ local info version is_latest
# make it possible to avoid running git
- if [[ ! -z "$MML_BUILD_INFO" && ! -z "$MML_VERSION" ]]; then
- info="$MML_BUILD_INFO"; version="$MML_VERSION"
+ if [[ -n "$MML_BUILD_INFO" && -n "$MML_VERSION" && -n "$MML_LATEST" ]]; then
+ info="$MML_BUILD_INFO"; version="$MML_VERSION"; is_latest="$MML_LATEST"
else
local owd="$PWD"; cd "$BASEDIR"
# sanity checks for version tags
@@ -381,9 +389,13 @@ _set_build_info() {
for t in $(git tag -l); do
if [[ ! "$t" =~ $rx ]]; then failwith "found a bad tag name \"$t\""; fi
done
+ local tag="$(git describe --abbrev=0)" # needed also for is_latest
+ # MML_VERSION
if [[ -r "$BUILD_ARTIFACTS/version" ]]; then
- # if there is a built version, use it, so that we don't get a new
- # version after commits are made
+ # if there is a built version, use it, so that we don't get a new version
+ # after commits are made (but it'll get reset in ./runme since it
+ # recreates the $BUILD_ARTIFACTS directory)
+ _used_preexisting_version=1
version="$(< "$BUILD_ARTIFACTS/version")"
else
# generate a version string (that works for pip wheels too) as follows:
@@ -391,7 +403,6 @@ _set_build_info() {
# (note: prefer origin/master since VSTS doesn't update master)
local branch="$(git merge-base HEAD refs/remotes/origin/master 2> /dev/null \
|| git merge-base HEAD refs/heads/master)"
- local tag="$(git describe --abbrev=0)"
local tagref="$(git rev-parse "refs/tags/$tag^{commit}")"
# 1. main version, taken from the most recent version tag
# (that's all if we're building this tagged version)
@@ -411,6 +422,7 @@ _set_build_info() {
if [[ "$BUILDMODE" != "server" ]]; then version+=".local"; fi
if ! git diff-index --quiet HEAD --; then version+=".dirty"; fi
fi
+ # MML_BUILD_INFO
if [[ "$BUILDMODE" != "server" || "$AGENT_ID" = "" ]]; then
info="Local build: ${USERNAME:-$USER} ${BASEDIR:-$PWD}"
local line
@@ -433,10 +445,23 @@ _set_build_info() {
info+="; $BUILD_DEFINITIONNAME#$BUILD_BUILDNUMBER"
fi
info="$version: $info"
+ # MML_LATEST
+ # "yes" when building an exact version which is the latest on master
+ local latest="$(git describe --abbrev=0 master)"
+ if [[ "$version" = "${tag#v}" && "$tag" = "$latest" ]]
+ then is_latest="yes"; else is_latest="no"; fi
+ #
cd "$owd"
fi
defvar -x MML_VERSION "$version"
defvar -x MML_BUILD_INFO "$info"
+ defvar MML_LATEST "$is_latest"
+}
+# To be called when re-creating $BUILD_ARTIFACTS
+_reset_build_info() {
+ if ((!_used_preexisting_version)); then return; fi
+ unset MML_BUILD_INFO MML_VERSION MML_LATEST; _used_preexisting_version=0
+ _set_build_info
}
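Note: `MML_LATEST` reduces to comparing the nearest tag on HEAD with the latest tag on master, stripping the `v` prefix for the version check; with illustrative values:

    tag="v0.9.1"      # git describe --abbrev=0
    latest="v0.9.1"   # git describe --abbrev=0 master
    version="0.9.1"
    if [[ "$version" = "${tag#v}" && "$tag" = "$latest" ]]
    then echo MML_LATEST=yes; else echo MML_LATEST=no; fi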
# Parse $INSTALLATIONS info