
Commit e49d196

Add pkgpanda.util.MessageLogger
* MessageLogger provides a set of methods that improve logging when run in the context of TeamCity.
* The first type of method is `scope`, which allows the user to define a "block": all log messages created within the scope of the context-managed method belong to that block and are collapsed when viewing the log in TeamCity.
* The second type of method sets a specific log level on the message being logged and, in the case of `error`, allows providing error details.
* When any of these methods is used in a context determined not to be TeamCity (via `is_running_under_teamcity()`), the scope messages are not printed and the level messages are simplified, removing the TeamCity-specific elements.

Refactorings
============

1. Create a new package, `teamcity-messages`. Rather than teamcity-messages only being added by tox, it is now a primary dependency of pkgpanda and dcos-image.
2. Add usage of the new "scope" and "level" messages to pkgpanda and dcos-image for builds.
1 parent a9a58fd commit e49d196
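The MessageLogger implementation itself (in pkgpanda/util.py) is part of this commit but is not reproduced in the hunks below. As a rough sketch of the behaviour described above, a scope/level logger gated on TeamCity detection could look like the following. This is not the actual implementation: the class name, message formats, and the `warning` method are assumptions; only `scope` and the `is_running_under_teamcity()` check come from the commit message.

    import sys
    from contextlib import contextmanager

    try:
        # Provided by the teamcity-messages package this commit adds.
        from teamcity import is_running_under_teamcity
    except ImportError:
        def is_running_under_teamcity():
            return False


    class ScopedLogger:
        """Illustrative stand-in for pkgpanda.util.MessageLogger (names assumed)."""

        @contextmanager
        def scope(self, name):
            # Under TeamCity, wrap everything logged inside the block in a
            # collapsible blockOpened/blockClosed pair; outside TeamCity, the
            # scope markers are simply not printed.
            if is_running_under_teamcity():
                print("##teamcity[blockOpened name='{}']".format(name))
            try:
                yield
            finally:
                if is_running_under_teamcity():
                    print("##teamcity[blockClosed name='{}']".format(name))

        def warning(self, text):
            # A "level" message: TeamCity gets a structured service message with
            # a status attribute, plain consoles get a simple prefixed line.
            # (Real service messages also need |, ', [ and ] escaped; omitted here.)
            if is_running_under_teamcity():
                print("##teamcity[message text='{}' status='WARNING']".format(text))
            else:
                print("WARNING: {}".format(text), file=sys.stderr)


    logger = ScopedLogger()

    with logger.scope("example build step"):
        logger.warning("package ref is out of date")

Call sites in the diffs below then simply wrap a unit of work in `with logger.scope(...)` or call a level method such as `logger.warning(...)` (see gen/installer/aws.py and pkgpanda/build/src_fetchers.py).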

File tree

15 files changed, +217 / -100 lines


build_teamcity (+24)

@@ -2,9 +2,28 @@
 set -x
 set -o errexit -o pipefail

+function now { date +"%Y-%m-%dT%H:%M:%S.000" | tr -d '\n' ;}
+function _scope_msg() {
+    # Here we want to turn off `-x` that is present when running the bash script
+    # before we echo the message to teamcity, if we do not do this a "duplicate" echo will
+    # be printed to stderr and teamcity will error trying to parse the message because
+    # the format is invalid.
+    set +x
+    echo "##teamcity[block$1 timestamp='$(now)' name='$2']"
+    # Turn `-x` back on now that we're done sending the message to teamcity
+    set -x
+}
+function _scope_opened() {
+    _scope_msg "Opened" $1
+}
+function _scope_closed() {
+    _scope_msg "Closed" $1
+}
+
 # Fail quickly if docker daemon is not up
 systemctl status docker

+_scope_opened "cleanup"
 # cleanup from previous builds
 # *active.json and *.bootstrap.tar.xz must be cleaned up, otherwise
 # Teamcity starts picking up artifacts from previous builds.
@@ -20,7 +39,9 @@ rm -f CHANNEL_NAME
 rm -rf build/env
 rm -f dcos_generate_config*.sh
 rm -rf wheelhouse/
+_scope_closed "cleanup"

+_scope_opened "setup"
 # Force Python stdout/err to be unbuffered.
 export PYTHONUNBUFFERED="notemtpy"

@@ -42,14 +63,17 @@ CHANNEL_NAME=testing/$TAG
 echo tag: "$TAG"
 echo channel: "$CHANNEL_NAME"

+set +x
 echo "##teamcity[setParameter name='env.CHANNEL_NAME' value='$CHANNEL_NAME']"
 echo "##teamcity[setParameter name='env.TAG' value='$TAG']"
+set -x

 cp config/dcos-release.config.yaml dcos-release.config.yaml

 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

 "$DIR"/prep_teamcity
+_scope_closed "setup"

 release create $TAG $TAG
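For context, the `##teamcity[blockOpened ...]` and `##teamcity[blockClosed ...]` lines echoed by the helpers above are TeamCity service messages; TeamCity folds everything logged between a matched pair into a named, collapsible section of the build log. A small Python illustration of the message format `_scope_msg` produces (a hypothetical helper, shown only to make the format concrete):

    from datetime import datetime

    def scope_msg(action, name):
        # Mirrors the bash _scope_msg/now helpers above: a block service message
        # with a second-resolution timestamp, milliseconds fixed to ".000".
        ts = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000")
        return "##teamcity[block{} timestamp='{}' name='{}']".format(action, ts, name)

    print(scope_msg("Opened", "cleanup"))
    print(scope_msg("Closed", "cleanup"))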

gen/installer/aws.py (+3, -2)

@@ -15,6 +15,7 @@
 import pkgpanda.util
 import release
 import release.storage
+from pkgpanda.util import logger

 aws_region_names = [
     {
@@ -481,8 +482,8 @@ def gen_templates(arguments):
         slave_cloud_config=variant_cloudconfig['slave'],
         slave_public_cloud_config=variant_cloudconfig['slave_public'])

-    print("Validating CloudFormation")
-    validate_cf(cloudformation)
+    with logger.scope("Validating CloudFormation"):
+        validate_cf(cloudformation)

     return gen.Bunch({
         'cloudformation': cloudformation,

gen/installer/bash.py (+19, -17)

@@ -10,7 +10,7 @@
 import gen.installer.util as util
 import gen.template
 import pkgpanda.util
-
+from pkgpanda.util import logger

 file_template = """mkdir -p `dirname {filename}`
 cat <<'EOF' > "{filename}"
@@ -659,26 +659,28 @@ def do_create(tag, build_name, reproducible_artifact_path, commit, variant_argum

     Outputs the generated dcos_generate_config.sh as it's artifacts.
     """
-
     # TODO(cmaloney): Build installers in parallel.
     # Variants are sorted for stable ordering.
-    for variant, bootstrap_info in sorted(variant_arguments.items(), key=lambda kv: pkgpanda.util.variant_str(kv[0])):
-        print("Building installer for variant:", pkgpanda.util.variant_name(variant))
-        bootstrap_installer_name = '{}installer'.format(pkgpanda.util.variant_prefix(variant))
-        bootstrap_installer_id = all_bootstraps[bootstrap_installer_name]
-
-        installer_filename = make_installer_docker(variant, bootstrap_info['bootstrap_id'], bootstrap_installer_id)
-
-        yield {
-            'channel_path': 'dcos_generate_config.{}sh'.format(pkgpanda.util.variant_prefix(variant)),
-            'local_path': installer_filename
-        }
+    for variant, bootstrap_info in sorted(variant_arguments.items(),
+                                          key=lambda kv: pkgpanda.util.variant_str(kv[0])):
+        with logger.scope("Building installer for variant: ".format(pkgpanda.util.variant_name(variant))):
+            bootstrap_installer_name = '{}installer'.format(pkgpanda.util.variant_prefix(variant))
+            bootstrap_installer_id = all_bootstraps[bootstrap_installer_name]
+
+            installer_filename = make_installer_docker(variant, bootstrap_info['bootstrap_id'],
+                                                       bootstrap_installer_id)
+
+            yield {
+                'channel_path': 'dcos_generate_config.{}sh'.format(pkgpanda.util.variant_prefix(variant)),
+                'local_path': installer_filename
+            }

     # Build dcos-launch
     # TODO(cmaloney): This really doesn't belong to here, but it's the best place it fits for now.
     # dcos-launch works many places which aren't bash / on-premise installers.
     # It also isn't dependent on the "reproducible" artifacts at all. Just the commit...
-    yield {
-        'channel_path': 'dcos-launch',
-        'local_path': make_dcos_launch()
-    }
+    with logger.scope("building dcos-launch"):
+        yield {
+            'channel_path': 'dcos-launch',
+            'local_path': make_dcos_launch()
+        }

packages/dcos-image-deps/buildinfo.json (+3, -2)

@@ -12,8 +12,9 @@
     "python-pyyaml",
     "python-requests",
     "python-retrying",
-    "six"
-  ],
+    "six",
+    "teamcity-messages"
+  ],
   "sources": {
     "aiohttp": {
       "kind": "url_extract",

packages/pkgpanda-api/buildinfo.json (+2, -1)

@@ -2,6 +2,7 @@
   "requires": [
     "dcos-image",
     "python",
-    "python-gunicorn"
+    "python-gunicorn",
+    "teamcity-messages"
   ]
 }

packages/pytest/build (+1, -3)

@@ -3,6 +3,4 @@ source /opt/mesosphere/environment.export
 export LIB_INSTALL_DIR="$PKG_PATH/lib/python3.5/site-packages"
 mkdir -p "$LIB_INSTALL_DIR"

-for package in pytest teamcity-messages; do
-    pip3 install --no-deps --install-option="--prefix=$PKG_PATH" --root=/ /pkg/src/$package
-done
+pip3 install --no-deps --install-option="--prefix=$PKG_PATH" --root=/ /pkg/src/$PKG_NAME

packages/pytest/buildinfo.json (+5, -12)

@@ -1,15 +1,8 @@
 {
-  "requires": ["python"],
-  "sources": {
-    "pytest": {
-      "kind": "url_extract",
-      "url": "https://pypi.python.org/packages/a0/2f/076c816e2402c4911ccee4b93ba0475145b7cffd0320ca8efa0add7c469c/pytest-3.0.3.tar.gz",
-      "sha1": "e7fb899ecac031a84e66f991289203d3c1f7c6cb"
-    },
-    "teamcity-messages": {
-      "kind": "url_extract",
-      "url": "https://pypi.python.org/packages/69/0f/7ba789bcc0ad43b7d88582722b2dbeb0595d5f9b66e55fbc3e01bb771459/teamcity-messages-1.19.tar.gz",
-      "sha1": "6b7fe243a3db67577f3b5b3134850256e3df5aa2"
-    }
+  "requires": ["python", "teamcity-messages"],
+  "single_source": {
+    "kind": "url_extract",
+    "url": "https://pypi.python.org/packages/a0/2f/076c816e2402c4911ccee4b93ba0475145b7cffd0320ca8efa0add7c469c/pytest-3.0.3.tar.gz",
+    "sha1": "e7fb899ecac031a84e66f991289203d3c1f7c6cb"
   }
 }

packages/teamcity-messages/build (+6)

@@ -0,0 +1,6 @@
+#!/bin/bash
+source /opt/mesosphere/environment.export
+export LIB_INSTALL_DIR="$PKG_PATH/lib/python3.5/site-packages"
+mkdir -p "$LIB_INSTALL_DIR"
+
+pip3 install --no-deps --install-option="--prefix=$PKG_PATH" --root=/ /pkg/src/$PKG_NAME
packages/teamcity-messages/buildinfo.json (+8)

@@ -0,0 +1,8 @@
+{
+  "requires": ["python"],
+  "single_source": {
+    "kind": "url_extract",
+    "url": "https://pypi.python.org/packages/69/0f/7ba789bcc0ad43b7d88582722b2dbeb0595d5f9b66e55fbc3e01bb771459/teamcity-messages-1.19.tar.gz",
+    "sha1": "6b7fe243a3db67577f3b5b3134850256e3df5aa2"
+  }
+}

pkgpanda/build/__init__.py (+30, -26)

@@ -20,8 +20,8 @@
 from pkgpanda.constants import RESERVED_UNIT_NAMES
 from pkgpanda.exceptions import FetchError, PackageError, ValidationError
 from pkgpanda.util import (check_forbidden_services, download_atomic, load_json,
-                           load_string, make_file, make_tar, rewrite_symlinks,
-                           write_json, write_string)
+                           load_string, logger, make_file, make_tar,
+                           rewrite_symlinks, write_json, write_string)


 class BuildError(Exception):
@@ -713,13 +713,13 @@ def visit_packages(package_tuples):
     else:
         package_sets = package_store.get_all_package_sets()

-    # Build all required packages for all tree variants.
-    for package_set in package_sets:
-        visit_packages(package_set.all_packages)
+    with logger.scope("resolve package graph"):
+        # Build all required packages for all tree variants.
+        for package_set in package_sets:
+            visit_packages(package_set.all_packages)

     built_packages = dict()
     for (name, variant) in build_order:
-        print("Building: {} variant {}".format(name, pkgpanda.util.variant_str(variant)))
         built_packages.setdefault(name, dict())

         # Run the build, store the built package path for later use.
@@ -732,16 +732,16 @@ def visit_packages(package_tuples):

     # Build bootstrap tarballs for all tree variants.
     def make_bootstrap(package_set):
-        print("Making bootstrap variant:", pkgpanda.util.variant_name(package_set.variant))
-        package_paths = list()
-        for name, pkg_variant in package_set.bootstrap_packages:
-            package_paths.append(built_packages[name][pkg_variant])
+        with logger.scope("Making bootstrap variant: {}".format(pkgpanda.util.variant_name(package_set.variant))):
+            package_paths = list()
+            for name, pkg_variant in package_set.bootstrap_packages:
+                package_paths.append(built_packages[name][pkg_variant])

-        if mkbootstrap:
-            return make_bootstrap_tarball(
-                package_store,
-                list(sorted(package_paths)),
-                package_set.variant)
+            if mkbootstrap:
+                return make_bootstrap_tarball(
+                    package_store,
+                    list(sorted(package_paths)),
+                    package_set.variant)

     # Build bootstraps and and package lists for all variants.
     # TODO(cmaloney): Allow distinguishing between "build all" and "build the default one".
@@ -826,8 +826,13 @@ def get_build_ids(self):


 def build(package_store, name, variant, clean_after_build, recursive=False):
+    msg = "Building package {} variant {}".format(name, pkgpanda.util.variant_name(variant))
+    with logger.scope(msg):
+        return _build(package_store, name, variant, clean_after_build, recursive)
+
+
+def _build(package_store, name, variant, clean_after_build, recursive):
     assert isinstance(package_store, PackageStore)
-    print("Building package {} variant {}".format(name, pkgpanda.util.variant_str(variant)))
     tmpdir = tempfile.TemporaryDirectory(prefix="pkgpanda_repo")
     repository = Repository(tmpdir.name)

@@ -1230,17 +1235,16 @@ def clean():
     # TODO(cmaloney): Move to an RAII wrapper.
     check_call(['rm', '-rf', install_dir])

-    print("Building package tarball")
-
-    # Check for forbidden services before packaging the tarball:
-    try:
-        check_forbidden_services(cache_abs("result"), RESERVED_UNIT_NAMES)
-    except ValidationError as ex:
-        raise BuildError("Package validation failed: {}".format(ex))
+    with logger.scope("Build package tarball"):
+        # Check for forbidden services before packaging the tarball:
+        try:
+            check_forbidden_services(cache_abs("result"), RESERVED_UNIT_NAMES)
+        except ValidationError as ex:
+            raise BuildError("Package validation failed: {}".format(ex))

-    # TODO(cmaloney): Updating / filling last_build should be moved out of
-    # the build function.
-    write_string(package_store.get_last_build_filename(name, variant), str(pkg_id))
+        # TODO(cmaloney): Updating / filling last_build should be moved out of
+        # the build function.
+        write_string(package_store.get_last_build_filename(name, variant), str(pkg_id))

     # Bundle the artifacts into the pkgpanda package
     tmp_name = pkg_path + "-tmp.tar.xz"

pkgpanda/build/src_fetchers.py (+3, -3)

@@ -4,7 +4,7 @@
 from subprocess import CalledProcessError, check_call, check_output

 from pkgpanda.exceptions import ValidationError
-from pkgpanda.util import download_atomic, sha1
+from pkgpanda.util import download_atomic, logger, sha1


 # Ref must be a git sha-1. We then pass it through get_sha1 to make
@@ -104,8 +104,8 @@ def checkout_to(self, directory):
         except Exception as ex:
             raise ValidationError("Unable to find sha1 of ref_origin {}: {}".format(self.ref_origin, ex))
         if self.ref != origin_commit:
-            print(
-                "WARNING: Current ref doesn't match the ref origin. "
+            logger.warning(
+                "Current ref doesn't match the ref origin. "
                 "Package ref should probably be updated to pick up "
                 "new changes to the code:" +
                 " Current: {}, Origin: {}".format(self.ref,

0 commit comments

Comments
 (0)