From b1747d20b897fee59e6ae4b5bb5d2f0a8ab7df17 Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Fri, 22 Dec 2017 10:22:53 -0700 Subject: [PATCH 01/31] Squashed 'manage_externals/' content from commit d29760f7 git-subtree-dir: manage_externals git-subtree-split: d29760f7afc039f2c8a1bd13a02886de43b26454 --- .dir_locals.el | 12 + .github/ISSUE_TEMPLATE.md | 6 + .github/PULL_REQUEST_TEMPLATE.md | 17 + .gitignore | 14 + .travis.yml | 32 + LICENSE.txt | 34 + README.md | 166 +++ README_FIRST | 54 + checkout_externals | 42 + manic/__init__.py | 9 + manic/checkout.py | 295 ++++ manic/externals_description.py | 375 +++++ manic/externals_status.py | 126 ++ manic/global_constants.py | 14 + manic/repository.py | 73 + manic/repository_factory.py | 29 + manic/repository_git.py | 668 +++++++++ manic/repository_svn.py | 232 ++++ manic/sourcetree.py | 296 ++++ manic/utils.py | 237 ++++ test/.coveragerc | 7 + test/.gitignore | 7 + test/.pylint.rc | 426 ++++++ test/Makefile | 118 ++ test/README.md | 77 ++ test/doc/.gitignore | 2 + test/doc/Makefile | 20 + test/doc/conf.py | 172 +++ test/doc/develop.rst | 202 +++ test/doc/index.rst | 22 + test/doc/testing.rst | 123 ++ test/repos/container.git/HEAD | 1 + test/repos/container.git/config | 6 + test/repos/container.git/description | 1 + test/repos/container.git/info/exclude | 6 + .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 0 -> 133 bytes .../71/5b8f3e4afe1802a178e1d603af404ba45d59de | Bin 0 -> 136 bytes .../b0/f87705e2b9601cb831878f3d51efa78b910d7b | Bin 0 -> 89 bytes .../f9/e08370a737e941de6f6492e3f427c2ef4c1a03 | Bin 0 -> 81 bytes test/repos/container.git/refs/heads/master | 1 + test/repos/error/readme.txt | 3 + test/repos/mixed-cont-ext.git/HEAD | 1 + test/repos/mixed-cont-ext.git/config | 6 + test/repos/mixed-cont-ext.git/description | 1 + test/repos/mixed-cont-ext.git/info/exclude | 6 + .../06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 | Bin 0 -> 136 bytes .../37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 | Bin 0 -> 89 bytes .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 0 -> 133 bytes .../fd/15a5ad5204356229c60a831d2a8120a43ac901 | 2 + .../mixed-cont-ext.git/refs/heads/master | 1 + test/repos/simple-ext-fork.git/HEAD | 1 + test/repos/simple-ext-fork.git/config | 8 + test/repos/simple-ext-fork.git/description | 1 + test/repos/simple-ext-fork.git/info/exclude | 6 + .../00/fd13e76189f9134b0506b4b8ed3172723b467f | Bin 0 -> 89 bytes .../0b/15e8af3d4615b42314216efeae3fff184046a8 | Bin 0 -> 89 bytes .../0b/67df4e7e8e6e1c6e401542738b352d18744677 | Bin 0 -> 167 bytes .../11/a76e3d9a67313dec7ce1230852ab5c86352c5c | 2 + .../16/5506a7408a482f50493434e13fffeb44af893f | Bin 0 -> 89 bytes .../32/7e97d86e941047d809dba58f2804740c6c30cf | Bin 0 -> 89 bytes .../36/418b4e5665956a90725c9a1b5a8e551c5f3d48 | Bin 0 -> 159 bytes .../3d/7099c35404ae6c8640ce263b38bef06e98cc26 | 2 + .../3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b | 2 + .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 0 -> 133 bytes .../56/175e017ad38bf3d33d74b6bd7c74624b28466a | Bin 0 -> 89 bytes .../67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 | Bin 0 -> 165 bytes .../7b/0bd630ac13865735a1dff3437a137d8ab50663 | Bin 0 -> 119 bytes .../88/cf20868e0cc445f5642a480ed034c71e0d7e9f | 2 + .../8d/2b3b35126224c975d23f109aa1e3cbac452989 | 2 + .../9b/75494003deca69527bb64bcaa352e801611dd2 | Bin 0 -> 138 bytes .../a2/2a5da9119328ea6d693f88861457c07e14ac04 | 1 + .../b9/3737be3ea6b19f6255983748a0a0f4d622f936 | Bin 0 -> 89 bytes .../c5/32bc8fde96fa63103a52057f0baffcc9f00c6b | 1 + .../c5/b315915742133dbdfbeed0753e481b55c1d364 | 1 + 
.../f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca | 1 + test/repos/simple-ext-fork.git/packed-refs | 5 + .../simple-ext-fork.git/refs/heads/feature2 | 1 + .../refs/tags/forked-feature-v1 | 1 + test/repos/simple-ext.git/HEAD | 1 + test/repos/simple-ext.git/config | 6 + test/repos/simple-ext.git/description | 1 + test/repos/simple-ext.git/info/exclude | 6 + .../00/fd13e76189f9134b0506b4b8ed3172723b467f | Bin 0 -> 89 bytes .../0b/15e8af3d4615b42314216efeae3fff184046a8 | Bin 0 -> 89 bytes .../11/a76e3d9a67313dec7ce1230852ab5c86352c5c | 2 + .../36/418b4e5665956a90725c9a1b5a8e551c5f3d48 | Bin 0 -> 159 bytes .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 0 -> 133 bytes .../9b/75494003deca69527bb64bcaa352e801611dd2 | Bin 0 -> 138 bytes .../a2/2a5da9119328ea6d693f88861457c07e14ac04 | 1 + .../c5/b315915742133dbdfbeed0753e481b55c1d364 | 1 + test/repos/simple-ext.git/refs/heads/feature2 | 1 + test/repos/simple-ext.git/refs/heads/master | 1 + test/repos/simple-ext.git/refs/tags/tag1 | 1 + test/requirements.txt | 5 + test/test_sys_checkout.py | 1219 +++++++++++++++++ test/test_unit_externals_description.py | 343 +++++ test/test_unit_externals_status.py | 299 ++++ test/test_unit_repository.py | 154 +++ test/test_unit_repository_git.py | 1013 ++++++++++++++ test/test_unit_repository_svn.py | 489 +++++++ test/test_unit_utils.py | 278 ++++ 101 files changed, 7800 insertions(+) create mode 100644 .dir_locals.el create mode 100644 .github/ISSUE_TEMPLATE.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 LICENSE.txt create mode 100644 README.md create mode 100644 README_FIRST create mode 100755 checkout_externals create mode 100644 manic/__init__.py create mode 100755 manic/checkout.py create mode 100644 manic/externals_description.py create mode 100644 manic/externals_status.py create mode 100644 manic/global_constants.py create mode 100644 manic/repository.py create mode 100644 manic/repository_factory.py create mode 100644 manic/repository_git.py create mode 100644 manic/repository_svn.py create mode 100644 manic/sourcetree.py create mode 100644 manic/utils.py create mode 100644 test/.coveragerc create mode 100644 test/.gitignore create mode 100644 test/.pylint.rc create mode 100644 test/Makefile create mode 100644 test/README.md create mode 100644 test/doc/.gitignore create mode 100644 test/doc/Makefile create mode 100644 test/doc/conf.py create mode 100644 test/doc/develop.rst create mode 100644 test/doc/index.rst create mode 100644 test/doc/testing.rst create mode 100644 test/repos/container.git/HEAD create mode 100644 test/repos/container.git/config create mode 100644 test/repos/container.git/description create mode 100644 test/repos/container.git/info/exclude create mode 100644 test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 create mode 100644 test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de create mode 100644 test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b create mode 100644 test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 create mode 100644 test/repos/container.git/refs/heads/master create mode 100644 test/repos/error/readme.txt create mode 100644 test/repos/mixed-cont-ext.git/HEAD create mode 100644 test/repos/mixed-cont-ext.git/config create mode 100644 test/repos/mixed-cont-ext.git/description create mode 100644 test/repos/mixed-cont-ext.git/info/exclude create mode 100644 
test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 create mode 100644 test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 create mode 100644 test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 create mode 100644 test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 create mode 100644 test/repos/mixed-cont-ext.git/refs/heads/master create mode 100644 test/repos/simple-ext-fork.git/HEAD create mode 100644 test/repos/simple-ext-fork.git/config create mode 100644 test/repos/simple-ext-fork.git/description create mode 100644 test/repos/simple-ext-fork.git/info/exclude create mode 100644 test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f create mode 100644 test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 create mode 100644 test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 create mode 100644 test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c create mode 100644 test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f create mode 100644 test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf create mode 100644 test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 create mode 100644 test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26 create mode 100644 test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b create mode 100644 test/repos/simple-ext-fork.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 create mode 100644 test/repos/simple-ext-fork.git/objects/56/175e017ad38bf3d33d74b6bd7c74624b28466a create mode 100644 test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 create mode 100644 test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 create mode 100644 test/repos/simple-ext-fork.git/objects/88/cf20868e0cc445f5642a480ed034c71e0d7e9f create mode 100644 test/repos/simple-ext-fork.git/objects/8d/2b3b35126224c975d23f109aa1e3cbac452989 create mode 100644 test/repos/simple-ext-fork.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 create mode 100644 test/repos/simple-ext-fork.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 create mode 100644 test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 create mode 100644 test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b create mode 100644 test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 create mode 100644 test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca create mode 100644 test/repos/simple-ext-fork.git/packed-refs create mode 100644 test/repos/simple-ext-fork.git/refs/heads/feature2 create mode 100644 test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 create mode 100644 test/repos/simple-ext.git/HEAD create mode 100644 test/repos/simple-ext.git/config create mode 100644 test/repos/simple-ext.git/description create mode 100644 test/repos/simple-ext.git/info/exclude create mode 100644 test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f create mode 100644 test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 create mode 100644 test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c create mode 100644 
test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 create mode 100644 test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 create mode 100644 test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 create mode 100644 test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 create mode 100644 test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 create mode 100644 test/repos/simple-ext.git/refs/heads/feature2 create mode 100644 test/repos/simple-ext.git/refs/heads/master create mode 100644 test/repos/simple-ext.git/refs/tags/tag1 create mode 100644 test/requirements.txt create mode 100644 test/test_sys_checkout.py create mode 100644 test/test_unit_externals_description.py create mode 100644 test/test_unit_externals_status.py create mode 100644 test/test_unit_repository.py create mode 100644 test/test_unit_repository_git.py create mode 100644 test/test_unit_repository_svn.py create mode 100644 test/test_unit_utils.py diff --git a/.dir_locals.el b/.dir_locals.el new file mode 100644 index 0000000000..a370490e92 --- /dev/null +++ b/.dir_locals.el @@ -0,0 +1,12 @@ +; -*- mode: Lisp -*- + +((python-mode + . ( + ;; fill the paragraph to 80 columns when using M-q + (fill-column . 80) + + ;; Use 4 spaces to indent in Python + (python-indent-offset . 4) + (indent-tabs-mode . nil) + ))) + diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..8ecb2ae64b --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,6 @@ +### Summary of Issue: +### Expected behavior and actual behavior: +### Steps to reproduce the problem (should include model description file(s) or link to publi c repository): +### What is the changeset ID of the code, and the machine you are using: +### have you modified the code? If so, it must be committed and available for testing: +### Screen output or log file showing the error message and context: diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..b68b1fb5e2 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,17 @@ +[ 50 character, one line summary ] + +[ Description of the changes in this commit. It should be enough + information for someone not following this development to understand. + Lines should be wrapped at about 72 characters. ] + +User interface changes?: [ No/Yes ] +[ If yes, describe what changed, and steps taken to ensure backward compatibilty ] + +Fixes: [Github issue #s] And brief description of each issue. + +Testing: + test removed: + unit tests: + system tests: + manual testing: + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..411de5d96e --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +# directories that are checked out by the tool +cime/ +cime_config/ +components/ + +# generated local files +*.log + +# editor files +*~ +*.bak + +# generated python files +*.pyc diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000..5da83c5654 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,32 @@ +# NOTE(bja, 2017-11) travis-ci dosen't support python language builds +# on mac os. As a work around, we use built-in python on linux, and +# declare osx a 'generic' language, and create our own python env. 
+ +language: python +os: linux +python: + - "2.7" + - "3.4" + - "3.5" + - "3.6" +matrix: + include: + - os: osx + language: generic + before_install: + # NOTE(bja, 2017-11) update is slow, 2.7.12 installed by default, good enough! + # - brew update + # - brew outdated python2 || brew upgrade python2 + - virtualenv env -p python2 + - source env/bin/activate +install: + - pip install -r test/requirements.txt +before_script: + - git --version +script: + - cd test; make test + - cd test; make lint +after_success: + - cd test; make coverage + - cd test; coveralls + \ No newline at end of file diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..69d97201e3 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,34 @@ +Copyright (c) 2017, University Corporation for Atmospheric Research (UCAR) +All rights reserved. + +Developed by: + University Corporation for Atmospheric Research - National Center for Atmospheric Research + https://www2.cesm.ucar.edu/working-groups/sewg + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the "Software"), +to deal with the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom +the Software is furnished to do so, subject to the following conditions: + + - Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + - Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimers in the documentation + and/or other materials provided with the distribution. + - Neither the names of [Name of Development Group, UCAR], + nor the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000..90ed2d1459 --- /dev/null +++ b/README.md @@ -0,0 +1,166 @@ +-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT -- + +-n [![Build Status](https://travis-ci.org/NCAR/manage_externals.svg?branch=master)](https://travis-ci.org/NCAR/manage_externals) +[![Coverage Status](https://coveralls.io/repos/github/NCAR/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/NCAR/manage_externals?branch=master) +``` + +usage: checkout_externals [-h] [-e [EXTERNALS]] [-o] [-S] [-v] [--backtrace] + [-d] + +checkout_externals manages checking out CESM externals from revision control +based on a externals description file. By default only the required +externals are checkout out. 
+ +NOTE: checkout_externals *MUST* be run from the root of the source tree. + +Running checkout_externals without the '--status' option will always attempt to +synchronize the working copy with the externals description. + +optional arguments: + -h, --help show this help message and exit + -e [EXTERNALS], --externals [EXTERNALS] + The externals description filename. Default: CESM.cfg. + -o, --optional By default only the required externals are checked + out. This flag will also checkout the optional + externals. + -S, --status Output status of the repositories managed by + checkout_externals. By default only summary + information is provided. Use verbose output to see + details. + -v, --verbose Output additional information to the screen and log + file. + --backtrace DEVELOPER: show exception backtraces as extra + debugging output + -d, --debug DEVELOPER: output additional debugging information to + the screen and log file. + +``` +NOTE: checkout_externals *MUST* be run from the root of the source tree it +is managing. For example, if you cloned CLM with: + + $ git clone git@github.com/ncar/clm clm-dev + +Then the root of the source tree is /path/to/clm-dev. If you obtained +CLM via a checkout of CESM: + + $ git clone git@github.com/escomp/cesm cesm-dev + +and you need to checkout the CLM externals, then the root of the +source tree is /path/to/cesm-dev. Do *NOT* run checkout_externals +from within /path/to/cesm-dev/components/clm. + +The root of the source tree will be referred to as `${SRC_ROOT}` below. + +# Supported workflows + + * Checkout all required components from the default externals + description file: + + $ cd ${SRC_ROOT} + $ ./manage_externals/checkout_externals + + * To update all required components to the current values in the + externals description file, re-run checkout_externals: + + $ cd ${SRC_ROOT} + $ ./manage_externals/checkout_externals + + If there are *any* modifications to *any* working copy according + to the git or svn 'status' command, checkout_externals + will not update any external repositories. Modifications + include: modified files, added files, removed files, missing + files, or untracked files. + + * Checkout all required components from a user-specified externals + description file: + + $ cd ${SRC_ROOT} + $ ./manage_externals/checkout_externals --externals myCESM.xml + + * Status summary of the repositories managed by checkout_externals: + + $ cd ${SRC_ROOT} + $ ./manage_externals/checkout_externals --status + + ./cime + m ./components/cism + ./components/mosart + e-o ./components/rtm + M ./src/fates + e-o ./tools/PTCLM + + where: + * column one indicates the status of the repository in relation + to the externals description file. + * column two indicates whether the working copy has modified files. + * column three shows how the repository is managed, optional or required + + Column one will be one of these values: + * m : modified : repository is modified compared to the externals description + * e : empty : directory does not exist - checkout_externals has not been run + * ? 
: unknown : directory exists but .git or .svn directories are missing + + Column two will be one of these values: + * M : Modified : untracked, modified, added, deleted or missing files + * : blank / space : clean + * - : dash : no meaningful state, for empty repositories + + Column three will be one of these values: + * o : optional : optional repository + * : blank / space : required repository + + * Detailed git or svn status of the repositories managed by checkout_externals: + + $ cd ${SRC_ROOT} + $ ./manage_externals/checkout_externals --status --verbose + +# Externals description file + + The externals description contains a list of the external + repositories that are used and their version control locations. Each + external has: + + * name (string) : component name, e.g. cime, cism, clm, cam, etc. + + * required (boolean) : whether the component is a required checkout + + * local_path (string) : component path *relative* to where + checkout_externals is called. + + * protocol (string) : version control protocol that is used to + manage the component. Valid values are 'git', 'svn', + 'externals_only'. + + Note: 'externals_only' will only process the external's own + external description file without trying to manage a repository + for the component. This is used for retrieving externals for + standalone components like cam and clm. + + * repo_url (string) : URL for the repository location, examples: + * https://svn-ccsm-models.cgd.ucar.edu/glc + * git@github.com:esmci/cime.git + * /path/to/local/repository + + If a repo url is determined to be a local path (not a network url) + then user expansion, e.g. ~/, and environment variable expansion, + e.g. $HOME or $REPO_ROOT, will be performed. + + Relative paths are difficult to get correct, especially for mixed + use repos like clm. It is advised that local paths expand to + absolute paths. If relative paths are used, they should be + relative to one level above local_path. If local path is + 'src/foo', then the relative url should be relative to + 'src'. + + * tag (string) : tag to checkout + + * branch (string) : branch to checkout + + Note: either tag or branch must be supplied, but not both. + + * externals (string) : relative path to the external's own external + description file that should also be used. It is *relative* to the + component local_path. For example, the CESM externals description + will load clm. CLM has additional externals that must be + downloaded to be complete. Those additional externals are managed + from the clm source root by the file pointed to by 'externals'. diff --git a/README_FIRST b/README_FIRST new file mode 100644 index 0000000000..c8a47d7806 --- /dev/null +++ b/README_FIRST @@ -0,0 +1,54 @@ +CESM is comprised of a number of different components that are +developed and managed independently. Each component may have +additional 'external' dependencies and optional parts that are also +developed and managed independently. + +The checkout_externals.py tool manages retrieving and updating the +components and their externals so you have a complete set of source +files for the model. + +checkout_externals.py relies on a model description file that +describes what components are needed, where to find them and where to +put them in the source tree. The default file is called "CESM.xml" +regardless of whether you are checking out CESM or a standalone +component. + +checkout_externals requires access to git and svn repositories that +require authentication. 
checkout_externals may pass through +authentication requests, but it will not cache them for you. For the +best and most robust user experience, you should have svn and git +working without password authentication. See: + + https://help.github.com/articles/connecting-to-github-with-ssh/ + + ?svn ref? + +NOTE: checkout_externals.py *MUST* be run from the root of the source +tree it is managing. For example, if you cloned CLM with: + + $ git clone git@github.com/ncar/clm clm-dev + +Then the root of the source tree is /path/to/clm-dev. If you obtained +CLM via an svn checkout of CESM and you need to checkout the CLM +externals, then the root of the source tree for CLM is: + + /path/to/cesm-dev/components/clm + +The root of the source tree will be referred to as ${SRC_ROOT} below. + +To get started quickly, checkout all required components from the +default model description file: + + $ cd ${SRC_ROOT} + $ ./checkout_cesm/checkout_externals.py + +For additional information about using checkout_externals.py, please see: + + ${SRC_ROOT}/checkout_cesm/README + +or run: + + $ cd ${SRC_ROOT} + $ ./checkout_cesm/checkout_externals.py --help + + diff --git a/checkout_externals b/checkout_externals new file mode 100755 index 0000000000..0167049bff --- /dev/null +++ b/checkout_externals @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +"""Main driver wrapper around the manic/checkout utility. + +Tool to assemble external repositories represented in an externals +description file. + +""" +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import logging +import sys +import traceback + +import manic + +if sys.hexversion < 0x02070000: + print(70 * '*') + print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) + print('It appears that you are running python {0}'.format( + '.'.join(str(x) for x in sys.version_info[0:3]))) + print(70 * '*') + sys.exit(1) + + +if __name__ == '__main__': + logging.basicConfig(filename=manic.global_constants.LOG_FILE_NAME, + format='%(levelname)s : %(asctime)s : %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + level=logging.DEBUG) + + ARGS = manic.checkout.commandline_arguments() + try: + RET_STATUS, _ = manic.checkout.main(ARGS) + sys.exit(RET_STATUS) + except Exception as error: # pylint: disable=broad-except + manic.printlog(str(error)) + if ARGS.backtrace: + traceback.print_exc() + sys.exit(1) diff --git a/manic/__init__.py b/manic/__init__.py new file mode 100644 index 0000000000..e4d9b552d3 --- /dev/null +++ b/manic/__init__.py @@ -0,0 +1,9 @@ +"""Public API for the manage_externals library +""" + +import manic.checkout as checkout +from manic.utils import printlog + +__all__ = [ + 'checkout', 'printlog', +] diff --git a/manic/checkout.py b/manic/checkout.py new file mode 100755 index 0000000000..6d0108b656 --- /dev/null +++ b/manic/checkout.py @@ -0,0 +1,295 @@ +#!/usr/bin/env python + +""" +Tool to assemble repositories represented in a model-description file. + +If loaded as a module (e.g., in a component's buildcpp), it can be used +to check the validity of existing subdirectories and load missing sources. 
+""" +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import argparse +import logging +import os +import os.path +import sys +import textwrap + +from manic.externals_description import create_externals_description +from manic.externals_description import read_externals_description_file +from manic.externals_status import check_safe_to_update_repos +from manic.sourcetree import SourceTree +from manic.utils import printlog +from manic.global_constants import VERSION_SEPERATOR + +if sys.hexversion < 0x02070000: + print(70 * '*') + print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) + print('It appears that you are running python {0}'.format( + VERSION_SEPERATOR.join(str(x) for x in sys.version_info[0:3]))) + print(70 * '*') + sys.exit(1) + + +# --------------------------------------------------------------------- +# +# User input +# +# --------------------------------------------------------------------- +def commandline_arguments(args=None): + """Process the command line arguments + + Params: args - optional args. Should only be used during systems + testing. + + Returns: processed command line arguments + """ + description = ''' +%(prog)s manages checking out CESM externals from revision control +based on a externals description file. By default only the required +externals are checkout out. + +NOTE: %(prog)s *MUST* be run from the root of the source tree. + +Running %(prog)s without the '--status' option will always attempt to +synchronize the working copy with the externals description. +''' + + epilog = ''' +``` +NOTE: %(prog)s *MUST* be run from the root of the source tree it +is managing. For example, if you cloned CLM with: + + $ git clone git@github.com/ncar/clm clm-dev + +Then the root of the source tree is /path/to/clm-dev. If you obtained +CLM via a checkout of CESM: + + $ git clone git@github.com/escomp/cesm cesm-dev + +and you need to checkout the CLM externals, then the root of the +source tree is /path/to/cesm-dev. Do *NOT* run %(prog)s +from within /path/to/cesm-dev/components/clm. + +The root of the source tree will be referred to as `${SRC_ROOT}` below. + + +# Supported workflows + + * Checkout all required components from the default externals + description file: + + $ cd ${SRC_ROOT} + $ ./manage_externals/%(prog)s + + * To update all required components to the current values in the + externals description file, re-run %(prog)s: + + $ cd ${SRC_ROOT} + $ ./manage_externals/%(prog)s + + If there are *any* modifications to *any* working copy according + to the git or svn 'status' command, %(prog)s + will not update any external repositories. Modifications + include: modified files, added files, removed files, missing + files or untracked files, + + * Checkout all required components from a user specified externals + description file: + + $ cd ${SRC_ROOT} + $ ./manage_externals/%(prog)s --excernals myCESM.xml + + * Status summary of the repositories managed by %(prog)s: + + $ cd ${SRC_ROOT} + $ ./manage_externals/%(prog)s --status + + ./cime + m ./components/cism + ./components/mosart + e-o ./components/rtm + M ./src/fates + e-o ./tools/PTCLM + + + where: + * column one indicates the status of the repository in relation + to the externals description file. + * column two indicates whether the working copy has modified files. 
+ * column three shows how the repository is managed, optional or required + + Column one will be one of these values: + * m : modified : repository is modified compared to the externals description + * e : empty : directory does not exist - %(prog)s has not been run + * ? : unknown : directory exists but .git or .svn directories are missing + + Column two will be one of these values: + * M : Modified : untracked, modified, added, deleted or missing files + * : blank / space : clean + * - : dash : no meaningful state, for empty repositories + + Column three will be one of these values: + * o : optional : optional repository + * : blank / space : required repository + + * Detailed git or svn status of the repositories managed by %(prog)s: + + $ cd ${SRC_ROOT} + $ ./manage_externals/%(prog)s --status --verbose + +# Externals description file + + The externals description contains a list of the external + repositories that are used and their version control locations. Each + external has: + + * name (string) : component name, e.g. cime, cism, clm, cam, etc. + + * required (boolean) : whether the component is a required checkout + + * local_path (string) : component path *relative* to where + %(prog)s is called. + + * protocol (string) : version control protocol that is used to + manage the component. Valid values are 'git', 'svn', + 'externals_only'. + + Note: 'externals_only' will only process the external's own + external description file without trying to manage a repository + for the component. This is used for retrieving externals for + standalone components like cam and clm. + + * repo_url (string) : URL for the repository location, examples: + * https://svn-ccsm-models.cgd.ucar.edu/glc + * git@github.com:esmci/cime.git + * /path/to/local/repository + + If a repo url is determined to be a local path (not a network url) + then user expansion, e.g. ~/, and environment variable expansion, + e.g. $HOME or $REPO_ROOT, will be performed. + + Relative paths are difficult to get correct, especially for mixed + use repos like clm. It is advised that local paths expand to + absolute paths. If relative paths are used, they should be + relative to one level above local_path. If local path is + 'src/foo', then the relative url should be relative to + 'src'. + + * tag (string) : tag to checkout + + * branch (string) : branch to checkout + + Note: either tag or branch must be supplied, but not both. + + * externals (string) : relative path to the external's own external + description file that should also be used. It is *relative* to the + component local_path. For example, the CESM externals description + will load clm. CLM has additional externals that must be + downloaded to be complete. Those additional externals are managed + from the clm source root by the file pointed to by 'externals'. + +''' + + parser = argparse.ArgumentParser( + description=description, epilog=epilog, + formatter_class=argparse.RawDescriptionHelpFormatter) + + # + # user options + # + parser.add_argument('-e', '--externals', nargs='?', default='CESM.cfg', + help='The externals description filename. ' + 'Default: %(default)s.') + + parser.add_argument('-o', '--optional', action='store_true', default=False, + help='By default only the required externals ' + 'are checked out. This flag will also checkout the ' + 'optional externals.') + + parser.add_argument('-S', '--status', action='store_true', default=False, + help='Output status of the repositories managed by ' + '%(prog)s. 
By default only summary information ' + 'is provided. Use verbose output to see details.') + + parser.add_argument('-v', '--verbose', action='store_true', default=False, + help='Output additional information to ' + 'the screen and log file.') + + # + # developer options + # + parser.add_argument('--backtrace', action='store_true', + help='DEVELOPER: show exception backtraces as extra ' + 'debugging output') + + parser.add_argument('-d', '--debug', action='store_true', default=False, + help='DEVELOPER: output additional debugging ' + 'information to the screen and log file.') + + if args: + options = parser.parse_args(args) + else: + options = parser.parse_args() + return options + + +# --------------------------------------------------------------------- +# +# main +# +# --------------------------------------------------------------------- +def main(args): + """ + Function to call when module is called from the command line. + Parse externals file and load required repositories or all repositories if + the --all option is passed. + """ + logging.info('Begining of checkout_externals') + + load_all = False + if args.optional: + load_all = True + + root_dir = os.path.abspath(os.getcwd()) + external_data = read_externals_description_file(root_dir, args.externals) + external = create_externals_description(external_data) + + source_tree = SourceTree(root_dir, external) + printlog('Checking status of externals: ', end='') + tree_status = source_tree.status() + printlog('') + + if args.status: + # user requested status-only + for comp in sorted(tree_status.keys()): + msg = str(tree_status[comp]) + printlog(msg) + if args.verbose: + # user requested verbose status dump of the git/svn status commands + source_tree.verbose_status() + else: + # checkout / update the external repositories. + safe_to_update = check_safe_to_update_repos(tree_status) + if not safe_to_update: + # print status + for comp in sorted(tree_status.keys()): + msg = str(tree_status[comp]) + printlog(msg) + # exit gracefully + msg = textwrap.fill( + 'Some external repositories that are not in a clean ' + 'state. Please ensure all external repositories are clean ' + 'before updating.') + printlog('-' * 70) + printlog(msg) + printlog('-' * 70) + else: + source_tree.checkout(load_all) + printlog('') + + logging.info('checkout_externals completed without exceptions.') + # NOTE(bja, 2017-11) tree status is used by the systems tests + return 0, tree_status diff --git a/manic/externals_description.py b/manic/externals_description.py new file mode 100644 index 0000000000..f3d3fad78f --- /dev/null +++ b/manic/externals_description.py @@ -0,0 +1,375 @@ +#!/usr/bin/env python + +"""Model description + +Model description is the representation of the various externals +included in the model. It processes in input data structure, and +converts it into a standard interface that is used by the rest of the +system. + +To maintain backward compatibility, externals description files should +follow semantic versioning rules, http://semver.org/ + + + +""" +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import logging +import os +import os.path +import re + +# ConfigParser was renamed in python2 to configparser. In python2, +# ConfigParser returns byte strings, str, instead of unicode. We need +# unicode to be compatible with xml and json parser and python3. 
+try: + # python2 + from ConfigParser import SafeConfigParser as config_parser + from ConfigParser import MissingSectionHeaderError + from ConfigParser import NoSectionError, NoOptionError + + def config_string_cleaner(text): + """convert strings into unicode + """ + return text.decode('utf-8') +except ImportError: + # python3 + from configparser import ConfigParser as config_parser + from configparser import MissingSectionHeaderError + from configparser import NoSectionError, NoOptionError + + def config_string_cleaner(text): + """Python3 already uses unicode strings, so just return the string + without modification. + + """ + return text + +from .utils import printlog, fatal_error, str_to_bool, expand_local_url +from .global_constants import EMPTY_STR, PPRINTER, VERSION_SEPERATOR + +# +# Globals +# +DESCRIPTION_SECTION = 'externals_description' +VERSION_ITEM = 'schema_version' + + +def read_externals_description_file(root_dir, file_name): + """Given a file name containing a externals description, determine the + format and read it into it's internal representation. + + """ + root_dir = os.path.abspath(root_dir) + msg = 'In directory : {0}'.format(root_dir) + logging.info(msg) + printlog('Processing externals description file : {0}'.format(file_name)) + + file_path = os.path.join(root_dir, file_name) + if not os.path.exists(file_name): + msg = ('ERROR: Model description file, "{0}", does not ' + 'exist at path:\n {1}\nDid you run from the root of ' + 'the source tree?'.format(file_name, file_path)) + fatal_error(msg) + + externals_description = None + try: + config = config_parser() + config.read(file_path) + externals_description = config + except MissingSectionHeaderError: + # not a cfg file + pass + + if externals_description is None: + msg = 'Unknown file format!' + fatal_error(msg) + + return externals_description + + +def create_externals_description(model_data, model_format='cfg'): + """Create the a externals description object from the provided data + """ + externals_description = None + if model_format == 'dict': + externals_description = ExternalsDescriptionDict(model_data, ) + elif model_format == 'cfg': + major, _, _ = get_cfg_schema_version(model_data) + if major == 1: + externals_description = ExternalsDescriptionConfigV1(model_data) + else: + msg = ('Externals description file has unsupported schema ' + 'version "{0}".'.format(major)) + fatal_error(msg) + else: + msg = 'Unknown model data format "{0}"'.format(model_format) + fatal_error(msg) + return externals_description + + +def get_cfg_schema_version(model_cfg): + """Extract the major, minor, patch version of the config file schema + + Params: + model_cfg - config parser object containing the externas description data + + Returns: + major = integer major version + minor = integer minor version + patch = integer patch version + """ + semver_str = '' + try: + semver_str = model_cfg.get(DESCRIPTION_SECTION, VERSION_ITEM) + except (NoSectionError, NoOptionError): + msg = ('externals description file must have the required ' + 'section: "{0}" and item "{1}"'.format(DESCRIPTION_SECTION, + VERSION_ITEM)) + fatal_error(msg) + + # NOTE(bja, 2017-11) Assume we don't care about the + # build/pre-release metadata for now! 
+ version_list = re.split(r'[-+]', semver_str) + version_str = version_list[0] + version = version_str.split(VERSION_SEPERATOR) + try: + major = int(version[0].strip()) + minor = int(version[1].strip()) + patch = int(version[2].strip()) + except ValueError: + msg = ('Config file schema version must have integer digits for ' + 'major, minor and patch versions. ' + 'Received "{0}"'.format(version_str)) + fatal_error(msg) + return major, minor, patch + + +class ExternalsDescription(dict): + """Base externals description class that is independent of the user input + format. Different input formats can all be converted to this + representation to provide a consistent represtentation for the + rest of the objects in the system. + + """ + # keywords defining the interface into the externals description data + EXTERNALS = 'externals' + BRANCH = 'branch' + REPO = 'repo' + REQUIRED = 'required' + TAG = 'tag' + PATH = 'local_path' + PROTOCOL = 'protocol' + REPO_URL = 'repo_url' + NAME = 'name' + + PROTOCOL_EXTERNALS_ONLY = 'externals_only' + PROTOCOL_GIT = 'git' + PROTOCOL_SVN = 'svn' + KNOWN_PRROTOCOLS = [PROTOCOL_GIT, PROTOCOL_SVN, PROTOCOL_EXTERNALS_ONLY] + + # v1 xml keywords + _V1_TREE_PATH = 'TREE_PATH' + _V1_ROOT = 'ROOT' + _V1_TAG = 'TAG' + _V1_BRANCH = 'BRANCH' + _V1_REQ_SOURCE = 'REQ_SOURCE' + + _source_schema = {REQUIRED: True, + PATH: 'string', + EXTERNALS: 'string', + REPO: {PROTOCOL: 'string', + REPO_URL: 'string', + TAG: 'string', + BRANCH: 'string', + } + } + + def __init__(self): + """Convert the xml into a standardized dict that can be used to + construct the source objects + + """ + dict.__init__(self) + + def _check_user_input(self): + """Run a series of checks to attempt to validate the user input and + detect errors as soon as possible. + """ + self._check_optional() + self._validate() + self._check_data() + + def _check_data(self): + """Check user supplied data is valid where possible. + """ + for ext_name in self.keys(): + if (self[ext_name][self.REPO][self.PROTOCOL] + not in self.KNOWN_PRROTOCOLS): + msg = 'Unknown repository protocol "{0}" in "{1}".'.format( + self[ext_name][self.REPO][self.PROTOCOL], ext_name) + fatal_error(msg) + + if (self[ext_name][self.REPO][self.PROTOCOL] + != self.PROTOCOL_EXTERNALS_ONLY): + if (self[ext_name][self.REPO][self.TAG] and + self[ext_name][self.REPO][self.BRANCH]): + msg = ('Model description is over specified! Can not ' + 'have both "tag" and "branch" in repo ' + 'description for "{0}"'.format(ext_name)) + fatal_error(msg) + + if (not self[ext_name][self.REPO][self.TAG] and + not self[ext_name][self.REPO][self.BRANCH]): + msg = ('Model description is under specified! Must have ' + 'either "tag" or "branch" in repo ' + 'description for "{0}"'.format(ext_name)) + fatal_error(msg) + + if not self[ext_name][self.REPO][self.REPO_URL]: + msg = ('Model description is under specified! Must have ' + 'either "repo_url" in repo ' + 'description for "{0}"'.format(ext_name)) + fatal_error(msg) + + url = expand_local_url( + self[ext_name][self.REPO][self.REPO_URL], ext_name) + self[ext_name][self.REPO][self.REPO_URL] = url + + def _check_optional(self): + """Some fields like externals, repo:tag repo:branch are + (conditionally) optional. We don't want the user to be + required to enter them in every externals description file, but + still want to validate the input. Check conditions and add + default values if appropriate. 
+ + """ + for field in self: + # truely optional + if self.EXTERNALS not in self[field]: + self[field][self.EXTERNALS] = EMPTY_STR + + # git and svn repos must tags and branches for validation purposes. + if self.TAG not in self[field][self.REPO]: + self[field][self.REPO][self.TAG] = EMPTY_STR + if self.BRANCH not in self[field][self.REPO]: + self[field][self.REPO][self.BRANCH] = EMPTY_STR + if self.REPO_URL not in self[field][self.REPO]: + self[field][self.REPO][self.REPO_URL] = EMPTY_STR + + def _validate(self): + """Validate that the parsed externals description contains all necessary + fields. + + """ + def validate_data_struct(schema, data): + """Compare a data structure against a schema and validate all required + fields are present. + + """ + is_valid = False + in_ref = True + valid = True + if isinstance(schema, dict) and isinstance(data, dict): + for k in schema: + in_ref = in_ref and (k in data) + if in_ref: + valid = valid and ( + validate_data_struct(schema[k], data[k])) + is_valid = in_ref and valid + else: + is_valid = isinstance(data, type(schema)) + if not is_valid: + printlog(" Unmatched schema and data:") + if isinstance(schema, dict): + for item in schema: + printlog(" {0} schema = {1} ({2})".format( + item, schema[item], type(schema[item]))) + printlog(" {0} data = {1} ({2})".format( + item, data[item], type(data[item]))) + else: + printlog(" schema = {0} ({1})".format( + schema, type(schema))) + printlog(" data = {0} ({1})".format(data, type(data))) + return is_valid + + for field in self: + valid = validate_data_struct(self._source_schema, self[field]) + if not valid: + PPRINTER.pprint(self._source_schema) + PPRINTER.pprint(self[field]) + msg = 'ERROR: source for "{0}" did not validate'.format(field) + fatal_error(msg) + + +class ExternalsDescriptionDict(ExternalsDescription): + """Create a externals description object from a dictionary using the API + representations. Primarily used to simplify creating model + description files for unit testing. + + """ + + def __init__(self, model_data): + """Parse a native dictionary into a externals description. + """ + ExternalsDescription.__init__(self) + self.update(model_data) + self._check_user_input() + + +class ExternalsDescriptionConfigV1(ExternalsDescription): + """Create a externals description object from a config_parser object, + schema version 1. + + """ + + def __init__(self, model_data): + """Convert the xml into a standardized dict that can be used to + construct the source objects + + """ + ExternalsDescription.__init__(self) + self._remove_metadata(model_data) + self._parse_cfg(model_data) + self._check_user_input() + + @staticmethod + def _remove_metadata(model_data): + """Remove the metadata section from the model configuration file so + that it is simpler to look through the file and construct the + externals description. + + """ + model_data.remove_section(DESCRIPTION_SECTION) + + def _parse_cfg(self, cfg_data): + """Parse a config_parser object into a externals description. + """ + def list_to_dict(input_list, convert_to_lower_case=True): + """Convert a list of key-value pairs into a dictionary. 
+ """ + output_dict = {} + for item in input_list: + key = config_string_cleaner(item[0].strip()) + value = config_string_cleaner(item[1].strip()) + if convert_to_lower_case: + key = key.lower() + output_dict[key] = value + return output_dict + + for section in cfg_data.sections(): + name = config_string_cleaner(section.lower().strip()) + self[name] = {} + self[name].update(list_to_dict(cfg_data.items(section))) + self[name][self.REPO] = {} + loop_keys = self[name].copy().keys() + for item in loop_keys: + if item in self._source_schema: + if isinstance(self._source_schema[item], bool): + self[name][item] = str_to_bool(self[name][item]) + if item in self._source_schema[self.REPO]: + self[name][self.REPO][item] = self[name][item] + del self[name][item] diff --git a/manic/externals_status.py b/manic/externals_status.py new file mode 100644 index 0000000000..bae2cfd6f1 --- /dev/null +++ b/manic/externals_status.py @@ -0,0 +1,126 @@ +"""ExternalStatus + +Class to store status and state information about repositories and +create a string representation. + +""" +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +from .global_constants import EMPTY_STR + + +class ExternalStatus(object): + """Class to represent the status of a given source repository or tree. + + Individual repositories determine their own status in the + Repository objects. This object is just resposible for storing the + information and passing it up to a higher level for reporting or + global decisions. + + There are two states of concern: + + * If the repository is in-sync with the externals description file. + + * If the repostiory working copy is clean and there are no pending + transactions (e.g. add, remove, rename, untracked files). + + """ + DEFAULT = '-' + UNKNOWN = '?' + EMPTY = 'e' + MODEL_MODIFIED = 'm' + DIRTY = 'M' + + STATUS_OK = ' ' + STATUS_ERROR = '!' + + # source types + OPTIONAL = 'o' + STANDALONE = 's' + MANAGED = ' ' + + def __init__(self): + self.sync_state = self.DEFAULT + self.clean_state = self.DEFAULT + self.source_type = self.DEFAULT + self.path = EMPTY_STR + + def __str__(self): + msg = '{sync}{clean}{src_type} {path}'.format( + sync=self.sync_state, clean=self.clean_state, + src_type=self.source_type, path=self.path) + return msg + + def safe_to_update(self): + """Report if it is safe to update a repository. Safe is defined as: + + * If a repository is empty, it is safe to update. + + * If a repository exists and has a clean working copy state + with no pending transactions. + + """ + safe_to_update = False + repo_exists = self.exists() + if not repo_exists: + safe_to_update = True + else: + # If the repo exists, it must be in ok or modified + # sync_state. Any other sync_state at this point + # represents a logic error that should have been handled + # before now! + sync_safe = ((self.sync_state == ExternalStatus.STATUS_OK) or + (self.sync_state == ExternalStatus.MODEL_MODIFIED)) + if sync_safe: + # The clean_state must be STATUS_OK to update. Otherwise we + # are dirty or there was a missed error previously. + if self.clean_state == ExternalStatus.STATUS_OK: + safe_to_update = True + return safe_to_update + + def exists(self): + """Determine if the repo exists. This is indicated by: + + * sync_state is not EMPTY + + * if the sync_state is empty, then the valid states for + clean_state are default, empty or unknown. Anything else + and there was probably an internal logic error. 
+ + NOTE(bja, 2017-10) For the moment we are considering a + sync_state of default or unknown to require user intervention, + but we may want to relax this convention. This is probably a + result of a network error or internal logic error but more + testing is needed. + + """ + is_empty = (self.sync_state == ExternalStatus.EMPTY) + clean_valid = ((self.clean_state == ExternalStatus.DEFAULT) or + (self.clean_state == ExternalStatus.EMPTY) or + (self.clean_state == ExternalStatus.UNKNOWN)) + + if is_empty and clean_valid: + exists = False + else: + exists = True + return exists + + +def check_safe_to_update_repos(tree_status): + """Check if *ALL* repositories are in a safe state to update. We don't + want to do a partial update of the repositories then die, leaving + the model in an inconsistent state. + + Note: if there is an update to do, the repositories will by + definiation be out of synce with the externals description, so we + can't use that as criteria for updating. + + """ + safe_to_update = True + for comp in tree_status: + stat = tree_status[comp] + safe_to_update &= stat.safe_to_update() + + return safe_to_update diff --git a/manic/global_constants.py b/manic/global_constants.py new file mode 100644 index 0000000000..c71c29071a --- /dev/null +++ b/manic/global_constants.py @@ -0,0 +1,14 @@ +"""Globals shared across modules +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import pprint + +EMPTY_STR = '' +LOCAL_PATH_INDICATOR = '.' +VERSION_SEPERATOR = '.' +LOG_FILE_NAME = 'manage_externals.log' +PPRINTER = pprint.PrettyPrinter(indent=4) diff --git a/manic/repository.py b/manic/repository.py new file mode 100644 index 0000000000..8025eb6ac8 --- /dev/null +++ b/manic/repository.py @@ -0,0 +1,73 @@ +"""Base class representation of a repository +""" + +from .externals_description import ExternalsDescription +from .utils import fatal_error +from .global_constants import EMPTY_STR + + +class Repository(object): + """ + Class to represent and operate on a repository description. + """ + + def __init__(self, component_name, repo): + """ + Parse repo externals description + """ + self._name = component_name + self._protocol = repo[ExternalsDescription.PROTOCOL] + self._tag = repo[ExternalsDescription.TAG] + self._branch = repo[ExternalsDescription.BRANCH] + self._url = repo[ExternalsDescription.REPO_URL] + + if self._url is EMPTY_STR: + fatal_error('repo must have a URL') + + if self._tag is EMPTY_STR and self._branch is EMPTY_STR: + fatal_error('repo must have either a branch or a tag element') + + if self._tag is not EMPTY_STR and self._branch is not EMPTY_STR: + fatal_error('repo cannot have both a tag and a branch element') + + def checkout(self, base_dir_path, repo_dir_name): # pylint: disable=unused-argument + """ + If the repo destination directory exists, ensure it is correct (from + correct URL, correct branch or tag), and possibly update the source. + If the repo destination directory does not exist, checkout the correce + branch or tag. + """ + msg = ('DEV_ERROR: checkout method must be implemented in all ' + 'repository classes! {0}'.format(self.__class__.__name__)) + fatal_error(msg) + + def status(self, stat, repo_dir_path): # pylint: disable=unused-argument + """Report the status of the repo + + """ + msg = ('DEV_ERROR: status method must be implemented in all ' + 'repository classes! 
{0}'.format(self.__class__.__name__)) + fatal_error(msg) + + def verbose_status(self, repo_dir_path): # pylint: disable=unused-argument + """Display the raw repo status to the user. + + """ + msg = ('DEV_ERROR: status method must be implemented in all ' + 'repository classes! {0}'.format(self.__class__.__name__)) + fatal_error(msg) + + def url(self): + """Public access of repo url. + """ + return self._url + + def tag(self): + """Public access of repo tag + """ + return self._tag + + def branch(self): + """Public access of repo branch. + """ + return self._branch diff --git a/manic/repository_factory.py b/manic/repository_factory.py new file mode 100644 index 0000000000..c95e7a509b --- /dev/null +++ b/manic/repository_factory.py @@ -0,0 +1,29 @@ +"""Factory for creating and initializing the appropriate repository class +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +from .repository_git import GitRepository +from .repository_svn import SvnRepository +from .externals_description import ExternalsDescription +from .utils import fatal_error + + +def create_repository(component_name, repo_info): + """Determine what type of repository we have, i.e. git or svn, and + create the appropriate object. + + """ + protocol = repo_info[ExternalsDescription.PROTOCOL].lower() + if protocol == 'git': + repo = GitRepository(component_name, repo_info) + elif protocol == 'svn': + repo = SvnRepository(component_name, repo_info) + elif protocol == 'externals_only': + repo = None + else: + msg = 'Unknown repo protocol "{0}"'.format(protocol) + fatal_error(msg) + return repo diff --git a/manic/repository_git.py b/manic/repository_git.py new file mode 100644 index 0000000000..be330ae724 --- /dev/null +++ b/manic/repository_git.py @@ -0,0 +1,668 @@ +"""Class for interacting with git repositories +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import copy +import os +import re + +from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR +from .repository import Repository +from .externals_status import ExternalStatus +from .utils import expand_local_url, split_remote_url, is_remote_url +from .utils import log_process_output, fatal_error +from .utils import execute_subprocess + + +class GitRepository(Repository): + """Class to represent and operate on a repository description. + + For testing purpose, all system calls to git should: + + * be isolated in separate functions with no application logic + * of the form: + - cmd = ['git', ...] + - value = execute_subprocess(cmd, output_to_caller={T|F}, + status_to_caller={T|F}) + - return value + * be static methods (not rely on self) + * name as _git_subcommand_args(user_args) + + This convention allows easy unit testing of the repository logic + by mocking the specific calls to return predefined results. + + """ + + # match XYZ of '* (HEAD detached at {XYZ}): + # e.g. * (HEAD detached at origin/feature-2) + RE_DETACHED = re.compile( + r'\* \((?:[\w]+[\s]+)?detached (?:at|from) ([\w\-./]+)\)') + + # match tracking reference info, return XYZ from [XYZ] + # e.g. [origin/master] + RE_TRACKING = re.compile(r'\[([\w\-./]+)\]') + + def __init__(self, component_name, repo): + """ + Parse repo (a XML element). 
+ """ + Repository.__init__(self, component_name, repo) + + # ---------------------------------------------------------------- + # + # Public API, defined by Repository + # + # ---------------------------------------------------------------- + def checkout(self, base_dir_path, repo_dir_name): + """ + If the repo destination directory exists, ensure it is correct (from + correct URL, correct branch or tag), and possibly update the source. + If the repo destination directory does not exist, checkout the correce + branch or tag. + """ + repo_dir_path = os.path.join(base_dir_path, repo_dir_name) + if not os.path.exists(repo_dir_path): + self._clone_repo(base_dir_path, repo_dir_name) + self._checkout_ref(repo_dir_path) + + def status(self, stat, repo_dir_path): + """ + If the repo destination directory exists, ensure it is correct (from + correct URL, correct branch or tag), and possibly update the source. + If the repo destination directory does not exist, checkout the correce + branch or tag. + """ + self._check_sync(stat, repo_dir_path) + if os.path.exists(repo_dir_path): + self._status_summary(stat, repo_dir_path) + + def verbose_status(self, repo_dir_path): + """Display the raw repo status to the user. + + """ + if os.path.exists(repo_dir_path): + self._status_verbose(repo_dir_path) + + # ---------------------------------------------------------------- + # + # Internal work functions + # + # ---------------------------------------------------------------- + def _clone_repo(self, base_dir_path, repo_dir_name): + """Prepare to execute the clone by managing directory location + """ + cwd = os.getcwd() + os.chdir(base_dir_path) + self._git_clone(self._url, repo_dir_name) + os.chdir(cwd) + + def _current_ref_from_branch_command(self, git_output): + """Parse output of the 'git branch' command to determine the current branch. + The line starting with '*' is the current branch. It can be one of: + + feature2 36418b4 [origin/feature2] Work on feature2 +* feature3 36418b4 Work on feature2 + master 9b75494 [origin/master] Initialize repository. + +* (HEAD detached at 36418b4) 36418b4 Work on feature2 + feature2 36418b4 [origin/feature2] Work on feature2 + master 9b75494 [origin/master] Initialize repository. + +* (HEAD detached at origin/feature2) 36418b4 Work on feature2 + feature2 36418b4 [origin/feature2] Work on feature2 + feature3 36418b4 Work on feature2 + master 9b75494 [origin/master] Initialize repository. + + Possible head states: + + * detached from remote branch --> ref = remote/branch + * detached from tag --> ref = tag + * detached from sha --> ref = sha + * on local branch --> ref = branch + * on tracking branch --> ref = remote/branch + + On a branch: + * cm-testing + + Detached head from a tag: + * (HEAD detached at junk-tag) + + Detached head from a hash + * (HEAD detached at 0246874c) + + NOTE: Parsing the output of the porcelain is probably not a + great idea, but there doesn't appear to be a single plumbing + command that will return the same info. + + """ + lines = git_output.splitlines() + ref = '' + for line in lines: + if line.startswith('*'): + ref = line + break + current_ref = EMPTY_STR + if not ref: + # not a git repo? some other error? we return so the + # caller can handle. + pass + elif 'detached' in ref: + match = self.RE_DETACHED.search(ref) + try: + current_ref = match.group(1) + except BaseException: + msg = 'DEV_ERROR: regex to detect detached head state failed!' 
+                msg += '\nref:\n{0}\ngit_output\n{1}\n'.format(ref, git_output)
+                fatal_error(msg)
+        elif '[' in ref:
+            match = self.RE_TRACKING.search(ref)
+            try:
+                current_ref = match.group(1)
+            except BaseException:
+                msg = 'DEV_ERROR: regex to detect tracking branch failed.'
+                fatal_error(msg)
+        else:
+            # assumed local branch
+            current_ref = ref.split()[1]
+
+        current_ref = current_ref.strip()
+        return current_ref
+
+    def _check_sync(self, stat, repo_dir_path):
+        """Determine whether a git repository is in-sync with the model
+        description.
+
+        Because repos can have multiple remotes, the only criterion is
+        whether the branch or tag is the same.
+
+        """
+        if not os.path.exists(repo_dir_path):
+            # NOTE(bja, 2017-10) condition should have been determined
+            # by _Source() object and should never be here!
+            stat.sync_state = ExternalStatus.STATUS_ERROR
+        else:
+            git_dir = os.path.join(repo_dir_path, '.git')
+            if not os.path.exists(git_dir):
+                # NOTE(bja, 2017-10) directory exists, but no git repo
+                # info.... Can't test with subprocess git command
+                # because git will move up directory tree until it
+                # finds the parent repo git dir!
+                stat.sync_state = ExternalStatus.UNKNOWN
+            else:
+                self._check_sync_logic(stat, repo_dir_path)
+
+    def _check_sync_logic(self, stat, repo_dir_path):
+        """Isolate the complicated sync logic so it is not so deeply nested
+        and a bit easier to understand.
+
+        Sync logic - only reporting on whether we are on the ref
+        (branch, tag, hash) specified in the externals description.
+
+
+        """
+        def compare_refs(current_ref, expected_ref):
+            """Compare the current and expected ref.
+
+            """
+            if current_ref == expected_ref:
+                status = ExternalStatus.STATUS_OK
+            else:
+                status = ExternalStatus.MODEL_MODIFIED
+            return status
+
+        cwd = os.getcwd()
+        os.chdir(repo_dir_path)
+        git_output = self._git_branch_vv()
+        current_ref = self._current_ref_from_branch_command(git_output)
+        if current_ref == EMPTY_STR:
+            stat.sync_state = ExternalStatus.UNKNOWN
+        elif self._branch:
+            if self._url == LOCAL_PATH_INDICATOR:
+                expected_ref = self._branch
+                stat.sync_state = compare_refs(current_ref, expected_ref)
+            else:
+                remote_name = self._determine_remote_name()
+                if not remote_name:
+                    # git doesn't know about this remote. by definition
+                    # this is a modified state.
+                    stat.sync_state = ExternalStatus.MODEL_MODIFIED
+                else:
+                    expected_ref = "{0}/{1}".format(remote_name, self._branch)
+                    stat.sync_state = compare_refs(current_ref, expected_ref)
+        else:
+            stat.sync_state = compare_refs(current_ref, self._tag)
+        os.chdir(cwd)
+
+    def _determine_remote_name(self):
+        """Return the remote name.
+
+        Note that this is for the *future* repo url and branch, not
+        the current working copy!
+
+        """
+        git_output = self._git_remote_verbose()
+        git_output = git_output.splitlines()
+        remote_name = ''
+        for line in git_output:
+            data = line.strip()
+            if not data:
+                continue
+            data = data.split()
+            name = data[0].strip()
+            url = data[1].strip()
+            if self._url == url:
+                remote_name = name
+                break
+        return remote_name
+
+    def _create_remote_name(self):
+        """The url specified in the externals description file was not known
+        to git. We need to add it, which means adding a unique and
+        safe name....
+
+        The assigned name needs to be safe for git to use, e.g. can't
+        look like a path 'foo/bar' and work with both remote and local paths.
+
+        Remote paths include but are not limited to: git, ssh, https,
+        github, gitlab, bitbucket, custom server, etc.
+
+        Local paths can be relative or absolute. They may contain
+        shell variables, e.g.
${REPO_ROOT}/repo_name, or username + expansion, i.e. ~/ or ~someuser/. + + Relative paths must be at least one layer of redirection, i.e. + container/../ext_repo, but may be many layers deep, e.g. + container/../../../../../ext_repo + + NOTE(bja, 2017-11) + + The base name below may not be unique, for example if the + user has local paths like: + + /path/to/my/repos/nice_repo + /path/to/other/repos/nice_repo + + But the current implementation should cover most common + use cases for remotes and still provide usable names. + + """ + url = copy.deepcopy(self._url) + if is_remote_url(url): + url = split_remote_url(url) + else: + url = expand_local_url(url, self._name) + url = url.split('/') + repo_name = url[-1] + base_name = url[-2] + # repo name should nominally already be something that git can + # deal with. We need to remove other possibly troublesome + # punctuation, e.g. /, $, from the base name. + unsafe_characters = '!@#$%^&*()[]{}\\/,;~' + for unsafe in unsafe_characters: + base_name = base_name.replace(unsafe, '') + remote_name = "{0}_{1}".format(base_name, repo_name) + return remote_name + + def _checkout_ref(self, repo_dir): + """Checkout the user supplied reference + """ + # import pdb; pdb.set_trace() + cwd = os.getcwd() + os.chdir(repo_dir) + if self._url.strip() == LOCAL_PATH_INDICATOR: + self._checkout_local_ref() + else: + self._checkout_external_ref() + os.chdir(cwd) + + def _checkout_local_ref(self): + """Checkout the reference considering the local repo only. Do not + fetch any additional remotes or specify the remote when + checkout out the ref. + + """ + if self._tag: + ref = self._tag + else: + ref = self._branch + self._check_for_valid_ref(ref) + self._git_checkout_ref(ref) + + def _checkout_external_ref(self): + """Checkout the reference from a remote repository + """ + remote_name = self._determine_remote_name() + if not remote_name: + remote_name = self._create_remote_name() + self._git_remote_add(remote_name, self._url) + self._git_fetch(remote_name) + if self._tag: + is_unique_tag, check_msg = self._is_unique_tag(self._tag, + remote_name) + if not is_unique_tag: + msg = ('In repo "{0}": tag "{1}" {2}'.format( + self._name, self._tag, check_msg)) + fatal_error(msg) + ref = self._tag + else: + ref = '{0}/{1}'.format(remote_name, self._branch) + self._git_checkout_ref(ref) + + def _check_for_valid_ref(self, ref): + """Try some basic sanity checks on the user supplied reference so we + can provide a more useful error message than calledprocess + error... + + """ + is_tag = self._ref_is_tag(ref) + is_branch = self._ref_is_branch(ref) + is_commit = self._ref_is_commit(ref) + + is_valid = is_tag or is_branch or is_commit + if not is_valid: + msg = ('In repo "{0}": reference "{1}" does not appear to be a ' + 'valid tag, branch or commit! Please verify the reference ' + 'name (e.g. spelling), is available from: {2} '.format( + self._name, ref, self._url)) + fatal_error(msg) + return is_valid + + def _is_unique_tag(self, ref, remote_name): + """Verify that a reference is a valid tag and is unique (not a branch) + + Tags may be tag names, or SHA id's. It is also possible that a + branch and tag have the some name. + + Note: values returned by git_showref_* and git_revparse are + shell return codes, which are zero for success, non-zero for + error! 
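
The detached-HEAD and tracking-branch patterns defined earlier in this class can be exercised in isolation. A small sketch; the sample `git branch -vv` lines are illustrative, not captured from a real repository:

```python
import re

# Same patterns as RE_DETACHED and RE_TRACKING above.
RE_DETACHED = re.compile(
    r'\* \((?:[\w]+[\s]+)?detached (?:at|from) ([\w\-./]+)\)')
RE_TRACKING = re.compile(r'\[([\w\-./]+)\]')

detached = '* (HEAD detached at origin/feature2) 36418b4 Work on feature2'
tracking = '* master    9b75494 [origin/master] Initialize repository.'

print(RE_DETACHED.search(detached).group(1))  # origin/feature2
print(RE_TRACKING.search(tracking).group(1))  # origin/master
```
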
+ + """ + is_tag = self._ref_is_tag(ref) + is_branch = self._ref_is_branch(ref, remote_name) + is_commit = self._ref_is_commit(ref) + + msg = '' + is_unique_tag = False + if is_tag and not is_branch: + # unique tag + msg = 'is ok' + is_unique_tag = True + elif is_tag and is_branch: + msg = ('is both a branch and a tag. git may checkout the branch ' + 'instead of the tag depending on your version of git.') + is_unique_tag = False + elif not is_tag and is_branch: + msg = ('is a branch, and not a tag. If you intended to checkout ' + 'a branch, please change the externals description to be ' + 'a branch. If you intended to checkout a tag, it does not ' + 'exist. Please check the name.') + is_unique_tag = False + else: # not is_tag and not is_branch: + if is_commit: + # probably a sha1 or HEAD, etc, we call it a tag + msg = 'is ok' + is_unique_tag = True + else: + # undetermined state. + msg = ('does not appear to be a valid tag, branch or commit! ' + 'Please check the name and repository.') + is_unique_tag = False + + return is_unique_tag, msg + + def _ref_is_tag(self, ref): + """Verify that a reference is a valid tag according to git. + + Note: values returned by git_showref_* and git_revparse are + shell return codes, which are zero for success, non-zero for + error! + """ + is_tag = False + value = self._git_showref_tag(ref) + if value == 0: + is_tag = True + return is_tag + + def _ref_is_branch(self, ref, remote_name=None): + """Verify if a ref is any kind of branch (local, tracked remote, + untracked remote). + + """ + local_branch = False + remote_branch = False + if remote_name: + remote_branch = self._ref_is_remote_branch(ref, remote_name) + local_branch = self._ref_is_local_branch(ref) + + is_branch = False + if local_branch or remote_branch: + is_branch = True + return is_branch + + def _ref_is_local_branch(self, ref): + """Verify that a reference is a valid branch according to git. + + show-ref branch returns local branches that have been + previously checked out. It will not necessarily pick up + untracked remote branches. + + Note: values returned by git_showref_* and git_revparse are + shell return codes, which are zero for success, non-zero for + error! + + """ + is_branch = False + value = self._git_showref_branch(ref) + if value == 0: + is_branch = True + return is_branch + + def _ref_is_remote_branch(self, ref, remote_name): + """Verify that a reference is a valid branch according to git. + + show-ref branch returns local branches that have been + previously checked out. It will not necessarily pick up + untracked remote branches. + + Note: values returned by git_showref_* and git_revparse are + shell return codes, which are zero for success, non-zero for + error! + + """ + is_branch = False + value = self._git_lsremote_branch(ref, remote_name) + if value == 0: + is_branch = True + return is_branch + + def _ref_is_commit(self, ref): + """Verify that a reference is a valid commit according to git. + + This could be a tag, branch, sha1 id, HEAD and potentially others... + + Note: values returned by git_showref_* and git_revparse are + shell return codes, which are zero for success, non-zero for + error! 
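
The zero-means-success convention noted above is easy to try outside the class. A standalone sketch (not the class's private helper) that treats a zero exit status from `git rev-parse` as "the ref resolves to a commit"; run it from inside any git work tree:

```python
import os
import subprocess

def ref_is_commit(ref):
    """Return True if git can resolve ref to a commit (exit status 0)."""
    cmd = ['git', 'rev-parse', '--quiet', '--verify', ref + '^{commit}']
    with open(os.devnull, 'w') as devnull:
        return subprocess.call(cmd, stdout=devnull, stderr=devnull) == 0

print(ref_is_commit('HEAD'))             # True inside a git work tree
print(ref_is_commit('no-such-ref-123'))  # False
```
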
+ """ + is_commit = False + value = self._git_revparse_commit(ref) + if value == 0: + is_commit = True + return is_commit + + def _status_summary(self, stat, repo_dir_path): + """Determine the clean/dirty status of a git repository + + """ + cwd = os.getcwd() + os.chdir(repo_dir_path) + git_output = self._git_status_porcelain_v1z() + os.chdir(cwd) + is_dirty = self._status_v1z_is_dirty(git_output) + if is_dirty: + stat.clean_state = ExternalStatus.DIRTY + else: + stat.clean_state = ExternalStatus.STATUS_OK + + def _status_verbose(self, repo_dir_path): + """Display raw git status output to the user + + """ + cwd = os.getcwd() + os.chdir(repo_dir_path) + git_output = self._git_status_verbose() + os.chdir(cwd) + log_process_output(git_output) + print(git_output) + + @staticmethod + def _status_v1z_is_dirty(git_output): + """Parse the git status output from --porcelain=v1 -z and determine if + the repo status is clean or dirty. Dirty means: + + * modified files + * missing files + * added files + * untracked files + * removed + * renamed + * unmerged + + NOTE: Based on the above definition, the porcelain status + should be an empty string to be considered 'clean'. Of course + this assumes we only get an empty string from an status + command on a clean checkout, and not some error + condition... Could alse use 'git diff --quiet'. + + """ + is_dirty = False + if git_output: + is_dirty = True + return is_dirty + + # ---------------------------------------------------------------- + # + # system call to git for information gathering + # + # ---------------------------------------------------------------- + @staticmethod + def _git_branch_vv(): + """Run git branch -vv to obtain verbose branch information, including + upstream tracking and hash. + + """ + cmd = ['git', 'branch', '--verbose', '--verbose'] + git_output = execute_subprocess(cmd, output_to_caller=True) + return git_output + + @staticmethod + def _git_showref_tag(ref): + """Run git show-ref check if the user supplied ref is a tag. + + could also use git rev-parse --quiet --verify tagname^{tag} + """ + cmd = ['git', 'show-ref', '--quiet', '--verify', + 'refs/tags/{0}'.format(ref), ] + status = execute_subprocess(cmd, status_to_caller=True) + return status + + @staticmethod + def _git_showref_branch(ref): + """Run git show-ref check if the user supplied ref is a local or + tracked remote branch. + + """ + cmd = ['git', 'show-ref', '--quiet', '--verify', + 'refs/heads/{0}'.format(ref), ] + status = execute_subprocess(cmd, status_to_caller=True) + return status + + @staticmethod + def _git_lsremote_branch(ref, remote_name): + """Run git ls-remote to check if the user supplied ref is a remote + branch that is not being tracked + + """ + cmd = ['git', 'ls-remote', '--exit-code', '--heads', + remote_name, ref, ] + status = execute_subprocess(cmd, status_to_caller=True) + return status + + @staticmethod + def _git_revparse_commit(ref): + """Run git rev-parse to detect if a reference is a SHA, HEAD or other + valid commit. + + """ + cmd = ['git', 'rev-parse', '--quiet', '--verify', + '{0}^{1}'.format(ref, '{commit}'), ] + status = execute_subprocess(cmd, status_to_caller=True) + return status + + @staticmethod + def _git_status_porcelain_v1z(): + """Run git status to obtain repository information. + + The machine parable format that is guarenteed not to change + between git versions or *user configuration*. 
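
A minimal sketch of the clean/dirty test described above, run from inside a git working tree: with `--porcelain -z` a clean checkout produces no output at all, so any output is treated as dirty.

```python
import subprocess

# Empty output means a clean checkout; anything else means dirty.
output = subprocess.check_output(
    ['git', 'status', '--porcelain', '-z'], universal_newlines=True)
print('dirty' if output else 'clean')
```
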
+ + """ + cmd = ['git', 'status', '--porcelain', '-z'] + git_output = execute_subprocess(cmd, output_to_caller=True) + return git_output + + @staticmethod + def _git_status_verbose(): + """Run the git status command to obtain repository information. + """ + cmd = ['git', 'status'] + git_output = execute_subprocess(cmd, output_to_caller=True) + return git_output + + @staticmethod + def _git_remote_verbose(): + """Run the git remote command to obtain repository information. + """ + cmd = ['git', 'remote', '--verbose'] + git_output = execute_subprocess(cmd, output_to_caller=True) + return git_output + + # ---------------------------------------------------------------- + # + # system call to git for sideffects modifying the working tree + # + # ---------------------------------------------------------------- + @staticmethod + def _git_clone(url, repo_dir_name): + """Run git clone for the side effect of creating a repository. + """ + cmd = ['git', 'clone', url, repo_dir_name] + execute_subprocess(cmd) + + @staticmethod + def _git_remote_add(name, url): + """Run the git remote command to for the side effect of adding a remote + """ + cmd = ['git', 'remote', 'add', name, url] + execute_subprocess(cmd) + + @staticmethod + def _git_fetch(remote_name): + """Run the git fetch command to for the side effect of updating the repo + """ + cmd = ['git', 'fetch', remote_name] + execute_subprocess(cmd) + + @staticmethod + def _git_checkout_ref(ref): + """Run the git checkout command to for the side effect of updating the repo + + Param: ref is a reference to a local or remote object in the + form 'origin/my_feature', or 'tag1'. + + """ + cmd = ['git', 'checkout', ref] + execute_subprocess(cmd) diff --git a/manic/repository_svn.py b/manic/repository_svn.py new file mode 100644 index 0000000000..27617ca0c9 --- /dev/null +++ b/manic/repository_svn.py @@ -0,0 +1,232 @@ +"""Class for interacting with svn repositories +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import os +import re +import xml.etree.ElementTree as ET + +from .repository import Repository +from .externals_status import ExternalStatus +from .utils import fatal_error, log_process_output +from .utils import execute_subprocess + + +class SvnRepository(Repository): + """ + Class to represent and operate on a repository description. + + For testing purpose, all system calls to svn should: + + * be isolated in separate functions with no application logic + * of the form: + - cmd = ['svn', ...] + - value = execute_subprocess(cmd, output_to_caller={T|F}, + status_to_caller={T|F}) + - return value + * be static methods (not rely on self) + * name as _svn_subcommand_args(user_args) + + This convention allows easy unit testing of the repository logic + by mocking the specific calls to return predefined results. + + """ + RE_URLLINE = re.compile(r'^URL:') + + def __init__(self, component_name, repo): + """ + Parse repo (a XML element). + """ + Repository.__init__(self, component_name, repo) + if self._branch: + self._url = os.path.join(self._url, self._branch) + elif self._tag: + self._url = os.path.join(self._url, self._tag) + else: + msg = "DEV_ERROR in svn repository. Shouldn't be here!" 
+ fatal_error(msg) + + # ---------------------------------------------------------------- + # + # Public API, defined by Repository + # + # ---------------------------------------------------------------- + def checkout(self, base_dir_path, repo_dir_name): + """Checkout or update the working copy + + If the repo destination directory exists, switch the sandbox to + match the externals description. + + If the repo destination directory does not exist, checkout the + correct branch or tag. + + """ + repo_dir_path = os.path.join(base_dir_path, repo_dir_name) + if os.path.exists(repo_dir_path): + cwd = os.getcwd() + os.chdir(repo_dir_path) + self._svn_switch(self._url) + os.chdir(cwd) + else: + self._svn_checkout(self._url, repo_dir_path) + + def status(self, stat, repo_dir_path): + """ + Check and report the status of the repository + """ + self._check_sync(stat, repo_dir_path) + if os.path.exists(repo_dir_path): + self._status_summary(stat, repo_dir_path) + return stat + + def verbose_status(self, repo_dir_path): + """Display the raw repo status to the user. + + """ + if os.path.exists(repo_dir_path): + self._status_verbose(repo_dir_path) + + # ---------------------------------------------------------------- + # + # Internal work functions + # + # ---------------------------------------------------------------- + def _check_sync(self, stat, repo_dir_path): + """Check to see if repository directory exists and is at the expected + url. Return: status object + + """ + if not os.path.exists(repo_dir_path): + # NOTE(bja, 2017-10) this state should have been handled by + # the source object and we never get here! + stat.sync_state = ExternalStatus.STATUS_ERROR + else: + svn_output = self._svn_info(repo_dir_path) + if not svn_output: + # directory exists, but info returned nothing. .svn + # directory removed or incomplete checkout? + stat.sync_state = ExternalStatus.UNKNOWN + else: + stat.sync_state = self._check_url(svn_output, self._url) + + @staticmethod + def _check_url(svn_output, expected_url): + """Determine the svn url from svn info output and return whether it + matches the expected value. + + """ + url = None + for line in svn_output.splitlines(): + if SvnRepository.RE_URLLINE.match(line): + url = line.split(': ')[1].strip() + break + if not url: + status = ExternalStatus.UNKNOWN + elif url == expected_url: + status = ExternalStatus.STATUS_OK + else: + status = ExternalStatus.MODEL_MODIFIED + return status + + def _status_summary(self, stat, repo_dir_path): + """Report whether the svn repository is in-sync with the model + description and whether the sandbox is clean or dirty. + + """ + svn_output = self._svn_status_xml(repo_dir_path) + is_dirty = self.xml_status_is_dirty(svn_output) + if is_dirty: + stat.clean_state = ExternalStatus.DIRTY + else: + stat.clean_state = ExternalStatus.STATUS_OK + + def _status_verbose(self, repo_dir_path): + """Display the raw svn status output to the user. + + """ + svn_output = self._svn_status_verbose(repo_dir_path) + log_process_output(svn_output) + print(svn_output) + + @staticmethod + def xml_status_is_dirty(svn_output): + """Parse svn status xml output and determine if the working copy is + clean or dirty. 
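
The URL comparison above can be tried against canned `svn info` text. A sketch using the same `^URL:` pattern; the sample output is illustrative, not from a live repository:

```python
import re

RE_URLLINE = re.compile(r'^URL:')

svn_info = """Path: ext_repo
URL: https://svn.example.org/trunk/ext_repo
Revision: 12345
"""

url = None
for line in svn_info.splitlines():
    if RE_URLLINE.match(line):
        url = line.split(': ')[1].strip()
        break
print(url)  # https://svn.example.org/trunk/ext_repo
```
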
Dirty is defined as: + + * modified files + * added files + * deleted files + * missing files + * unversioned files + + The only acceptable state returned from svn is 'external' + + """ + # pylint: disable=invalid-name + SVN_EXTERNAL = 'external' + # pylint: enable=invalid-name + + is_dirty = False + xml_status = ET.fromstring(svn_output) + xml_target = xml_status.find('./target') + entries = xml_target.findall('./entry') + for entry in entries: + status = entry.find('./wc-status') + item = status.get('item') + if item != SVN_EXTERNAL: + is_dirty = True + return is_dirty + + # ---------------------------------------------------------------- + # + # system call to svn for information gathering + # + # ---------------------------------------------------------------- + @staticmethod + def _svn_info(repo_dir_path): + """Return results of svn info command + """ + cmd = ['svn', 'info', repo_dir_path] + output = execute_subprocess(cmd, output_to_caller=True) + return output + + @staticmethod + def _svn_status_verbose(repo_dir_path): + """capture the full svn status output + """ + cmd = ['svn', 'status', repo_dir_path] + svn_output = execute_subprocess(cmd, output_to_caller=True) + return svn_output + + @staticmethod + def _svn_status_xml(repo_dir_path): + """ + Get status of the subversion sandbox in repo_dir + """ + cmd = ['svn', 'status', '--xml', repo_dir_path] + svn_output = execute_subprocess(cmd, output_to_caller=True) + return svn_output + + # ---------------------------------------------------------------- + # + # system call to svn for sideffects modifying the working tree + # + # ---------------------------------------------------------------- + @staticmethod + def _svn_checkout(url, repo_dir_path): + """ + Checkout a subversion repository (repo_url) to checkout_dir. + """ + cmd = ['svn', 'checkout', url, repo_dir_path] + execute_subprocess(cmd) + + @staticmethod + def _svn_switch(url): + """ + Switch branches for in an svn sandbox + """ + cmd = ['svn', 'switch', url] + execute_subprocess(cmd) diff --git a/manic/sourcetree.py b/manic/sourcetree.py new file mode 100644 index 0000000000..20d124ea74 --- /dev/null +++ b/manic/sourcetree.py @@ -0,0 +1,296 @@ +""" + +FIXME(bja, 2017-11) External and SourceTree have a circular dependancy! +""" + +import errno +import logging +import os + +from .externals_description import ExternalsDescription +from .externals_description import read_externals_description_file +from .externals_description import create_externals_description +from .repository_factory import create_repository +from .externals_status import ExternalStatus +from .utils import fatal_error, printlog +from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR + + +class _External(object): + """ + _External represents an external object in side a SourceTree + """ + + # pylint: disable=R0902 + + def __init__(self, root_dir, name, ext_description): + """Parse an external description file into a dictionary of externals. + + Input: + + root_dir : string - the root directory path where + 'local_path' is relative to. + + name : string - name of the ext_description object. may or may not + correspond to something in the path. 
+
+        ext_description : dict - source ExternalsDescription object
+
+        """
+        self._name = name
+        self._repo = None
+        self._externals = EMPTY_STR
+        self._externals_sourcetree = None
+        # Parse the sub-elements
+
+        # _path : local path relative to the containing source tree
+        self._local_path = ext_description[ExternalsDescription.PATH]
+        # _repo_dir : full repository directory
+        repo_dir = os.path.join(root_dir, self._local_path)
+        self._repo_dir_path = os.path.abspath(repo_dir)
+        # _base_dir : base directory *containing* the repository
+        self._base_dir_path = os.path.dirname(self._repo_dir_path)
+        # repo_dir_name : base_dir_path + repo_dir_name = repo_dir_path
+        self._repo_dir_name = os.path.basename(self._repo_dir_path)
+        assert(os.path.join(self._base_dir_path, self._repo_dir_name)
+               == self._repo_dir_path)
+
+        self._required = ext_description[ExternalsDescription.REQUIRED]
+        self._externals = ext_description[ExternalsDescription.EXTERNALS]
+        if self._externals:
+            self._create_externals_sourcetree()
+        repo = create_repository(
+            name, ext_description[ExternalsDescription.REPO])
+        if repo:
+            self._repo = repo
+
+    def get_name(self):
+        """
+        Return the external object's name
+        """
+        return self._name
+
+    def get_local_path(self):
+        """
+        Return the external object's path
+        """
+        return self._local_path
+
+    def status(self):
+        """
+        If the repo destination directory exists, ensure it is correct (from
+        correct URL, correct branch or tag), and possibly update the external.
+        If the repo destination directory does not exist, checkout the correct
+        branch or tag.
+        If load_all is True, also load all of the externals' sub-externals.
+        """
+
+        stat = ExternalStatus()
+        stat.path = self.get_local_path()
+        if not self._required:
+            stat.source_type = ExternalStatus.OPTIONAL
+        elif self._local_path == LOCAL_PATH_INDICATOR:
+            # LOCAL_PATH_INDICATOR, '.' paths, are standalone
+            # component directories that are not managed by
+            # checkout_externals.
+            stat.source_type = ExternalStatus.STANDALONE
+        else:
+            # managed by checkout_externals
+            stat.source_type = ExternalStatus.MANAGED
+
+        ext_stats = {}
+
+        if not os.path.exists(self._repo_dir_path):
+            stat.sync_state = ExternalStatus.EMPTY
+            msg = ('status check: repository directory for "{0}" does not '
+                   'exist.'.format(self._name))
+            logging.info(msg)
+        else:
+            if self._repo:
+                self._repo.status(stat, self._repo_dir_path)
+
+            if self._externals and self._externals_sourcetree:
+                # we expect externals and they exist
+                cwd = os.getcwd()
+                # SourceTree expects to be called from the correct
+                # root directory.
+                os.chdir(self._repo_dir_path)
+                ext_stats = self._externals_sourcetree.status(self._local_path)
+                os.chdir(cwd)
+
+        all_stats = {}
+        # don't add the root component because we don't manage it
+        # and can't provide useful info about it.
+        if self._local_path != LOCAL_PATH_INDICATOR:
+            # store the stats under the local_path, not comp name, so
+            # it will be sorted correctly
+            all_stats[stat.path] = stat
+
+        if ext_stats:
+            all_stats.update(ext_stats)
+
+        return all_stats
+
+    def verbose_status(self):
+        """Display the verbose status to the user. This is just the raw output
+        from the repository 'status' command.
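
The path bookkeeping in `_External.__init__` above reduces to a few `os.path` calls. A sketch with a made-up root directory and local path:

```python
import os

root_dir = '/path/to/model'        # assumed model root
local_path = 'externals/ext_repo'  # assumed 'local_path' from the description

repo_dir_path = os.path.abspath(os.path.join(root_dir, local_path))
base_dir_path = os.path.dirname(repo_dir_path)   # /path/to/model/externals
repo_dir_name = os.path.basename(repo_dir_path)  # ext_repo

assert os.path.join(base_dir_path, repo_dir_name) == repo_dir_path
```
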
+ + """ + if not os.path.exists(self._repo_dir_path): + msg = ('status check: repository directory for "{0}" does not ' + 'exist!'.format(self._name)) + logging.info(msg) + else: + cwd = os.getcwd() + os.chdir(self._repo_dir_path) + if self._repo: + self._repo.verbose_status(self._repo_dir_path) + os.chdir(cwd) + + def checkout(self, load_all): + """ + If the repo destination directory exists, ensure it is correct (from + correct URL, correct branch or tag), and possibly update the external. + If the repo destination directory does not exist, checkout the correce + branch or tag. + If load_all is True, also load all of the the externals sub-externals. + """ + if load_all: + pass + # Make sure we are in correct location + + if not os.path.exists(self._repo_dir_path): + # repository directory doesn't exist. Need to check it + # out, and for that we need the base_dir_path to exist + try: + os.makedirs(self._base_dir_path) + except OSError as error: + if error.errno != errno.EEXIST: + msg = 'Could not create directory "{0}"'.format( + self._base_dir_path) + fatal_error(msg) + + if self._repo: + self._repo.checkout(self._base_dir_path, self._repo_dir_name) + + def checkout_externals(self, load_all): + """Checkout the sub-externals for this object + """ + if self._externals: + if not self._externals_sourcetree: + self._create_externals_sourcetree() + self._externals_sourcetree.checkout(load_all) + + def _create_externals_sourcetree(self): + """ + """ + if not os.path.exists(self._repo_dir_path): + # NOTE(bja, 2017-10) repository has not been checked out + # yet, can't process the externals file. Assume we are + # checking status before code is checkoud out and this + # will be handled correctly later. + return + + cwd = os.getcwd() + os.chdir(self._repo_dir_path) + if not os.path.exists(self._externals): + # NOTE(bja, 2017-10) this check is redundent with the one + # in read_externals_description_file! + msg = ('External externals description file "{0}" ' + 'does not exist! In directory: {1}'.format( + self._externals, self._repo_dir_path)) + fatal_error(msg) + + externals_root = self._repo_dir_path + model_data = read_externals_description_file(externals_root, + self._externals) + externals = create_externals_description(model_data) + self._externals_sourcetree = SourceTree(externals_root, externals) + os.chdir(cwd) + + +class SourceTree(object): + """ + SourceTree represents a group of managed externals + """ + + def __init__(self, root_dir, model): + """ + Build a SourceTree object from a model description + """ + self._root_dir = os.path.abspath(root_dir) + self._all_components = {} + self._required_compnames = [] + for comp in model: + src = _External(self._root_dir, comp, model[comp]) + self._all_components[comp] = src + if model[comp][ExternalsDescription.REQUIRED]: + self._required_compnames.append(comp) + + def status(self, relative_path_base=LOCAL_PATH_INDICATOR): + """Report the status components + + FIXME(bja, 2017-10) what do we do about situations where the + user checked out the optional components, but didn't add + optional for running status? What do we do where the user + didn't add optional to the checkout but did add it to the + status. -- For now, we run status on all components, and try + to do the right thing based on the results.... 
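
Putting the pieces together, a hypothetical driver for these classes. The root directory and the `Externals.cfg` file name are assumptions for illustration; the helper functions are the ones imported at the top of this module:

```python
from manic.externals_description import create_externals_description
from manic.externals_description import read_externals_description_file
from manic.sourcetree import SourceTree

root_dir = '/path/to/model'  # assumed location of the externals description
model_data = read_externals_description_file(root_dir, 'Externals.cfg')
externals = create_externals_description(model_data)

tree = SourceTree(root_dir, externals)
tree.checkout(load_all=False)  # required externals only
print(tree.status())           # dict keyed by local path
```
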
+ + """ + load_comps = self._all_components.keys() + + summary = {} + for comp in load_comps: + printlog('{0}, '.format(comp), end='') + stat = self._all_components[comp].status() + for name in stat.keys(): + # check if we need to append the relative_path_base to + # the path so it will be sorted in the correct order. + if not stat[name].path.startswith(relative_path_base): + stat[name].path = os.path.join(relative_path_base, + stat[name].path) + # store under key = updated path, and delete the + # old key. + comp_stat = stat[name] + del stat[name] + stat[comp_stat.path] = comp_stat + summary.update(stat) + + return summary + + def verbose_status(self): + """Display verbose status to the user. This is just the raw output of + the git and svn status commands. + + """ + load_comps = self._all_components.keys() + for comp in load_comps: + self._all_components[comp].verbose_status() + + def checkout(self, load_all, load_comp=None): + """ + Checkout or update indicated components into the the configured + subdirs. + + If load_all is True, recursively checkout all externals. + If load_all is False, load_comp is an optional set of components to load. + If load_all is True and load_comp is None, only load the required externals. + """ + printlog('Checking out externals: ', end='') + if load_all: + load_comps = self._all_components.keys() + elif load_comp is not None: + load_comps = [load_comp] + else: + load_comps = self._required_compnames + + # checkout the primary externals + for comp in load_comps: + printlog('{0}, '.format(comp), end='') + self._all_components[comp].checkout(load_all) + printlog('') + + # now give each external an opportunitity to checkout it's externals. + for comp in load_comps: + self._all_components[comp].checkout_externals(load_all) diff --git a/manic/utils.py b/manic/utils.py new file mode 100644 index 0000000000..c63abcba0d --- /dev/null +++ b/manic/utils.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python +""" +Common public utilities for manic package + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import logging +import os +import string +import subprocess +import sys + +from .global_constants import LOCAL_PATH_INDICATOR, LOG_FILE_NAME + +# --------------------------------------------------------------------- +# +# screen and logging output +# +# --------------------------------------------------------------------- + + +def log_process_output(output): + """Log each line of process output at debug level so it can be + filtered if necessary. By default, output is a single string, and + logging.debug(output) will only put log info heading on the first + line. This makes it hard to filter with grep. + + """ + output = output.split('\n') + for line in output: + logging.debug(line) + + +def printlog(msg, **kwargs): + """Wrapper script around print to ensure that everything printed to + the screen also gets logged. 
+ + """ + logging.info(msg) + if kwargs: + print(msg, **kwargs) + else: + print(msg) + sys.stdout.flush() + + +# --------------------------------------------------------------------- +# +# error handling +# +# --------------------------------------------------------------------- +def fatal_error(message): + """ + Error output function + """ + logging.error(message) + raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) + + +# --------------------------------------------------------------------- +# +# Data conversion / manipulation +# +# --------------------------------------------------------------------- +def str_to_bool(bool_str): + """Convert a sting representation of as boolean into a true boolean. + + Conversion should be case insensitive. + """ + value = None + str_lower = bool_str.lower() + if (str_lower == 'true') or (str_lower == 't'): + value = True + elif (str_lower == 'false') or (str_lower == 'f'): + value = False + if value is None: + msg = ('ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str)) + fatal_error(msg) + return value + + +REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@'] + + +def is_remote_url(url): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + """ + remote_url = False + for prefix in REMOTE_PREFIXES: + if url.startswith(prefix): + remote_url = True + return remote_url + + +def split_remote_url(url): + """check if the user provided a local file path or a + remote. If remote, try to strip off protocol info. + + """ + remote_url = is_remote_url(url) + if not remote_url: + return url + + for prefix in REMOTE_PREFIXES: + url = url.replace(prefix, '') + + if '@' in url: + url = url.split('@')[1] + + if ':' in url: + url = url.split(':')[1] + + return url + + +def expand_local_url(url, field): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + Note: local paths of LOCAL_PATH_INDICATOR have special meaning and + represent local copy only, don't work with the remotes. + + """ + remote_url = is_remote_url(url) + if not remote_url: + if url.strip() == LOCAL_PATH_INDICATOR: + pass + else: + url = os.path.expandvars(url) + url = os.path.expanduser(url) + if not os.path.isabs(url): + msg = ('WARNING: Externals description for "{0}" contains a ' + 'url that is not remote and does not expand to an ' + 'absolute path. Version control operations may ' + 'fail.\n\nurl={1}'.format(field, url)) + printlog(msg) + else: + url = os.path.normpath(url) + return url + + +# --------------------------------------------------------------------- +# +# subprocess +# +# --------------------------------------------------------------------- +def execute_subprocess(commands, status_to_caller=False, + output_to_caller=False): + """Wrapper around subprocess.check_output to handle common + exceptions. + + check_output runs a command with arguments and waits + for it to complete. + + check_output raises an exception on a nonzero return code. if + status_to_caller is true, execute_subprocess returns the subprocess + return code, otherwise execute_subprocess treats non-zero return + status as an error and raises an exception. 
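
A few of the helpers above, exercised directly. A sketch assuming `manic` is importable and the current directory is a git work tree; the URLs are made up:

```python
from manic.utils import execute_subprocess, is_remote_url, split_remote_url

print(is_remote_url('git@github.com:example/ext_repo.git'))    # True
print(is_remote_url('../relative/path'))                       # False
print(split_remote_url('https://github.com/example/ext_repo'))
# -> github.com/example/ext_repo

# Capture command output, or capture the return status instead of raising.
version = execute_subprocess(['git', '--version'], output_to_caller=True)
status = execute_subprocess(
    ['git', 'rev-parse', '--quiet', '--verify', 'HEAD'],
    status_to_caller=True)
print(version.strip())  # e.g. git version 2.x.y
print(status)           # 0 inside a git work tree
```
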
+ + """ + msg = 'In directory: {0}\nexecute_subprocess running command:'.format( + os.getcwd()) + logging.info(msg) + logging.info(commands) + return_to_caller = status_to_caller or output_to_caller + status = -1 + output = '' + try: + logging.info(' '.join(commands)) + output = subprocess.check_output(commands, stderr=subprocess.STDOUT, + universal_newlines=True) + log_process_output(output) + status = 0 + except OSError as error: + msg = failed_command_msg( + 'Command execution failed. Does the executable exist?', + commands) + logging.error(error) + fatal_error(msg) + except ValueError as error: + msg = failed_command_msg( + 'DEV_ERROR: Invalid arguments trying to run subprocess', + commands) + logging.error(error) + fatal_error(msg) + except subprocess.CalledProcessError as error: + # Only report the error if we are NOT returning to the + # caller. If we are returning to the caller, then it may be a + # simple status check. If returning, it is the callers + # responsibility determine if an error occurred and handle it + # appropriately. + if not return_to_caller: + msg = failed_command_msg( + 'Called process did not run successfully.\n' + 'Returned status: {0}'.format(error.returncode), + commands) + logging.error(error) + logging.error(msg) + log_process_output(error.output) + fatal_error(msg) + status = error.returncode + + if status_to_caller and output_to_caller: + ret_value = (status, output) + elif status_to_caller: + ret_value = status + elif output_to_caller: + ret_value = output + else: + ret_value = None + + return ret_value + + +def failed_command_msg(msg_context, command): + """Template for consistent error messages from subprocess calls. + """ + error_msg = string.Template("""$context +Failed command: + $command +Please check the log file "$log" for more details.""") + values = { + 'context': msg_context, + 'command': ' '.join(command), + 'log': LOG_FILE_NAME, + } + msg = error_msg.substitute(values) + return msg diff --git a/test/.coveragerc b/test/.coveragerc new file mode 100644 index 0000000000..8b681888b8 --- /dev/null +++ b/test/.coveragerc @@ -0,0 +1,7 @@ +[run] +branch = True +omit = test_unit_*.py + test_sys_*.py + /usr/* + .local/* + */site-packages/* \ No newline at end of file diff --git a/test/.gitignore b/test/.gitignore new file mode 100644 index 0000000000..dd5795998f --- /dev/null +++ b/test/.gitignore @@ -0,0 +1,7 @@ +# virtual environments +env_python* + +# python code coverage tool output +.coverage +htmlcov + diff --git a/test/.pylint.rc b/test/.pylint.rc new file mode 100644 index 0000000000..3e66113f7f --- /dev/null +++ b/test/.pylint.rc @@ -0,0 +1,426 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=.git,.svn,env2 + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. 
+#rcfile= + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=bad-continuation + + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +msg-template={msg_id}:{line:3d},{column:2d}: {msg} ({symbol}) + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +#reports=yes + +# Activate the evaluation score. 
+score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[BASIC] + +# Naming hint for argument names +argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct argument names +argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Naming hint for attribute names +attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct attribute names +attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming hint for function names +function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct function names +function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for method names +method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct method names +method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming hint for variable names +variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct variable names +variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. 
+max-line-length=100 + +# Maximum number of lines in a module +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. 
+missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. 
internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/test/Makefile b/test/Makefile new file mode 100644 index 0000000000..6acb8194a9 --- /dev/null +++ b/test/Makefile @@ -0,0 +1,118 @@ +python = not-set +verbose = not-set +debug = not-set + +ifneq ($(python), not-set) +PYTHON=$(python) +else +PYTHON=python +endif + +# we need the python path to point one level up to access the package +# and executables +PYPATH=PYTHONPATH=..: + +# common args for running tests +TEST_ARGS=-m unittest discover + +ifeq ($(debug), not-set) + ifeq ($(verbose), not-set) + # summary only output + TEST_ARGS+=--buffer + else + # show individual test summary + TEST_ARGS+=--buffer --verbose + endif +else + # show detailed test output + TEST_ARGS+=--verbose +endif + + +# auto reformat the code +AUTOPEP8=autopep8 +AUTOPEP8_ARGS=--aggressive --in-place + +# run lint +PYLINT=pylint +PYLINT_ARGS=-j 2 --rcfile=.pylint.rc + +# code coverage +COVERAGE=coverage +COVERAGE_ARGS=--rcfile=.coveragerc --append + +# source files +SRC = \ + ../checkout_externals \ + ../manic/*.py + +CHECKOUT_EXE = ../checkout_externals + +TEST_DIR = . + +README = ../README.md + +# +# testing +# +.PHONY : utest +utest : FORCE + $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_unit_*.py' + +.PHONY : stest +stest : FORCE + $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_sys_*.py' + +.PHONY : test +test : utest stest + +# +# documentation +# +.PHONY : readme +readme : $(CHECKOUT_EXE) + echo '-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --\n' > $(README) + echo -n '[![Build Status](https://travis-ci.org/NCAR/manage_externals.svg?branch=master)](https://travis-ci.org/NCAR/manage_externals)' >> $(README) + echo '[![Coverage Status](https://coveralls.io/repos/github/NCAR/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/NCAR/manage_externals?branch=master)' >> $(README) + echo '```\n' >> $(README) + $(CHECKOUT_EXE) --help >> $(README) + +# +# coding standards +# +.PHONY : style +style : FORCE + $(AUTOPEP8) $(AUTOPEP8_ARGS) --recursive $(SRC) $(TEST_DIR)/test_*.py + +.PHONY : lint +lint : style + $(PYLINT) $(PYLINT_ARGS) $(SRC) $(TEST_DIR)/test_*.py + +.PHONY : coverage +coverage : FORCE + $(PYPATH) $(COVERAGE) erase + $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_unit_*.py' + $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_sys_*.py' + $(PYPATH) $(COVERAGE) html + +# +# virtual environment creation +# +.PHONY : env +env : FORCE + $(PYPATH) virtualenv --python $(PYTHON) $@_$(PYTHON) + . $@_$(PYTHON)/bin/activate; pip install -r requirements.txt + +# +# utilites +# +.PHONY : clean +clean : FORCE + -rm -rf *~ *.pyc tmp fake htmlcov + +.PHONY : clobber +clobber : clean + -rm -rf env_* + +FORCE : + diff --git a/test/README.md b/test/README.md new file mode 100644 index 0000000000..938a900eec --- /dev/null +++ b/test/README.md @@ -0,0 +1,77 @@ +# Testing for checkout_externals + +NOTE: Python2 is the supported runtime environment. 
Python3 compatibility is +in progress, complicated by the different proposed input methods +(yaml, xml, cfg/ini, json) and their different handling of strings +(unicode vs byte) in python2. Full python3 compatibility will be +possible once the number of possible input formats has been narrowed. + +## Setup development environment + +Development environments should be set up for python2 and python3: + +```SH + cd checkout_externals/test + make python=python2 env + make python=python3 env +``` + +## Unit tests + +Tests should be run for both python2 and python3. It is recommended +that you have separate terminal windows open for python2 and python3 +testing to avoid errors when activating and deactivating environments. + +```SH + cd checkout_externals/test + . env_python2/bin/activate + make utest + deactivate +``` + +```SH + cd checkout_externals/test + . env_python3/bin/activate + make utest + deactivate +``` + +## System tests + +Not yet implemented. + +## Static analysis + +checkout_externals is difficult to test thoroughly because it relies +on git and svn, and svn requires a live network connection and +repository. Static analysis will help catch bugs in code paths that +are not being executed, but it requires conforming to community +standards and best practices. autopep8 and pylint should be run +regularly for automatic code formatting and linting. + +```SH + cd checkout_externals/test + . env_python2/bin/activate + make lint + deactivate +``` + +The canonical formatting for the code is whatever autopep8 +generates. All issues identified by pylint should be addressed. + + +## Code coverage + +All changes to the code should include maintaining existing tests and +writing new tests for new or changed functionality. To ensure test +coverage, run the code coverage tool: + +```SH + cd checkout_externals/test + . env_python2/bin/activate + make coverage + open -a Firefox.app htmlcov/index.html + deactivate +``` + + diff --git a/test/doc/.gitignore b/test/doc/.gitignore new file mode 100644 index 0000000000..d4e11e5ea0 --- /dev/null +++ b/test/doc/.gitignore @@ -0,0 +1,2 @@ +_build + diff --git a/test/doc/Makefile b/test/doc/Makefile new file mode 100644 index 0000000000..18f4d5bf99 --- /dev/null +++ b/test/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = ManageExternals +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/test/doc/conf.py b/test/doc/conf.py new file mode 100644 index 0000000000..469c0b0dc5 --- /dev/null +++ b/test/doc/conf.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +# +# Manage Externals documentation build configuration file, created by +# sphinx-quickstart on Wed Nov 29 10:53:25 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default.
+ +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.autodoc', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.viewcode', + 'sphinx.ext.githubpages'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Manage Externals' +copyright = u'2017, CSEG at NCAR' +author = u'CSEG at NCAR' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'1.0.0' +# The full version, including alpha/beta/rc tags. +release = u'1.0.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars +html_sidebars = { + '**': [ + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + ] +} + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'ManageExternalsdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'ManageExternals.tex', u'Manage Externals Documentation', + u'CSEG at NCAR', 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'manageexternals', u'Manage Externals Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'ManageExternals', u'Manage Externals Documentation', + author, 'ManageExternals', 'One line description of project.', + 'Miscellaneous'), +] + + + diff --git a/test/doc/develop.rst b/test/doc/develop.rst new file mode 100644 index 0000000000..b817b7b093 --- /dev/null +++ b/test/doc/develop.rst @@ -0,0 +1,202 @@ +Developer Guidelines +==================== + +The manage externals utilities are a lightweight replacement for svn +externals that will work with git repositories pulling in a mixture of +git and svn dependencies. + +Given an externals description and a working copy: + +* *checkout_externals* attempts to make the working copy agree with the + externals description + +* *generate_externals* attempts to make the externals description agree + with the working copy. + +For these operations, the utilities should: + +* operate consistently across git and svn + +* operate simply with minimal user complexity + +* operate robustly across a wide range of repository states + +* provide explicit error messages when a problem occurs + +* leave the working copy in a valid state + +The utilities in manage externals are **NOT** generic wrappers around +revision control operations or a replacement for common tasks. Users +are expected to: + +* create branches prior to starting development + +* add remotes and push changes + +* create tags + +* delete branches + +These types of tasks are often highly workflow dependent, e.g. branch +naming conventions may vary between repositories, have the potential +to destroy user data, introduce significant code complexity and 'edge +cases' that are extremely difficult to detect and test, and often +require subtle decision making, especially if a problem occurs. + +Users who want to automate these types of tasks are encouraged to create their +own tools. The externals description files are explicitly versioned +and the internal APIs are intended to be stable for these purposes. + +Core Design Principles +----------------------- + +1. Users can, and are actively encouraged to, modify the externals + directories using revision control outside of manage_externals + tools. You can't make any assumptions about the state of the + working copy.
Examples: adding a remote, creating a branch, + switching to a branch, deleting the directory entirely. + +2. Given that the user can do anything, the manage externals library + cannot preserve state between calls. The only information it can + rely on is what it expects based on the content of the externals + description file, and what the actual state of the directory tree + is. + +3. Do *not* do anything that will possibly destroy user data! + + a. Do not remove files from the file system. We are operating on + user supplied input. If you don't call 'rm', you can't + accidentally remove the user's data. Thinking of calling + ``shutil.rmtree(user_input)``? What if the user accidentally + specified user_input such that it resolves to their home + directory.... Yeah. Don't go there. + + b. Rely on git and svn to do their job as much as possible. Don't + duplicate functionality. Examples: + + i. We require the working copies to be 'clean' as reported by + ``git status`` and ``svn status``. What if there are misc + editor files floating around that prevent an update? Use the + git and svn ignore functionality so they are not + reported. Don't try to remove them from manage_externals or + determine if they are 'safe' to ignore. + + ii. Do not use '--force'. Ever. This is a sign you are doing + something dangerous, and it may not be what the user + wants. Remember, they are encouraged to modify their repo. + +4. There are often multiple ways to obtain a particular piece of + information from git. Scraping screen output is brittle and + generally not considered a stable API across different versions of + git. Given a choice between: + + a. a lower level git 'plumbing' command that processes a + specific request and returns a success/failure status. + + b. a high level git command that produces a bunch of output + that must be processed. + + We always prefer the former. It almost always involves + writing and maintaining less code and is more likely to be + stable. + +5. Backward compatibility is critical. We have *nested* + repositories. It is trivially easy to change their versions. They may + have very different versions of the top level manage_externals. The + ability to read and work with old model description files is + critical to avoid problems for users. We also have automated tools + (testdb) that must generate and read external description + files. Backward compatibility will make staging changes vastly + simpler. + +Model Users +----------- + +Consider the needs of the following model users when developing manage_externals: + +* Users who will check out the code once, and never change versions. + +* Users who will check out the code once, then work for several years, + never updating, before trying to update or request integration. + +* Users who develop code but do not use revision control beyond the + initial checkout. If they have modified or untracked files in the + repo, they may be irreplaceable. Don't destroy user data. + +* Intermediate users who are working with multiple repos or branches + on a regular basis. They may only use manage_externals weekly or + monthly. Keep the user interface and documentation simple and + explicit. The more command line options they have to remember or + look up, the more frustrated they get. + +* Software engineers who use the tools multiple times a day. It should + get out of their way. + +User Interface +-------------- + +Basic operation for the most standard use cases should be kept as +simple as possible.
Many users will only rarely run the manage +utilities. Even advanced users don't like reading a lot of help +documentation or struggling to remember commands and piece together +what they need to run. Having many command line options, even if not +needed, is extremely frustrating and overwhelming for most users. A few +simple, explicitly named commands are better than a single command +with many options. + +How will users get help if something goes wrong? This is a custom, +one-off solution. Searching the internet for manage_externals will +only return the user doc for this project at best. There isn't likely +to be a stackoverflow question or blog post where someone else already +answered a user's question. And very few people outside this community +will be able to provide help if something goes wrong. The sooner we +kick users out of these utilities and into standard version control +tools, the better off they are going to be if they run into a problem. + +Repositories +------------ + +There are three basic types of repositories that must be considered: + +* container repositories - repositories that are always top level + repositories, and have a group of externals that must be managed. + +* simple repositories - repositories that are externals to another + repository, and do not have any of their own externals that will be + managed. + +* mixed use repositories - repositories that can act as a top level + container repository or as an external to a top level + container. They may also have their own sub-externals that are + required. They may have different externals needs depending on + whether they are top level or not. + +Repositories must be able to check out and switch to both branches and +tags. + +Development +=========== + +The functionality to manage externals is broken into a library of core +functionality and applications built with the library. + +The core library is called 'manic', a pseudo-homophone of (man)age +(ex)ternals that is short, pronounceable and spell-checkable. It is +also no more or less meaningful to an unfamiliar user than a random +jumble of letters forming an acronym. + +The core architecture of manic is: + +* externals description - an abstract description of an external, + including how to obtain it, where to obtain it, and where it goes in + the working tree. + +* externals - the software object representing an external. + +* source trees - a collection of externals + +* repository wrappers - object oriented wrappers around repository + operations, so the higher level management of the source tree and + externals does not have to be concerned with how a particular + external is obtained and managed. + diff --git a/test/doc/index.rst b/test/doc/index.rst new file mode 100644 index 0000000000..9ab287ad8c --- /dev/null +++ b/test/doc/index.rst @@ -0,0 +1,22 @@ +.. Manage Externals documentation master file, created by + sphinx-quickstart on Wed Nov 29 10:53:25 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Manage Externals's documentation! +============================================ + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + + develop.rst + testing.rst + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/test/doc/testing.rst b/test/doc/testing.rst new file mode 100644 index 0000000000..623f0e431c --- /dev/null +++ b/test/doc/testing.rst @@ -0,0 +1,123 @@ +Testing +======= + +The manage_externals package has an automated test suite. All pull +requests are expected to pass 100% of the automated tests, as well as +be pep8 and lint 'clean' and maintain an approximately constant (at a +minimum) level of code coverage. + +Quick Start +----------- + +Do nothing approach +~~~~~~~~~~~~~~~~~~~ + +When you create a pull request on GitHub, Travis-CI continuous +integration testing will run the test suite in both python2 and +python3. Test results, lint results, and code coverage results are +available online. + +Do something approach +~~~~~~~~~~~~~~~~~~~~~ + +In the test directory, run: + +.. code-block:: shell + + make env + make lint + make test + make coverage + + +Automated Testing +----------------- + +The manage_externals manic library and executables are developed to be +python2 and python3 compatible using only the standard library. The +test suites meet the same requirements. But additional tools are +required to provide lint and code coverage metrics and generate +documentation. The requirements are maintained in the requirements.txt +file, and can be automatically installed into an isolated environment +via Makefile. + +Bootstrap requirements: + +* python2 - version 2.7.x or later + +* python3 - version 3.6 tested; other versions may work + +* pip and virtualenv for python2 and python3 + +Note: all make rules can be of the form ``make python=pythonX rule`` +or ``make rule`` depending on whether you want to use the default system +python or specify a specific version. + +The Makefile in the test directory has the following rules: + +* ``make python=pythonX env`` - create a python virtual environment + for python2 or python3 and install all required packages. These + packages are required to run lint or coverage. + +* ``make style`` - runs autopep8 + +* ``make lint`` - runs autopep8 and pylint + +* ``make test`` - run the full test suite + +* ``make utest`` - run just the unit tests + +* ``make stest`` - run just the system integration tests + +* ``make coverage`` - run the full test suite through the code + coverage tool and generate an html report. + +* ``make readme`` - automatically generate the README files. + +* ``make clean`` - remove editor and pyc files + +* ``make clobber`` - remove all generated test files, including + virtual environments, coverage reports, and temporary test + repository directories. + +Unit Tests +---------- + +Unit tests are probably not 'true unit tests' for the pedantic, but +are pragmatic unit tests. They cover small practical code blocks: +functions, class methods, and groups of functions and class methods. + +System Integration Tests +------------------------ + +NOTE(bja, 2017-11) The system integration tests currently do not include svn repositories. + +The manage_externals package is extremely tedious and error-prone to test manually. + +Combinations that must be tested to ensure basic functionality are: + +* container repository pulling in simple externals + +* container repository pulling in mixed externals with sub-externals.
+ +* mixed repository acting as a container, pulling in simple externals and sub-externals + +Automatic system tests are handled the same way manual testing is done: + +* clone a test repository + +* create an externals description file for the test + +* run the executable with the desired args + +* check the results + +* potentially modify the repo (checkout a different branch) + +* rerun and test + +* etc + +The automated system stores small test repositories in the main repo +by adding them as bare repositories. These repos are cloned via a +subprocess call to git and manipulated during the tests. diff --git a/test/repos/container.git/HEAD b/test/repos/container.git/HEAD new file mode 100644 index 0000000000..cb089cd89a --- /dev/null +++ b/test/repos/container.git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test/repos/container.git/config b/test/repos/container.git/config new file mode 100644 index 0000000000..e6da231579 --- /dev/null +++ b/test/repos/container.git/config @@ -0,0 +1,6 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = true + ignorecase = true + precomposeunicode = true diff --git a/test/repos/container.git/description b/test/repos/container.git/description new file mode 100644 index 0000000000..498b267a8c --- /dev/null +++ b/test/repos/container.git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test/repos/container.git/info/exclude b/test/repos/container.git/info/exclude new file mode 100644 index 0000000000..a5196d1be8 --- /dev/null +++ b/test/repos/container.git/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 new file mode 100644 index 0000000000000000000000000000000000000000..f65234e17f32800b1be0aa9908cc706458b14605 GIT binary patch literal 133 zcmV;00DAv;0acB$4#OY}L_6~pma=t7)Fr=DfpLNr2P1F?mVSF_r3_t8x_fuJAR6GY zuD1yyS3=Xu)WDKA@Ra});Xx7fWf1zv2~1TS@=422pQw4`eHcB9X3EwU=O)-GQ}s5s nqUZ%S7HaN3i|$`ck;m7Sz6S{Y_}`UoN%K{iOGozsJ+C?sZtFeC literal 0 HcmV?d00001 diff --git a/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de b/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de new file mode 100644 index 0000000000000000000000000000000000000000..9759965b1ba440f1899216c1c82c0780fb65f46e GIT binary patch literal 136 zcmV;30C)d*0hNtQ3c@fDKwak)a{8g?ULFizQ5yOj!O$#BY{3QX>9e{j4e8<)

AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn~QNUpn5)Pnq=ii~6DWK2pp8O#dS+Wke_L literal 0 HcmV?d00001 diff --git a/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 b/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 new file mode 100644 index 0000000000000000000000000000000000000000..460fd7781917e095c826e8bc77ad53d943f199aa GIT binary patch literal 81 zcmV-X0IvUd0R_Ry4S+BV1VG+Yu@&$_q5vvMU;#^(9XS?9_smrFie(;Fw=7}|1e%|)SJ(su+=pM4B|mwZ+(Kd$t05rB_(M< zmNu<2!_Lge2B#67kHO(Q1a!AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn`tC9-|*xG$A9N literal 0 HcmV?d00001 diff --git a/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 new file mode 100644 index 0000000000000000000000000000000000000000..f65234e17f32800b1be0aa9908cc706458b14605 GIT binary patch literal 133 zcmV;00DAv;0acB$4#OY}L_6~pma=t7)Fr=DfpLNr2P1F?mVSF_r3_t8x_fuJAR6GY zuD1yyS3=Xu)WDKA@Ra});Xx7fWf1zv2~1TS@=422pQw4`eHcB9X3EwU=O)-GQ}s5s nqUZ%S7HaN3i|$`ck;m7Sz6S{Y_}`UoN%K{iOGozsJ+C?sZtFeC literal 0 HcmV?d00001 diff --git a/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 b/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 new file mode 100644 index 0000000000..619e38ee78 --- /dev/null +++ b/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 @@ -0,0 +1,2 @@ +x=Ê;Ã0 ÑÔ:Åvî §Ì=rJf`)ˆ noW)¦z“›g¼—Ïë«A í•>.p¢ˆA +!ìÜ  w4ݵ¡¸Qªé€Øú=©Ã¤á¨ÏZ9ü0„þûkÌ éžG)* \ No newline at end of file diff --git a/test/repos/mixed-cont-ext.git/refs/heads/master b/test/repos/mixed-cont-ext.git/refs/heads/master new file mode 100644 index 0000000000..508331f329 --- /dev/null +++ b/test/repos/mixed-cont-ext.git/refs/heads/master @@ -0,0 +1 @@ +06ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 diff --git a/test/repos/simple-ext-fork.git/HEAD b/test/repos/simple-ext-fork.git/HEAD new file mode 100644 index 0000000000..cb089cd89a --- /dev/null +++ b/test/repos/simple-ext-fork.git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test/repos/simple-ext-fork.git/config b/test/repos/simple-ext-fork.git/config new file mode 100644 index 0000000000..04eba17870 --- /dev/null +++ b/test/repos/simple-ext-fork.git/config @@ -0,0 +1,8 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = true + ignorecase = true + precomposeunicode = true +[remote "origin"] + url = /Users/andreb/projects/ncar/git-conversion/checkout-model-dev/cesm-demo-externals/manage_externals/test/repos/simple-ext.git diff --git a/test/repos/simple-ext-fork.git/description b/test/repos/simple-ext-fork.git/description new file mode 100644 index 0000000000..498b267a8c --- /dev/null +++ b/test/repos/simple-ext-fork.git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test/repos/simple-ext-fork.git/info/exclude b/test/repos/simple-ext-fork.git/info/exclude new file mode 100644 index 0000000000..a5196d1be8 --- /dev/null +++ b/test/repos/simple-ext-fork.git/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. 
+# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f new file mode 100644 index 0000000000000000000000000000000000000000..ae28c037e5e8773bab7a7f9b6b050a01c3c8402a GIT binary patch literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lzn&Ekz!U-;cU~)E`&5u^pl|A>?=DrCt|Zp*KGhtORPb%uc6q&p;{~x`YAHy z#2GbEv6YQH#`fOIuH1gSE*yL=Ojyh~{nIdqe*nnpf*T V&^Fln@|2-4tBgli^9u#mM`!{nPaFUM literal 0 HcmV?d00001 diff --git a/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c new file mode 100644 index 0000000000..564e7bba63 --- /dev/null +++ b/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c @@ -0,0 +1,2 @@ +x%ŒK +Â0@]çse&ßDÔ›L’!´˜¶„l¼½).¼Åãu.@Æ_ö¸Jê0ÇàìlM–Ä~v:ÄèmLÌÆi™åY*/ŸÛè@ŽpòÞ W ˆJ¥&Üå¿ø)´*Í \ No newline at end of file diff --git a/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f b/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f new file mode 100644 index 0000000000000000000000000000000000000000..0d738af68b021dcd9918c8f2047aa4fff55bf6e4 GIT binary patch literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lzniemt(y-3DP$mtIvOOf literal 0 HcmV?d00001 diff --git a/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 new file mode 100644 index 0000000000000000000000000000000000000000..9da8434f65ef3bfdb57cb8117e312a56663a31a2 GIT binary patch literal 159 zcmV;Q0AT-k0hNwh3c@fD0R7G>_5#Z8=Ft>H)JyoiX*NFFNQn2h9>Kq1U|^;?&-V_@ zcGH_GU?Q(kip?&NPmV1)rl3VdZ7GGKLl-2Pw=`WkjA`(0bci¹`ý}0…M”؇BÚÁs0/µâ¿}öï:: \ No newline at end of file diff --git a/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b b/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b new file mode 100644 index 0000000000..9a31c7ef2e --- /dev/null +++ b/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b @@ -0,0 +1,2 @@ +x•ŽKnÃ0 ³Ö)x”,ÊI½EÑŸ´–A¹Ü#t7o€ŒìÛ¶vp.žzS…ÁšÆƒ&oÑ„©d¦8¹xLd@™Ì‹›ÖCð6f¯% +œpt$‰m&ŽJd…¦¡øhøÝ—½Á—VxÔÒ®ùÉpŸ7^/²o7°d­K1ÂGDsØ#¯ë¿æ{o?Z 7®²€,\g½˜AV=y!@Ff%bx&`ZxO$xP47FG^)_lznvGy0&Z${j?E8>6rD10GHRYE2d literal 0 HcmV?d00001 diff --git a/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 b/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 new file mode 100644 index 0000000000000000000000000000000000000000..d3dfe31113715fe07ea6833f0c2a25e868ac20b2 GIT binary patch literal 165 zcmV;W09yZe0hNwR4#F@DL|Nw)z5pm6r*$QSfIWwB8k=t$6s6+&lq0Ykjo#?ZSf=UT zz+~D012)4Gj)~xM%ugTv-b1AFi TQ|c4S3@Y4~D&BknM3zUWvn5b3 literal 0 HcmV?d00001 diff --git a/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 b/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 new file mode 100644 index 0000000000000000000000000000000000000000..0a2ec0494bc1600144cb54b61a6d7b43c7f3e806 GIT binary patch literal 119 zcmV--0Eqv10X50d4FVw$MNz-0;#IJTYiz*^YyjkKAhHY@MpwI+#E{&tb3>7U^YwDN zr`$2}=y`92Fm{8oNzW$w#gQ$c3ivT<^#zfQHTwFÁ©¹£rPkÖSèkJ´^ë \ No newline at end of 
file diff --git a/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 b/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 new file mode 100644 index 0000000000000000000000000000000000000000..9b40a0afa00b93a318cd503d3b29db1162978b03 GIT binary patch literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznD—}ÂD>£Nƒv“{ŠZ¼M˜I…¥?jƒ‹Ìpžs8ÄgøÓ½„qÚ¥ZŽ€qo j†­f­ÕJ×{]þÕµÓ¥®¥Om/¨3Ü$ô¥‰Q_@ÞH© \ No newline at end of file diff --git a/test/repos/simple-ext-fork.git/packed-refs b/test/repos/simple-ext-fork.git/packed-refs new file mode 100644 index 0000000000..b8f9e86308 --- /dev/null +++ b/test/repos/simple-ext-fork.git/packed-refs @@ -0,0 +1,5 @@ +# pack-refs with: peeled fully-peeled sorted +36418b4e5665956a90725c9a1b5a8e551c5f3d48 refs/heads/feature2 +9b75494003deca69527bb64bcaa352e801611dd2 refs/heads/master +11a76e3d9a67313dec7ce1230852ab5c86352c5c refs/tags/tag1 +^9b75494003deca69527bb64bcaa352e801611dd2 diff --git a/test/repos/simple-ext-fork.git/refs/heads/feature2 b/test/repos/simple-ext-fork.git/refs/heads/feature2 new file mode 100644 index 0000000000..d223b0362d --- /dev/null +++ b/test/repos/simple-ext-fork.git/refs/heads/feature2 @@ -0,0 +1 @@ +f268d4e56d067da9bd1d85e55bdc40a8bd2b0bca diff --git a/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 b/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 new file mode 100644 index 0000000000..2764b552d5 --- /dev/null +++ b/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 @@ -0,0 +1 @@ +8d2b3b35126224c975d23f109aa1e3cbac452989 diff --git a/test/repos/simple-ext.git/HEAD b/test/repos/simple-ext.git/HEAD new file mode 100644 index 0000000000..cb089cd89a --- /dev/null +++ b/test/repos/simple-ext.git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test/repos/simple-ext.git/config b/test/repos/simple-ext.git/config new file mode 100644 index 0000000000..e6da231579 --- /dev/null +++ b/test/repos/simple-ext.git/config @@ -0,0 +1,6 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = true + ignorecase = true + precomposeunicode = true diff --git a/test/repos/simple-ext.git/description b/test/repos/simple-ext.git/description new file mode 100644 index 0000000000..498b267a8c --- /dev/null +++ b/test/repos/simple-ext.git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test/repos/simple-ext.git/info/exclude b/test/repos/simple-ext.git/info/exclude new file mode 100644 index 0000000000..a5196d1be8 --- /dev/null +++ b/test/repos/simple-ext.git/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. 
+# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f new file mode 100644 index 0000000000000000000000000000000000000000..ae28c037e5e8773bab7a7f9b6b050a01c3c8402a GIT binary patch literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lznåY*/ŸÛè@ŽpòÞ W ˆJ¥&Üå¿ø)´*Í \ No newline at end of file diff --git a/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 new file mode 100644 index 0000000000000000000000000000000000000000..9da8434f65ef3bfdb57cb8117e312a56663a31a2 GIT binary patch literal 159 zcmV;Q0AT-k0hNwh3c@fD0R7G>_5#Z8=Ft>H)JyoiX*NFFNQn2h9>Kq1U|^;?&-V_@ zcGH_GU?Q(kip?&NPmV1)rl3VdZ7GGKLl-2Pw=`WkjA`(0bciÁ©¹£rPkÖSèkJ´^ë \ No newline at end of file diff --git a/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 b/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 new file mode 100644 index 0000000000..1d27accb58 --- /dev/null +++ b/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 @@ -0,0 +1 @@ +x ÈÁ € @ßT±øàeV` ›p ¹;£v¯É¼&מ±Äi+bø%˜œ£Ns(G7ñ®/nñ‚ÖÁÇ©-UlGj»ÐæV&¿”Yÿ+!|£òŠ \ No newline at end of file diff --git a/test/repos/simple-ext.git/refs/heads/feature2 b/test/repos/simple-ext.git/refs/heads/feature2 new file mode 100644 index 0000000000..01a0dd6e23 --- /dev/null +++ b/test/repos/simple-ext.git/refs/heads/feature2 @@ -0,0 +1 @@ +36418b4e5665956a90725c9a1b5a8e551c5f3d48 diff --git a/test/repos/simple-ext.git/refs/heads/master b/test/repos/simple-ext.git/refs/heads/master new file mode 100644 index 0000000000..5c67504966 --- /dev/null +++ b/test/repos/simple-ext.git/refs/heads/master @@ -0,0 +1 @@ +9b75494003deca69527bb64bcaa352e801611dd2 diff --git a/test/repos/simple-ext.git/refs/tags/tag1 b/test/repos/simple-ext.git/refs/tags/tag1 new file mode 100644 index 0000000000..ee595be8bd --- /dev/null +++ b/test/repos/simple-ext.git/refs/tags/tag1 @@ -0,0 +1 @@ +11a76e3d9a67313dec7ce1230852ab5c86352c5c diff --git a/test/requirements.txt b/test/requirements.txt new file mode 100644 index 0000000000..d66f6f1e67 --- /dev/null +++ b/test/requirements.txt @@ -0,0 +1,5 @@ +pylint>=1.7.0 +autopep8>=1.3.0 +coverage>=4.4.0 +coveralls>=1.2.0 +sphinx>=1.6.0 diff --git a/test/test_sys_checkout.py b/test/test_sys_checkout.py new file mode 100644 index 0000000000..d988d83c87 --- /dev/null +++ b/test/test_sys_checkout.py @@ -0,0 +1,1219 @@ +#!/usr/bin/env python + +"""Unit test driver for checkout_externals + +Note: this script assume the path to the manic and +checkout_externals module is already in the python path. This is +usually handled by the makefile. If you call it directly, you may need +to adjust your path. + +NOTE(bja, 2017-11) If a test fails, we want to keep the repo for that +test. But the tests will keep running, so we need a unique name. Also, +tearDown is always called after each test. I haven't figured out how +to determine if an assertion failed and whether it is safe to clean up +the test repos. + +So the solution is: + +* assign a unique id to each test repo. + +* never cleanup during the run. + +* Erase any existing repos at the begining of the module in +setUpModule. 
+ +""" + +# NOTE(bja, 2017-11) pylint complains that the module is too big, but +# I'm still working on how to break up the tests and still have the +# temporary directory be preserved.... +# pylint: disable=too-many-lines + + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import os +import os.path +import shutil +import unittest + +from manic.externals_description import ExternalsDescription +from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM +from manic.externals_status import ExternalStatus +from manic.repository_git import GitRepository +from manic.utils import printlog, execute_subprocess +from manic.global_constants import LOCAL_PATH_INDICATOR +from manic import checkout + +# ConfigParser was renamed in python2 to configparser. In python2, +# ConfigParser returns byte strings, str, instead of unicode. We need +# unicode to be compatible with xml and json parser and python3. +try: + # python2 + from ConfigParser import SafeConfigParser as config_parser +except ImportError: + # python3 + from configparser import ConfigParser as config_parser + +# --------------------------------------------------------------------- +# +# Global constants +# +# --------------------------------------------------------------------- + +# environment variable names +MANIC_TEST_BARE_REPO_ROOT = 'MANIC_TEST_BARE_REPO_ROOT' +MANIC_TEST_TMP_REPO_ROOT = 'MANIC_TEST_TMP_REPO_ROOT' + +# directory names +TMP_REPO_DIR_NAME = 'tmp' +BARE_REPO_ROOT_NAME = 'repos' +CONTAINER_REPO_NAME = 'container.git' +MIXED_REPO_NAME = 'mixed-cont-ext.git' +SIMPLE_REPO_NAME = 'simple-ext.git' +SIMPLE_FORK_NAME = 'simple-ext-fork.git' +SIMPLE_LOCAL_ONLY_NAME = '.' +ERROR_REPO_NAME = 'error' +EXTERNALS_NAME = 'externals' +SUB_EXTERNALS_PATH = 'src' +CFG_NAME = 'externals.cfg' +CFG_SUB_NAME = 'sub-externals.cfg' +README_NAME = 'readme.txt' + +SVN_TEST_REPO = 'https://github.com/escomp/cesm' + + +def setUpModule(): # pylint: disable=C0103 + """Setup for all tests in this module. It is called once per module! + """ + repo_root = os.path.join(os.getcwd(), TMP_REPO_DIR_NAME) + repo_root = os.path.abspath(repo_root) + # delete if it exists from previous runs + try: + shutil.rmtree(repo_root) + except BaseException: + pass + # create clean dir for this run + os.mkdir(repo_root) + # set into the environment so var will be expanded in externals + # filess when executables are run + os.environ[MANIC_TEST_TMP_REPO_ROOT] = repo_root + + +class GenerateExternalsDescriptionCfgV1(object): + """Class to provide building blocks to create + ExternalsDescriptionCfgV1 files. + + Includes predefined files used in tests. + + """ + + def __init__(self): + self._schema_version = '1.0.0' + self._config = None + + def container_full(self, dest_dir): + """Create the full container config file with simple and mixed use + externals + + """ + self.create_config() + self.create_section(SIMPLE_REPO_NAME, 'simp_tag', + tag='tag1') + + self.create_section(SIMPLE_REPO_NAME, 'simp_branch', + branch='feature2') + + self.create_section(SIMPLE_REPO_NAME, 'simp_opt', + tag='tag1', required=False) + + self.create_section(MIXED_REPO_NAME, 'mixed_req', + tag='tag1', externals=CFG_SUB_NAME) + + self.create_section(MIXED_REPO_NAME, 'mixed_opt', + tag='tag1', externals=CFG_SUB_NAME, + required=False) + + self._write_config(dest_dir) + + def container_simple_required(self, dest_dir): + """Create a container externals file with only simple externals. 
+ + """ + self.create_config() + self.create_section(SIMPLE_REPO_NAME, 'simp_tag', + tag='tag1') + + self.create_section(SIMPLE_REPO_NAME, 'simp_branch', + branch='feature2') + + self._write_config(dest_dir) + + def container_simple_optional(self, dest_dir): + """Create a container externals file with optional simple externals + + """ + self.create_config() + self.create_section(SIMPLE_REPO_NAME, 'simp_req', + tag='tag1') + + self.create_section(SIMPLE_REPO_NAME, 'simp_opt', + tag='tag1', required=False) + + self._write_config(dest_dir) + + def container_simple_svn(self, dest_dir): + """Create a container externals file with only simple externals. + + """ + self.create_config() + self.create_section(SIMPLE_REPO_NAME, 'simp_tag', tag='tag1') + + self.create_svn_external('svn_branch', branch='trunk') + self.create_svn_external('svn_tag', tag='tags/cesm2.0.beta07') + + self._write_config(dest_dir) + + def mixed_simple_base(self, dest_dir): + """Create a mixed-use base externals file with only simple externals. + + """ + self.create_config() + self.create_section_ext_only('mixed_base') + self.create_section(SIMPLE_REPO_NAME, 'simp_tag', + tag='tag1') + + self.create_section(SIMPLE_REPO_NAME, 'simp_branch', + branch='feature2') + + self._write_config(dest_dir) + + def mixed_simple_sub(self, dest_dir): + """Create a mixed-use sub externals file with only simple externals. + + """ + self.create_config() + self.create_section(SIMPLE_REPO_NAME, 'simp_tag', + tag='tag1', path=SUB_EXTERNALS_PATH) + + self.create_section(SIMPLE_REPO_NAME, 'simp_branch', + branch='feature2', path=SUB_EXTERNALS_PATH) + + self._write_config(dest_dir, filename=CFG_SUB_NAME) + + def _write_config(self, dest_dir, filename=CFG_NAME): + """Write the configuration file to disk + + """ + dest_path = os.path.join(dest_dir, filename) + with open(dest_path, 'w') as configfile: + self._config.write(configfile) + + def create_config(self): + """Create an config object and add the required metadata section + + """ + self._config = config_parser() + self.create_metadata() + + def create_metadata(self): + """Create the metadata section of the config file + """ + self._config.add_section(DESCRIPTION_SECTION) + + self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, + self._schema_version) + + def create_section(self, repo_type, name, tag='', branch='', + required=True, path=EXTERNALS_NAME, externals=''): + """Create a config section with autofilling some items and handling + optional items. + + """ + # pylint: disable=R0913 + self._config.add_section(name) + self._config.set(name, ExternalsDescription.PATH, + os.path.join(path, name)) + + self._config.set(name, ExternalsDescription.PROTOCOL, + ExternalsDescription.PROTOCOL_GIT) + + repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', repo_type) + self._config.set(name, ExternalsDescription.REPO_URL, repo_url) + + self._config.set(name, ExternalsDescription.REQUIRED, str(required)) + + if tag: + self._config.set(name, ExternalsDescription.TAG, tag) + + if branch: + self._config.set(name, ExternalsDescription.BRANCH, branch) + + if externals: + self._config.set(name, ExternalsDescription.EXTERNALS, externals) + + def create_section_ext_only(self, name, + required=True, externals=CFG_SUB_NAME): + """Create a config section with autofilling some items and handling + optional items. 
+ + """ + # pylint: disable=R0913 + self._config.add_section(name) + self._config.set(name, ExternalsDescription.PATH, LOCAL_PATH_INDICATOR) + + self._config.set(name, ExternalsDescription.PROTOCOL, + ExternalsDescription.PROTOCOL_EXTERNALS_ONLY) + + self._config.set(name, ExternalsDescription.REPO_URL, + LOCAL_PATH_INDICATOR) + + self._config.set(name, ExternalsDescription.REQUIRED, str(required)) + + if externals: + self._config.set(name, ExternalsDescription.EXTERNALS, externals) + + def create_svn_external(self, name, tag='', branch=''): + """Create a config section for an svn repository. + + """ + self._config.add_section(name) + self._config.set(name, ExternalsDescription.PATH, + os.path.join(EXTERNALS_NAME, name)) + + self._config.set(name, ExternalsDescription.PROTOCOL, + ExternalsDescription.PROTOCOL_SVN) + + self._config.set(name, ExternalsDescription.REPO_URL, SVN_TEST_REPO) + + self._config.set(name, ExternalsDescription.REQUIRED, str(True)) + + if tag: + self._config.set(name, ExternalsDescription.TAG, tag) + + if branch: + self._config.set(name, ExternalsDescription.BRANCH, branch) + + @staticmethod + def create_branch(dest_dir, repo_name, branch, with_commit=False): + """Update a repository branch, and potentially the remote. + """ + # pylint: disable=R0913 + cwd = os.getcwd() + repo_root = os.path.join(dest_dir, EXTERNALS_NAME) + repo_root = os.path.join(repo_root, repo_name) + os.chdir(repo_root) + cmd = ['git', 'checkout', '-b', branch, ] + execute_subprocess(cmd) + if with_commit: + msg = 'start work on {0}'.format(branch) + with open(README_NAME, 'a') as handle: + handle.write(msg) + cmd = ['git', 'add', README_NAME, ] + execute_subprocess(cmd) + cmd = ['git', 'commit', '-m', msg, ] + execute_subprocess(cmd) + os.chdir(cwd) + + def update_branch(self, dest_dir, name, branch, repo_type=None, + filename=CFG_NAME): + """Update a repository branch, and potentially the remote. + """ + # pylint: disable=R0913 + self._config.set(name, ExternalsDescription.BRANCH, branch) + + if repo_type: + if repo_type == SIMPLE_LOCAL_ONLY_NAME: + repo_url = SIMPLE_LOCAL_ONLY_NAME + else: + repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', + repo_type) + self._config.set(name, ExternalsDescription.REPO_URL, repo_url) + + try: + # remove the tag if it existed + self._config.remove_option(name, ExternalsDescription.TAG) + except BaseException: + pass + + self._write_config(dest_dir, filename) + + def update_svn_branch(self, dest_dir, name, branch, filename=CFG_NAME): + """Update a repository branch, and potentially the remote. + """ + # pylint: disable=R0913 + self._config.set(name, ExternalsDescription.BRANCH, branch) + + try: + # remove the tag if it existed + self._config.remove_option(name, ExternalsDescription.TAG) + except BaseException: + pass + + self._write_config(dest_dir, filename) + + def update_tag(self, dest_dir, name, tag, repo_type=None, + filename=CFG_NAME, remove_branch=True): + """Update a repository tag, and potentially the remote + + NOTE(bja, 2017-11) remove_branch=False should result in an + overspecified external with both a branch and tag. This is + used for error condition testing. 
+ + """ + # pylint: disable=R0913 + self._config.set(name, ExternalsDescription.TAG, tag) + + if repo_type: + repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', repo_type) + self._config.set(name, ExternalsDescription.REPO_URL, repo_url) + + try: + # remove the branch if it existed + if remove_branch: + self._config.remove_option(name, ExternalsDescription.BRANCH) + except BaseException: + pass + + self._write_config(dest_dir, filename) + + def update_underspecify_branch_tag(self, dest_dir, name, + filename=CFG_NAME): + """Update a repository protocol, and potentially the remote + """ + # pylint: disable=R0913 + try: + # remove the branch if it existed + self._config.remove_option(name, ExternalsDescription.BRANCH) + except BaseException: + pass + + try: + # remove the tag if it existed + self._config.remove_option(name, ExternalsDescription.TAG) + except BaseException: + pass + + self._write_config(dest_dir, filename) + + def update_underspecify_remove_url(self, dest_dir, name, + filename=CFG_NAME): + """Update a repository protocol, and potentially the remote + """ + # pylint: disable=R0913 + try: + # remove the repo url if it existed + self._config.remove_option(name, ExternalsDescription.REPO_URL) + except BaseException: + pass + + self._write_config(dest_dir, filename) + + def update_protocol(self, dest_dir, name, protocol, repo_type=None, + filename=CFG_NAME): + """Update a repository protocol, and potentially the remote + """ + # pylint: disable=R0913 + self._config.set(name, ExternalsDescription.PROTOCOL, protocol) + + if repo_type: + repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', repo_type) + self._config.set(name, ExternalsDescription.REPO_URL, repo_url) + + self._write_config(dest_dir, filename) + + +class BaseTestSysCheckout(unittest.TestCase): + """Base class of reusable systems level test setup for + checkout_externals + + """ + # NOTE(bja, 2017-11) pylint complains about long method names, but + # it is hard to differentiate tests without making them more + # cryptic. + # pylint: disable=invalid-name + + status_args = ['--status'] + checkout_args = [] + optional_args = ['--optional'] + verbose_args = ['--status', '--verbose'] + + def setUp(self): + """Setup for all individual checkout_externals tests + """ + # directory we want to return to after the test system and + # checkout_externals are done cd'ing all over the place. 
+ self._return_dir = os.getcwd() + + self._test_id = self.id().split('.')[-1] + + # path to the executable + self._checkout = os.path.join('../checkout_externals') + self._checkout = os.path.abspath(self._checkout) + + # directory where we have test repositories + self._bare_root = os.path.join(os.getcwd(), BARE_REPO_ROOT_NAME) + self._bare_root = os.path.abspath(self._bare_root) + + # set into the environment so var will be expanded in externals files + os.environ[MANIC_TEST_BARE_REPO_ROOT] = self._bare_root + + # set the input file generator + self._generator = GenerateExternalsDescriptionCfgV1() + # set the input file generator for secondary externals + self._sub_generator = GenerateExternalsDescriptionCfgV1() + + def tearDown(self): + """Tear down for individual tests + """ + # remove the env var we added in setup + del os.environ[MANIC_TEST_BARE_REPO_ROOT] + + # return to our common starting point + os.chdir(self._return_dir) + + def setup_test_repo(self, parent_repo_name): + """Setup the paths and clone the base test repo + + """ + # unique repo for this test + test_dir_name = self._test_id + print("Test repository name: {0}".format(test_dir_name)) + + parent_repo_dir = os.path.join(self._bare_root, parent_repo_name) + dest_dir = os.path.join(os.environ[MANIC_TEST_TMP_REPO_ROOT], + test_dir_name) + # pylint: disable=W0212 + GitRepository._git_clone(parent_repo_dir, dest_dir) + return dest_dir + + @staticmethod + def _add_file_to_repo(under_test_dir, filename): + """Add a file to the repository so we can put it into a dirty state + + """ + dirty_path = os.path.join(under_test_dir, filename) + with open(dirty_path, 'w') as tmp: + tmp.write('Hello, world!') + + @staticmethod + def execute_cmd_in_dir(under_test_dir, args): + """Execute the checkout command in the appropriate repo dir with the + specified additional args + + Note that we are calling the command line processing and main + routines and not using a subprocess call so that we get code + coverage results!
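+
+ Typical usage from a test case (see the tests below) looks like:
+
+     overall, tree = self.execute_cmd_in_dir(under_test_dir,
+                                             self.status_args)
+
+ where 'overall' is the overall return status from checkout.main()
+ and 'tree' is the per-external status collection checked by the
+ _check_* helper methods.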
+ + """ + cwd = os.getcwd() + checkout_path = os.path.abspath('{0}/../../checkout_externals') + os.chdir(under_test_dir) + cmdline = ['--externals', CFG_NAME, ] + cmdline += args + repo_root = 'MANIC_TEST_BARE_REPO_ROOT={root}'.format( + root=os.environ[MANIC_TEST_BARE_REPO_ROOT]) + manual_cmd = ('Test cmd:\npushd {cwd}; {env} {checkout} {args}'.format( + cwd=under_test_dir, env=repo_root, checkout=checkout_path, + args=' '.join(cmdline))) + printlog(manual_cmd) + options = checkout.commandline_arguments(cmdline) + overall_status, tree_status = checkout.main(options) + os.chdir(cwd) + return overall_status, tree_status + + # ---------------------------------------------------------------- + # + # Check results for generic perturbation of states + # + # ---------------------------------------------------------------- + def _check_generic_empty_default_required(self, tree, name): + self.assertEqual(tree[name].sync_state, ExternalStatus.EMPTY) + self.assertEqual(tree[name].clean_state, ExternalStatus.DEFAULT) + self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED) + + def _check_generic_ok_clean_required(self, tree, name): + self.assertEqual(tree[name].sync_state, ExternalStatus.STATUS_OK) + self.assertEqual(tree[name].clean_state, ExternalStatus.STATUS_OK) + self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED) + + def _check_generic_ok_dirty_required(self, tree, name): + self.assertEqual(tree[name].sync_state, ExternalStatus.STATUS_OK) + self.assertEqual(tree[name].clean_state, ExternalStatus.DIRTY) + self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED) + + def _check_generic_modified_ok_required(self, tree, name): + self.assertEqual(tree[name].sync_state, ExternalStatus.MODEL_MODIFIED) + self.assertEqual(tree[name].clean_state, ExternalStatus.STATUS_OK) + self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED) + + def _check_generic_empty_default_optional(self, tree, name): + self.assertEqual(tree[name].sync_state, ExternalStatus.EMPTY) + self.assertEqual(tree[name].clean_state, ExternalStatus.DEFAULT) + self.assertEqual(tree[name].source_type, ExternalStatus.OPTIONAL) + + def _check_generic_ok_clean_optional(self, tree, name): + self.assertEqual(tree[name].sync_state, ExternalStatus.STATUS_OK) + self.assertEqual(tree[name].clean_state, ExternalStatus.STATUS_OK) + self.assertEqual(tree[name].source_type, ExternalStatus.OPTIONAL) + + # ---------------------------------------------------------------- + # + # Check results for individual named externals + # + # ---------------------------------------------------------------- + def _check_simple_tag_empty(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_tag'.format(directory) + self._check_generic_empty_default_required(tree, name) + + def _check_simple_tag_ok(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_tag'.format(directory) + self._check_generic_ok_clean_required(tree, name) + + def _check_simple_tag_dirty(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_tag'.format(directory) + self._check_generic_ok_dirty_required(tree, name) + + def _check_simple_branch_empty(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_branch'.format(directory) + self._check_generic_empty_default_required(tree, name) + + def _check_simple_branch_ok(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_branch'.format(directory) + self._check_generic_ok_clean_required(tree, name) + + def _check_simple_branch_modified(self, tree, directory=EXTERNALS_NAME): + name = 
'./{0}/simp_branch'.format(directory) + self._check_generic_modified_ok_required(tree, name) + + def _check_simple_req_empty(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_req'.format(directory) + self._check_generic_empty_default_required(tree, name) + + def _check_simple_req_ok(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_req'.format(directory) + self._check_generic_ok_clean_required(tree, name) + + def _check_simple_opt_empty(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_opt'.format(directory) + self._check_generic_empty_default_optional(tree, name) + + def _check_simple_opt_ok(self, tree, directory=EXTERNALS_NAME): + name = './{0}/simp_opt'.format(directory) + self._check_generic_ok_clean_optional(tree, name) + + # ---------------------------------------------------------------- + # + # Check results for groups of externals under specific conditions + # + # ---------------------------------------------------------------- + def _check_container_simple_required_pre_checkout(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_empty(tree) + self._check_simple_branch_empty(tree) + + def _check_container_simple_required_checkout(self, overall, tree): + # Note, this is the internal tree status just before checkout + self.assertEqual(overall, 0) + self._check_simple_tag_empty(tree) + self._check_simple_branch_empty(tree) + + def _check_container_simple_required_post_checkout(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_ok(tree) + self._check_simple_branch_ok(tree) + + def _check_container_simple_optional_pre_checkout(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_req_empty(tree) + self._check_simple_opt_empty(tree) + + def _check_container_simple_optional_checkout(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_req_empty(tree) + self._check_simple_opt_empty(tree) + + def _check_container_simple_optional_post_checkout(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_req_ok(tree) + self._check_simple_opt_empty(tree) + + def _check_container_simple_optional_post_optional(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_req_ok(tree) + self._check_simple_opt_ok(tree) + + def _check_container_simple_required_sb_modified(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_ok(tree) + self._check_simple_branch_modified(tree) + + def _check_container_simple_optional_st_dirty(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_dirty(tree) + self._check_simple_branch_ok(tree) + + def _check_mixed_sub_simple_required_pre_checkout(self, overall, tree): + # Note, this is the internal tree status just before checkout + self.assertEqual(overall, 0) + self._check_simple_tag_empty(tree, directory=EXTERNALS_NAME) + self._check_simple_branch_empty(tree, directory=EXTERNALS_NAME) + self._check_simple_tag_empty(tree, directory=SUB_EXTERNALS_PATH) + self._check_simple_branch_empty(tree, directory=SUB_EXTERNALS_PATH) + + def _check_mixed_sub_simple_required_checkout(self, overall, tree): + # Note, this is the internal tree status just before checkout + self.assertEqual(overall, 0) + self._check_simple_tag_empty(tree, directory=EXTERNALS_NAME) + self._check_simple_branch_empty(tree, directory=EXTERNALS_NAME) + self._check_simple_tag_empty(tree, directory=SUB_EXTERNALS_PATH) + self._check_simple_branch_empty(tree, directory=SUB_EXTERNALS_PATH) + + def 
_check_mixed_sub_simple_required_post_checkout(self, overall, tree): + # Note, this is the internal tree status just before checkout + self.assertEqual(overall, 0) + self._check_simple_tag_ok(tree, directory=EXTERNALS_NAME) + self._check_simple_branch_ok(tree, directory=EXTERNALS_NAME) + self._check_simple_tag_ok(tree, directory=SUB_EXTERNALS_PATH) + self._check_simple_branch_ok(tree, directory=SUB_EXTERNALS_PATH) + + +class TestSysCheckout(BaseTestSysCheckout): + """Run systems level tests of checkout_externals + + """ + # NOTE(bja, 2017-11) pylint complains about long method names, but + # it is hard to differentiate tests without making them more + # cryptic. + # pylint: disable=invalid-name + + # ---------------------------------------------------------------- + # + # Run systems tests + # + # ---------------------------------------------------------------- + def test_container_simple_required(self): + """Verify that a container with simple subrepos + generates the correct initial status. + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # status of empty repo + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_pre_checkout(overall, tree) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # status clean checked out + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + + def test_container_simple_optional(self): + """Verify that container with an optional simple subrepos + generates the correct initial status. + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_optional(under_test_dir) + + # check status of empty repo + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_optional_pre_checkout(overall, tree) + + # checkout required + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_optional_checkout(overall, tree) + + # status + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_optional_post_checkout(overall, tree) + + # checkout optional + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.optional_args) + self._check_container_simple_optional_post_checkout(overall, tree) + + # status + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_optional_post_optional(overall, tree) + + def test_container_simple_verbose(self): + """Verify that container with simple subrepos runs with verbose status + output and generates the correct initial status. 
+ + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # check verbose status + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.verbose_args) + self._check_container_simple_required_post_checkout(overall, tree) + + def test_container_simple_dirty(self): + """Verify that a container with simple subrepos + and a dirty status exits gracefully. + + """ + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # add a file to the repo + self._add_file_to_repo(under_test_dir, 'externals/simp_tag/tmp.txt') + + # checkout: pre-checkout status should be dirty, did not + # modify working copy. + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_optional_st_dirty(overall, tree) + + # verify status is still dirty + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_optional_st_dirty(overall, tree) + + def test_container_remote_branch(self): + """Verify that a container with remote branch change works + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # update the config file to point to a different remote with + # the same branch + self._generator.update_branch(under_test_dir, 'simp_branch', + 'feature2', SIMPLE_FORK_NAME) + + # status of simp_branch should be out of sync + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_sb_modified(overall, tree) + + # checkout new externals + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_sb_modified(overall, tree) + + # status should be synced + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + + def test_container_remote_tag(self): + """Verify that a container with remote tag change works. The new tag + should not be in the original repo, only the new remote fork. + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # update the config file to point to a different remote with + # the tag instead of branch. Tag MUST NOT be in the original + # repo! 
+ self._generator.update_tag(under_test_dir, 'simp_branch', + 'forked-feature-v1', SIMPLE_FORK_NAME) + + # status of simp_branch should be out of sync + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_sb_modified(overall, tree) + + # checkout new externals + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_sb_modified(overall, tree) + + # status should be synced + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + + def test_container_preserve_dot(self): + """Verify that after inital checkout, modifying an external git repo + url to '.' and the current branch will leave it unchanged. + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # update the config file to point to a different remote with + # the same branch + self._generator.update_branch(under_test_dir, 'simp_branch', + 'feature2', SIMPLE_FORK_NAME) + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + + # verify status is clean and unmodified + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + + # update branch to point to a new branch that only exists in + # the local fork + self._generator.create_branch(under_test_dir, 'simp_branch', + 'private-feature', with_commit=True) + self._generator.update_branch(under_test_dir, 'simp_branch', + 'private-feature', + SIMPLE_LOCAL_ONLY_NAME) + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + + # verify status is clean and unmodified + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + + @unittest.skip('test development inprogress') + def test_container_full(self): + """Verify that 'full' container with simple and mixed subrepos + generates the correct initial status. + + """ + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_full(under_test_dir) + overall, tree = self.execute_cmd_in_dir( + under_test_dir, self.status_args) + self.assertEqual(overall, 0) + overall, tree = self.execute_cmd_in_dir( + under_test_dir, self.checkout_args) + self.assertEqual(overall, 0) + overall, tree = self.execute_cmd_in_dir( + under_test_dir, self.status_args) + self.assertEqual(overall, 0) + _ = tree + + def test_mixed_simple(self): + """Verify that a mixed use repo can serve as a 'full' container, + pulling in a set of externals and a seperate set of sub-externals. 
+ + """ + #import pdb; pdb.set_trace() + # create repository + under_test_dir = self.setup_test_repo(MIXED_REPO_NAME) + # create top level externals file + self._generator.mixed_simple_base(under_test_dir) + # create sub-externals file + self._sub_generator.mixed_simple_sub(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_mixed_sub_simple_required_checkout(overall, tree) + + # verify status is clean and unmodified + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_mixed_sub_simple_required_post_checkout(overall, tree) + + +class TestSysCheckoutSVN(BaseTestSysCheckout): + """Run systems level tests of checkout_externals accessing svn repositories + + SVN tests - these tests use the svn repository interface. Since + they require an active network connection, they are significantly + slower than the git tests. But svn testing is critical. So try to + design the tests to only test svn repository functionality + (checkout, switch) and leave generic testing of functionality like + 'optional' to the fast git tests. + + Example timing as of 2017-11: + + * All other git and unit tests combined take between 4-5 seconds + + * Just checking if svn is available for a single test takes 2 seconds. + + * The single svn test typically takes between 10 and 25 seconds + (depending on the network)! + + NOTE(bja, 2017-11) To enable CI testing we can't use a real remote + repository that restricts access and it seems inappropriate to hit + a random open source repo. For now we are just hitting one of our + own github repos using the github svn server interface. This + should be "good enough" for basic checkout and swich + functionality. But if additional svn functionality is required, a + better solution will be necessary. I think eventually we want to + create a small local svn repository on the fly (doesn't require an + svn server or network connection!) and use it for testing. + + """ + + def _check_svn_branch_ok(self, tree, directory=EXTERNALS_NAME): + name = './{0}/svn_branch'.format(directory) + self._check_generic_ok_clean_required(tree, name) + + def _check_svn_branch_dirty(self, tree, directory=EXTERNALS_NAME): + name = './{0}/svn_branch'.format(directory) + self._check_generic_ok_dirty_required(tree, name) + + def _check_svn_tag_ok(self, tree, directory=EXTERNALS_NAME): + name = './{0}/svn_tag'.format(directory) + self._check_generic_ok_clean_required(tree, name) + + def _check_svn_tag_modified(self, tree, directory=EXTERNALS_NAME): + name = './{0}/svn_tag'.format(directory) + self._check_generic_modified_ok_required(tree, name) + + def _check_container_simple_svn_post_checkout(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_ok(tree) + self._check_svn_branch_ok(tree) + self._check_svn_tag_ok(tree) + + def _check_container_simple_svn_sb_dirty_st_mod(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_ok(tree) + self._check_svn_tag_modified(tree) + self._check_svn_branch_dirty(tree) + + @staticmethod + def have_svn_access(): + """Check if we have svn access so we can enable tests that use svn. 
+ + """ + have_svn = False + cmd = ['svn', 'ls', SVN_TEST_REPO, ] + try: + execute_subprocess(cmd) + have_svn = True + except BaseException: + pass + return have_svn + + def skip_if_no_svn_access(self): + """Function decorator to disable svn tests when svn isn't available + """ + have_svn = self.have_svn_access() + if not have_svn: + raise unittest.SkipTest("No svn access") + + def test_container_simple_svn(self): + """Verify that a container repo can pull in an svn branch and svn tag. + + """ + self.skip_if_no_svn_access() + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_svn(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + + # verify status is clean and unmodified + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_svn_post_checkout(overall, tree) + + # update description file to make the tag into a branch and + # trigger a switch + self._generator.update_svn_branch(under_test_dir, 'svn_tag', 'trunk') + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + + # verify status is clean and unmodified + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_svn_post_checkout(overall, tree) + + # add a file to the repo + self._add_file_to_repo(under_test_dir, + 'externals/svn_branch/tmp.txt') + + # update description file to make the branch into a tag and + # trigger a modified status + self._generator.update_svn_branch(under_test_dir, 'svn_tag', + 'tags/cesm2.0.beta07') + + # checkout: pre-checkout status should be dirty and modified, + # did not modify working copy. + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_svn_sb_dirty_st_mod(overall, tree) + + # verify status is still dirty and modified with verbose, last + # checkout did not modify working dir state. + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.verbose_args) + self._check_container_simple_svn_sb_dirty_st_mod(overall, tree) + + +class TestSysCheckoutErrors(BaseTestSysCheckout): + """Run systems level tests of error conditions in checkout_externals + + Error conditions - these tests are designed to trigger specific + error conditions and ensure that they are being handled as + runtime errors (and hopefully usefull error messages) instead of + the default internal message that won't mean anything to the + user, e.g. key error, called process error, etc. + + These are not 'expected failures'. They are pass when a + RuntimeError is raised, fail if any other error is raised (or no + error is raised). + + """ + + # NOTE(bja, 2017-11) pylint complains about long method names, but + # it is hard to differentiate tests without making them more + # cryptic. + # pylint: disable=invalid-name + + def test_error_unknown_protocol(self): + """Verify that a runtime error is raised when the user specified repo + protocol is not known. + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # update the config file to point to a different remote with + # the tag instead of branch. Tag MUST NOT be in the original + # repo! 
+        self._generator.update_protocol(under_test_dir, 'simp_branch',
+                                        'this-protocol-does-not-exist')
+
+        with self.assertRaises(RuntimeError):
+            self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+    def test_error_switch_protocol(self):
+        """Verify that a runtime error is raised when the user switches
+        protocols, git to svn.
+
+        TODO(bja, 2017-11) This correctly results in an error, but it
+        isn't a helpful error message.
+
+        """
+        # create repo
+        under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+        self._generator.container_simple_required(under_test_dir)
+
+        # update the config file so the external uses the svn protocol
+        # instead of git, leaving everything else unchanged
+        self._generator.update_protocol(under_test_dir, 'simp_branch', 'svn')
+        with self.assertRaises(RuntimeError):
+            self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+    def test_error_unknown_tag(self):
+        """Verify that a runtime error is raised when the user specified tag
+        does not exist.
+
+        """
+        # create repo
+        under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+        self._generator.container_simple_required(under_test_dir)
+
+        # update the config file to point to a tag that does not exist
+        # in the repository
+        self._generator.update_tag(under_test_dir, 'simp_branch',
+                                   'this-tag-does-not-exist', SIMPLE_REPO_NAME)
+
+        with self.assertRaises(RuntimeError):
+            self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+    def test_error_overspecify_tag_branch(self):
+        """Verify that a runtime error is raised when the user specified both
+        a tag and a branch
+
+        """
+        # create repo
+        under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+        self._generator.container_simple_required(under_test_dir)
+
+        # update the config file to add a tag while keeping the existing
+        # branch, so the external is over-specified
+        self._generator.update_tag(under_test_dir, 'simp_branch',
+                                   'this-tag-does-not-exist', SIMPLE_REPO_NAME,
+                                   remove_branch=False)
+
+        with self.assertRaises(RuntimeError):
+            self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+    def test_error_underspecify_tag_branch(self):
+        """Verify that a runtime error is raised when the user specified
+        neither a tag nor a branch
+
+        """
+        # create repo
+        under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+        self._generator.container_simple_required(under_test_dir)
+
+        # update the config file so that neither a tag nor a branch is
+        # specified
+        self._generator.update_underspecify_branch_tag(under_test_dir,
+                                                       'simp_branch')
+
+        with self.assertRaises(RuntimeError):
+            self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+    def test_error_missing_url(self):
+        """Verify that a runtime error is raised when the external
+        description is missing the repository url
+
+        """
+        # create repo
+        under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+        self._generator.container_simple_required(under_test_dir)
+
+        # update the config file to remove the repo_url from the
+        # external description
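These last three error tests pin down the rule that an external must name exactly one of tag or branch and must always carry a repo_url; the helper called just below strips the url to violate that rule. Stated as a standalone check, the constraint the tests imply looks roughly like this sketch (the rule comes from the tests in this patch, the function itself is not manic's validation code):

def validate_external(repo_url, tag, branch):
    """Sketch of the tag/branch/url constraints these error tests exercise."""
    if not repo_url:
        raise RuntimeError('external description is missing "repo_url".')
    if tag and branch:
        raise RuntimeError('both "tag" and "branch" are set; '
                           'specify exactly one.')
    if not tag and not branch:
        raise RuntimeError('neither "tag" nor "branch" is set; '
                           'specify exactly one.')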
+ self._generator.update_underspecify_remove_url(under_test_dir, + 'simp_branch') + + with self.assertRaises(RuntimeError): + self.execute_cmd_in_dir(under_test_dir, self.checkout_args) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_unit_externals_description.py b/test/test_unit_externals_description.py new file mode 100644 index 0000000000..1fb43b6797 --- /dev/null +++ b/test/test_unit_externals_description.py @@ -0,0 +1,343 @@ +#!/usr/bin/env python + +"""Unit test driver for checkout_externals + +Note: this script assume the path to the checkout_externals.py module is +already in the python path. + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import os +import os.path +import shutil +import unittest + +try: + # python2 + from ConfigParser import SafeConfigParser as config_parser + + def config_string_cleaner(text): + """convert strings into unicode + """ + return text.decode('utf-8') +except ImportError: + # python3 + from configparser import ConfigParser as config_parser + + def config_string_cleaner(text): + """Python3 already uses unicode strings, so just return the string + without modification. + + """ + return text + +from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM +from manic.externals_description import ExternalsDescription +from manic.externals_description import ExternalsDescriptionDict +from manic.externals_description import ExternalsDescriptionConfigV1 +from manic.externals_description import get_cfg_schema_version +from manic.externals_description import read_externals_description_file +from manic.externals_description import create_externals_description + +from manic.global_constants import EMPTY_STR + + +class TestCfgSchemaVersion(unittest.TestCase): + """Test that schema identification for the externals description + returns the correct results. + + """ + + def setUp(self): + """Reusable config object + """ + self._config = config_parser() + self._config.add_section('section1') + self._config.set('section1', 'keword', 'value') + + self._config.add_section(DESCRIPTION_SECTION) + + def test_schema_version_valid(self): + """Test that schema identification returns the correct version for a + valid tag. + + """ + version_str = '2.1.3' + self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, version_str) + major, minor, patch = get_cfg_schema_version(self._config) + expected_major = 2 + expected_minor = 1 + expected_patch = 3 + self.assertEqual(expected_major, major) + self.assertEqual(expected_minor, minor) + self.assertEqual(expected_patch, patch) + + def test_schema_section_missing(self): + """Test that an error is returned if the schema section is missing + from the input file. + + """ + self._config.remove_section(DESCRIPTION_SECTION) + with self.assertRaises(RuntimeError): + get_cfg_schema_version(self._config) + + def test_schema_version_missing(self): + """Test that a externals description file without a version raises a + runtime error. + + """ + # Note: the default setup method shouldn't include a version + # keyword, but remove it just to be future proof.... + self._config.remove_option(DESCRIPTION_SECTION, VERSION_ITEM) + with self.assertRaises(RuntimeError): + get_cfg_schema_version(self._config) + + def test_schema_version_not_int(self): + """Test that a externals description file a version that doesn't + decompose to integer major, minor and patch versions raises + runtime error. 
+ + """ + self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, 'unknown') + with self.assertRaises(RuntimeError): + get_cfg_schema_version(self._config) + + +class TestModelDescritionConfigV1(unittest.TestCase): + """Test that parsing config/ini fileproduces a correct dictionary + for the externals description. + + """ + # pylint: disable=R0902 + + def setUp(self): + """Boiler plate construction of string containing xml for multiple components. + """ + self._comp1_name = 'comp1' + self._comp1_path = 'path/to/comp1' + self._comp1_protocol = 'svn' + self._comp1_url = 'https://svn.somewhere.com/path/of/comp1' + self._comp1_tag = 'a_nice_tag_v1' + self._comp1_branch = '' + self._comp1_is_required = 'True' + self._comp1_externals = '' + + self._comp2_name = 'comp2' + self._comp2_path = 'path/to/comp2' + self._comp2_protocol = 'git' + self._comp2_url = '/local/clone/of/comp2' + self._comp2_tag = '' + self._comp2_branch = 'a_very_nice_branch' + self._comp2_is_required = 'False' + self._comp2_externals = 'path/to/comp2.cfg' + + def _setup_comp1(self, config): + """Boiler plate construction of xml string for componet 1 + """ + config.add_section(self._comp1_name) + config.set(self._comp1_name, 'local_path', self._comp1_path) + config.set(self._comp1_name, 'protocol', self._comp1_protocol) + config.set(self._comp1_name, 'repo_url', self._comp1_url) + config.set(self._comp1_name, 'tag', self._comp1_tag) + config.set(self._comp1_name, 'required', self._comp1_is_required) + + def _setup_comp2(self, config): + """Boiler plate construction of xml string for componet 2 + """ + config.add_section(self._comp2_name) + config.set(self._comp2_name, 'local_path', self._comp2_path) + config.set(self._comp2_name, 'protocol', self._comp2_protocol) + config.set(self._comp2_name, 'repo_url', self._comp2_url) + config.set(self._comp2_name, 'branch', self._comp2_branch) + config.set(self._comp2_name, 'required', self._comp2_is_required) + config.set(self._comp2_name, 'externals', self._comp2_externals) + + def _check_comp1(self, model): + """Test that component one was constructed correctly. + """ + self.assertTrue(self._comp1_name in model) + comp1 = model[self._comp1_name] + self.assertEqual(comp1[ExternalsDescription.PATH], self._comp1_path) + self.assertTrue(comp1[ExternalsDescription.REQUIRED]) + repo = comp1[ExternalsDescription.REPO] + self.assertEqual(repo[ExternalsDescription.PROTOCOL], + self._comp1_protocol) + self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp1_url) + self.assertEqual(repo[ExternalsDescription.TAG], self._comp1_tag) + self.assertEqual(EMPTY_STR, comp1[ExternalsDescription.EXTERNALS]) + + def _check_comp2(self, model): + """Test that component two was constucted correctly. + """ + self.assertTrue(self._comp2_name in model) + comp2 = model[self._comp2_name] + self.assertEqual(comp2[ExternalsDescription.PATH], self._comp2_path) + self.assertFalse(comp2[ExternalsDescription.REQUIRED]) + repo = comp2[ExternalsDescription.REPO] + self.assertEqual(repo[ExternalsDescription.PROTOCOL], + self._comp2_protocol) + self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp2_url) + self.assertEqual(repo[ExternalsDescription.BRANCH], self._comp2_branch) + self.assertEqual(self._comp2_externals, + comp2[ExternalsDescription.EXTERNALS]) + + def test_one_tag_required(self): + """Test that a component source with a tag is correctly parsed. 
+ """ + config = config_parser() + self._setup_comp1(config) + model = ExternalsDescriptionConfigV1(config) + print(model) + self._check_comp1(model) + + def test_one_branch_externals(self): + """Test that a component source with a branch is correctly parsed. + """ + config = config_parser() + self._setup_comp2(config) + model = ExternalsDescriptionConfigV1(config) + print(model) + self._check_comp2(model) + + def test_two_sources(self): + """Test that multiple component sources are correctly parsed. + """ + config = config_parser() + self._setup_comp1(config) + self._setup_comp2(config) + model = ExternalsDescriptionConfigV1(config) + print(model) + self._check_comp1(model) + self._check_comp2(model) + + +class TestReadExternalsDescription(unittest.TestCase): + """Test the application logic of read_externals_description_file + """ + TMP_FAKE_DIR = 'fake' + + def setUp(self): + """Setup directory for tests + """ + if not os.path.exists(self.TMP_FAKE_DIR): + os.makedirs(self.TMP_FAKE_DIR) + + def tearDown(self): + """Cleanup tmp stuff on the file system + """ + if os.path.exists(self.TMP_FAKE_DIR): + shutil.rmtree(self.TMP_FAKE_DIR) + + def test_no_file_error(self): + """Test that a runtime error is raised when the file does not exist + + """ + root_dir = os.getcwd() + filename = 'this-file-should-not-exist' + with self.assertRaises(RuntimeError): + read_externals_description_file(root_dir, filename) + + def test_no_dir_error(self): + """Test that a runtime error is raised when the file does not exist + + """ + root_dir = '/path/to/some/repo' + filename = 'externals.cfg' + with self.assertRaises(RuntimeError): + read_externals_description_file(root_dir, filename) + + def test_no_invalid_error(self): + """Test that a runtime error is raised when the file format is invalid + + """ + root_dir = os.getcwd() + filename = 'externals.cfg' + file_path = os.path.join(root_dir, filename) + file_path = os.path.abspath(file_path) + contents = """ + +invalid file format +""" + with open(file_path, 'w') as fhandle: + fhandle.write(contents) + with self.assertRaises(RuntimeError): + read_externals_description_file(root_dir, filename) + os.remove(file_path) + + +class TestCreateExternalsDescription(unittest.TestCase): + """Test the application logic of creat_externals_description + """ + + def setUp(self): + """Create config object used as basis for all tests + """ + self._config = config_parser() + self.setup_config() + + def setup_config(self): + """Boiler plate construction of xml string for componet 1 + """ + name = 'test' + self._config.add_section(name) + self._config.set(name, ExternalsDescription.PATH, 'externals') + self._config.set(name, ExternalsDescription.PROTOCOL, 'git') + self._config.set(name, ExternalsDescription.REPO_URL, '/path/to/repo') + self._config.set(name, ExternalsDescription.TAG, 'test_tag') + self._config.set(name, ExternalsDescription.REQUIRED, 'True') + + self._config.add_section(DESCRIPTION_SECTION) + self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0') + + def test_cfg_v1(self): + """Test that a correct cfg v1 object is created by create_externals_description + + """ + self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.2.3') + ext = create_externals_description(self._config, model_format='cfg') + self.assertIsInstance(ext, ExternalsDescriptionConfigV1) + + def test_dict(self): + """Test that a correct cfg v1 object is created by create_externals_description + + """ + rdata = {ExternalsDescription.PROTOCOL: 'git', + ExternalsDescription.REPO_URL: '/path/to/repo', 
+ ExternalsDescription.TAG: 'tagv1', + ExternalsDescription.BRANCH: EMPTY_STR, } + + desc = { + 'test': { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: '../fake', + ExternalsDescription.EXTERNALS: EMPTY_STR, + ExternalsDescription.REPO: rdata, }, + } + + ext = create_externals_description(desc, model_format='dict') + self.assertIsInstance(ext, ExternalsDescriptionDict) + + def test_cfg_unknown_version(self): + """Test that a runtime error is raised when an unknown file version is + received + + """ + self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '123.456.789') + with self.assertRaises(RuntimeError): + create_externals_description(self._config, model_format='cfg') + + def test_cfg_unknown_format(self): + """Test that a runtime error is raised when an unknown format string is + received + + """ + with self.assertRaises(RuntimeError): + create_externals_description(self._config, model_format='unknown') + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_unit_externals_status.py b/test/test_unit_externals_status.py new file mode 100644 index 0000000000..f8e953f756 --- /dev/null +++ b/test/test_unit_externals_status.py @@ -0,0 +1,299 @@ +#!/usr/bin/env python + +"""Unit test driver for the manic external status reporting module. + +Note: this script assumes the path to the manic package is already in +the python path. + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import unittest + +from manic.externals_status import ExternalStatus + + +class TestStatusObject(unittest.TestCase): + """Verify that the Status object behaives as expected. + """ + + def test_exists_empty_all(self): + """If the repository sync-state is empty (doesn't exist), and there is no + clean state, then it is considered not to exist. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.EMPTY + stat.clean_state = ExternalStatus.DEFAULT + exists = stat.exists() + self.assertFalse(exists) + + stat.clean_state = ExternalStatus.EMPTY + exists = stat.exists() + self.assertFalse(exists) + + stat.clean_state = ExternalStatus.UNKNOWN + exists = stat.exists() + self.assertFalse(exists) + + # this state represtens an internal logic error in how the + # repo status was determined. + stat.clean_state = ExternalStatus.STATUS_OK + exists = stat.exists() + self.assertTrue(exists) + + # this state represtens an internal logic error in how the + # repo status was determined. + stat.clean_state = ExternalStatus.DIRTY + exists = stat.exists() + self.assertTrue(exists) + + def test_exists_default_all(self): + """If the repository sync-state is default, then it is considered to exist + regardless of clean state. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.DEFAULT + stat.clean_state = ExternalStatus.DEFAULT + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.EMPTY + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.UNKNOWN + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.STATUS_OK + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.DIRTY + exists = stat.exists() + self.assertTrue(exists) + + def test_exists_unknown_all(self): + """If the repository sync-state is unknown, then it is considered to exist + regardless of clean state. 
+ + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.UNKNOWN + stat.clean_state = ExternalStatus.DEFAULT + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.EMPTY + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.UNKNOWN + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.STATUS_OK + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.DIRTY + exists = stat.exists() + self.assertTrue(exists) + + def test_exists_modified_all(self): + """If the repository sync-state is modified, then it is considered to exist + regardless of clean state. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.MODEL_MODIFIED + stat.clean_state = ExternalStatus.DEFAULT + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.EMPTY + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.UNKNOWN + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.STATUS_OK + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.DIRTY + exists = stat.exists() + self.assertTrue(exists) + + def test_exists_ok_all(self): + """If the repository sync-state is ok, then it is considered to exist + regardless of clean state. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.STATUS_OK + stat.clean_state = ExternalStatus.DEFAULT + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.EMPTY + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.UNKNOWN + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.STATUS_OK + exists = stat.exists() + self.assertTrue(exists) + + stat.clean_state = ExternalStatus.DIRTY + exists = stat.exists() + self.assertTrue(exists) + + def test_update_ok_all(self): + """If the repository in-sync is ok, then it is safe to + update only if clean state is ok + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.STATUS_OK + stat.clean_state = ExternalStatus.DEFAULT + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.EMPTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.UNKNOWN + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.STATUS_OK + safe_to_update = stat.safe_to_update() + self.assertTrue(safe_to_update) + + stat.clean_state = ExternalStatus.DIRTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + def test_update_modified_all(self): + """If the repository in-sync is modified, then it is safe to + update only if clean state is ok + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.MODEL_MODIFIED + stat.clean_state = ExternalStatus.DEFAULT + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.EMPTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.UNKNOWN + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.STATUS_OK + safe_to_update = stat.safe_to_update() + self.assertTrue(safe_to_update) + + stat.clean_state = ExternalStatus.DIRTY + safe_to_update = 
stat.safe_to_update() + self.assertFalse(safe_to_update) + + def test_update_unknown_all(self): + """If the repository in-sync is unknown, then it is not safe to + update, regardless of the clean state. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.UNKNOWN + stat.clean_state = ExternalStatus.DEFAULT + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.EMPTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.UNKNOWN + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.STATUS_OK + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.DIRTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + def test_update_default_all(self): + """If the repository in-sync is default, then it is not safe to + update, regardless of the clean state. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.UNKNOWN + stat.clean_state = ExternalStatus.DEFAULT + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.EMPTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.UNKNOWN + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.STATUS_OK + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.DIRTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + def test_update_empty_all(self): + """If the repository in-sync is empty, then it is not safe to + update, regardless of the clean state. + + """ + stat = ExternalStatus() + stat.sync_state = ExternalStatus.UNKNOWN + stat.clean_state = ExternalStatus.DEFAULT + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.EMPTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.UNKNOWN + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.STATUS_OK + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + stat.clean_state = ExternalStatus.DIRTY + safe_to_update = stat.safe_to_update() + self.assertFalse(safe_to_update) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_unit_repository.py b/test/test_unit_repository.py new file mode 100644 index 0000000000..095dacb102 --- /dev/null +++ b/test/test_unit_repository.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python + +"""Unit test driver for checkout_externals + +Note: this script assume the path to the checkout_externals.py module is +already in the python path. 
+ +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import unittest + +from manic.repository_factory import create_repository +from manic.repository_git import GitRepository +from manic.repository_svn import SvnRepository +from manic.repository import Repository +from manic.externals_description import ExternalsDescription +from manic.global_constants import EMPTY_STR + + +class TestCreateRepositoryDict(unittest.TestCase): + """Test the create_repository functionality to ensure it returns the + propper type of repository and errors for unknown repository + types. + + """ + + def setUp(self): + """Common data needed for all tests in this class + """ + self._name = 'test_name' + self._repo = {ExternalsDescription.PROTOCOL: None, + ExternalsDescription.REPO_URL: 'junk_root', + ExternalsDescription.TAG: 'junk_tag', + ExternalsDescription.BRANCH: EMPTY_STR, } + + def test_create_repo_git(self): + """Verify that several possible names for the 'git' protocol + create git repository objects. + + """ + protocols = ['git', 'GIT', 'Git', ] + for protocol in protocols: + self._repo[ExternalsDescription.PROTOCOL] = protocol + repo = create_repository(self._name, self._repo) + self.assertIsInstance(repo, GitRepository) + + def test_create_repo_svn(self): + """Verify that several possible names for the 'svn' protocol + create svn repository objects. + """ + protocols = ['svn', 'SVN', 'Svn', ] + for protocol in protocols: + self._repo[ExternalsDescription.PROTOCOL] = protocol + repo = create_repository(self._name, self._repo) + self.assertIsInstance(repo, SvnRepository) + + def test_create_repo_externals_only(self): + """Verify that an externals only repo returns None. + """ + protocols = ['externals_only', ] + for protocol in protocols: + self._repo[ExternalsDescription.PROTOCOL] = protocol + repo = create_repository(self._name, self._repo) + self.assertEqual(None, repo) + + def test_create_repo_unsupported(self): + """Verify that an unsupported protocol generates a runtime error. + """ + protocols = ['not_a_supported_protocol', ] + for protocol in protocols: + self._repo[ExternalsDescription.PROTOCOL] = protocol + with self.assertRaises(RuntimeError): + create_repository(self._name, self._repo) + + +class TestRepository(unittest.TestCase): + """Test the externals description processing used to create the Repository + base class shared by protocol specific repository classes. 
+ + """ + + def test_tag(self): + """Test creation of a repository object with a tag + """ + name = 'test_repo' + protocol = 'test_protocol' + url = 'test_url' + tag = 'test_tag' + repo_info = {ExternalsDescription.PROTOCOL: protocol, + ExternalsDescription.REPO_URL: url, + ExternalsDescription.TAG: tag, + ExternalsDescription.BRANCH: EMPTY_STR, } + repo = Repository(name, repo_info) + print(repo.__dict__) + self.assertEqual(repo.tag(), tag) + self.assertEqual(repo.url(), url) + + def test_branch(self): + """Test creation of a repository object with a branch + """ + name = 'test_repo' + protocol = 'test_protocol' + url = 'test_url' + branch = 'test_branch' + repo_info = {ExternalsDescription.PROTOCOL: protocol, + ExternalsDescription.REPO_URL: url, + ExternalsDescription.BRANCH: branch, + ExternalsDescription.TAG: EMPTY_STR, } + repo = Repository(name, repo_info) + print(repo.__dict__) + self.assertEqual(repo.branch(), branch) + self.assertEqual(repo.url(), url) + + def test_tag_branch(self): + """Test creation of a repository object with a tag and branch raises a + runtimer error. + + """ + name = 'test_repo' + protocol = 'test_protocol' + url = 'test_url' + branch = 'test_branch' + tag = 'test_tag' + repo_info = {ExternalsDescription.PROTOCOL: protocol, + ExternalsDescription.REPO_URL: url, + ExternalsDescription.BRANCH: branch, + ExternalsDescription.TAG: tag, } + with self.assertRaises(RuntimeError): + Repository(name, repo_info) + + def test_no_tag_no_branch(self): + """Test creation of a repository object without a tag or branch raises a + runtimer error. + + """ + name = 'test_repo' + protocol = 'test_protocol' + url = 'test_url' + branch = EMPTY_STR + tag = EMPTY_STR + repo_info = {ExternalsDescription.PROTOCOL: protocol, + ExternalsDescription.REPO_URL: url, + ExternalsDescription.BRANCH: branch, + ExternalsDescription.TAG: tag, } + with self.assertRaises(RuntimeError): + Repository(name, repo_info) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_unit_repository_git.py b/test/test_unit_repository_git.py new file mode 100644 index 0000000000..2060911ab4 --- /dev/null +++ b/test/test_unit_repository_git.py @@ -0,0 +1,1013 @@ +#!/usr/bin/env python + +"""Unit test driver for checkout_externals + +Note: this script assume the path to the checkout_externals.py module is +already in the python path. + +""" +# pylint: disable=too-many-lines,protected-access + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import os +import shutil +import string +import unittest + +from manic.repository_git import GitRepository +from manic.externals_status import ExternalStatus +from manic.externals_description import ExternalsDescription +from manic.externals_description import ExternalsDescriptionDict +from manic.global_constants import EMPTY_STR + +# pylint: disable=C0103 +GIT_BRANCH_OUTPUT_DETACHED_BRANCH_v1_8 = ''' +* (detached from origin/feature2) 36418b4 Work on feature2 + master 9b75494 [origin/master] Initialize repository. +''' +# pylint: enable=C0103 + + +GIT_BRANCH_OUTPUT_DETACHED_BRANCH = ''' +* (HEAD detached at origin/feature-2) 36418b4 Work on feature-2 + feature-2 36418b4 [origin/feature-2] Work on feature-2 + feature3 36418b4 Work on feature-2 + master 9b75494 [origin/master] Initialize repository. 
+''' + +GIT_BRANCH_OUTPUT_DETACHED_HASH = ''' +* (HEAD detached at 36418b4) 36418b4 Work on feature-2 + feature-2 36418b4 [origin/feature-2] Work on feature-2 + feature3 36418b4 Work on feature-2 + master 9b75494 [origin/master] Initialize repository. +''' + +GIT_BRANCH_OUTPUT_DETACHED_TAG = ''' +* (HEAD detached at tag1) 9b75494 Initialize repository. + feature-2 36418b4 [origin/feature-2] Work on feature-2 + feature3 36418b4 Work on feature-2 + master 9b75494 [origin/master] Initialize repository. +''' + +GIT_BRANCH_OUTPUT_UNTRACKED_BRANCH = ''' + feature-2 36418b4 [origin/feature-2] Work on feature-2 +* feature3 36418b4 Work on feature-2 + master 9b75494 [origin/master] Initialize repository. +''' + +GIT_BRANCH_OUTPUT_TRACKING_BRANCH = ''' +* feature-2 36418b4 [origin/feature-2] Work on feature-2 + feature3 36418b4 Work on feature-2 + master 9b75494 [origin/master] Initialize repository. +''' + +# NOTE(bja, 2017-11) order is important here. origin should be a +# subset of other to trap errors on processing remotes! +GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM = ''' +upstream /path/to/other/repo (fetch) +upstream /path/to/other/repo (push) +other /path/to/local/repo2 (fetch) +other /path/to/local/repo2 (push) +origin /path/to/local/repo (fetch) +origin /path/to/local/repo (push) +''' + + +class TestGitRepositoryCurrentRefBranch(unittest.TestCase): + """test the current_ref_from_branch_command on a git repository + """ + + def setUp(self): + self._name = 'component' + rdata = {ExternalsDescription.PROTOCOL: 'git', + ExternalsDescription.REPO_URL: + '/path/to/local/repo', + ExternalsDescription.TAG: + 'tag1', + ExternalsDescription.BRANCH: EMPTY_STR + } + + data = {self._name: + { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: 'junk', + ExternalsDescription.EXTERNALS: EMPTY_STR, + ExternalsDescription.REPO: rdata, + }, + } + + model = ExternalsDescriptionDict(data) + repo = model[self._name][ExternalsDescription.REPO] + self._repo = GitRepository('test', repo) + + def test_ref_detached_from_tag(self): + """Test that we correctly identify that the ref is detached from a tag + """ + git_output = GIT_BRANCH_OUTPUT_DETACHED_TAG + expected = self._repo.tag() + result = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(result, expected) + + def test_ref_detached_hash(self): + """Test that we can identify ref is detached from a hash + + """ + git_output = GIT_BRANCH_OUTPUT_DETACHED_HASH + expected = '36418b4' + result = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(result, expected) + + def test_ref_detached_branch(self): + """Test that we can identify ref is detached from a remote branch + + """ + git_output = GIT_BRANCH_OUTPUT_DETACHED_BRANCH + expected = 'origin/feature-2' + result = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(result, expected) + + def test_ref_detached_branch_v1_8(self): + """Test that we can identify ref is detached from a remote branch + + """ + git_output = GIT_BRANCH_OUTPUT_DETACHED_BRANCH_v1_8 + expected = 'origin/feature2' + result = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(result, expected) + + def test_ref_tracking_branch(self): + """Test that we correctly identify we are on a tracking branch + """ + git_output = GIT_BRANCH_OUTPUT_TRACKING_BRANCH + expected = 'origin/feature-2' + result = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(result, expected) + + def test_ref_untracked_branch(self): + 
"""Test that we correctly identify we are on an untracked branch + """ + git_output = GIT_BRANCH_OUTPUT_UNTRACKED_BRANCH + expected = 'feature3' + result = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(result, expected) + + def test_ref_none(self): + """Test that we can handle an empty string for output, e.g. not an git + repo. + + """ + git_output = EMPTY_STR + received = self._repo._current_ref_from_branch_command( + git_output) + self.assertEqual(received, EMPTY_STR) + + +class TestGitRepositoryCheckSync(unittest.TestCase): + """Test whether the GitRepository _check_sync_logic functionality is + correct. + + Note: there are a lot of combinations of state: + + - external description - tag, branch + + - working copy + - doesn't exist (not checked out) + - exists, no git info - incorrect protocol, e.g. svn, or tarball? + - exists, git info + - as expected: + - different from expected: + - detached tag, + - detached hash, + - detached branch (compare remote and branch), + - tracking branch (compare remote and branch), + - same remote + - different remote + - untracked branch + + Test list: + - doesn't exist + - exists no git info + + - num_external * (working copy expected + num_working copy different) + - total tests = 16 + + """ + + # NOTE(bja, 2017-11) pylint complains about long method names, but + # it is hard to differentiate tests without making them more + # cryptic. Also complains about too many public methods, but it + # doesn't really make sense to break this up. + # pylint: disable=invalid-name,too-many-public-methods + + TMP_FAKE_DIR = 'fake' + TMP_FAKE_GIT_DIR = os.path.join(TMP_FAKE_DIR, '.git') + + def setUp(self): + """Setup reusable git repository object + """ + self._name = 'component' + rdata = {ExternalsDescription.PROTOCOL: 'git', + ExternalsDescription.REPO_URL: + '/path/to/local/repo', + ExternalsDescription.TAG: 'tag1', + ExternalsDescription.BRANCH: EMPTY_STR + } + + data = {self._name: + { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: self.TMP_FAKE_DIR, + ExternalsDescription.EXTERNALS: EMPTY_STR, + ExternalsDescription.REPO: rdata, + }, + } + + model = ExternalsDescriptionDict(data) + repo = model[self._name][ExternalsDescription.REPO] + self._repo = GitRepository('test', repo) + self._create_tmp_git_dir() + + def tearDown(self): + """Cleanup tmp stuff on the file system + """ + self._remove_tmp_git_dir() + + def _create_tmp_git_dir(self): + """Create a temporary fake git directory for testing purposes. + """ + if not os.path.exists(self.TMP_FAKE_GIT_DIR): + os.makedirs(self.TMP_FAKE_GIT_DIR) + + def _remove_tmp_git_dir(self): + """Remove the temporary fake git directory + """ + if os.path.exists(self.TMP_FAKE_DIR): + shutil.rmtree(self.TMP_FAKE_DIR) + + # + # mock methods replacing git system calls + # + @staticmethod + def _git_branch_empty(): + """Return an empty info string. Simulates git info failing. 
+ """ + return EMPTY_STR + + @staticmethod + def _git_branch_detached_tag(): + """Return an info sting that is a checkouted tag + """ + return GIT_BRANCH_OUTPUT_DETACHED_TAG + + @staticmethod + def _git_branch_detached_hash(): + """Return an info string that is a checkout hash + """ + return GIT_BRANCH_OUTPUT_DETACHED_HASH + + @staticmethod + def _git_branch_detached_branch(): + """Return an info string that is a checkout hash + """ + return GIT_BRANCH_OUTPUT_DETACHED_BRANCH + + @staticmethod + def _git_branch_untracked_branch(): + """Return an info string that is a checkout branch + """ + return GIT_BRANCH_OUTPUT_UNTRACKED_BRANCH + + @staticmethod + def _git_branch_tracked_branch(): + """Return an info string that is a checkout branch + """ + return GIT_BRANCH_OUTPUT_TRACKING_BRANCH + + @staticmethod + def _git_remote_origin_upstream(): + """Return an info string that is a checkout hash + """ + return GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM + + @staticmethod + def _git_remote_none(): + """Return an info string that is a checkout hash + """ + return EMPTY_STR + + # ---------------------------------------------------------------- + # + # Tests where working copy doesn't exist or is invalid + # + # ---------------------------------------------------------------- + def test_sync_dir_not_exist(self): + """Test that a directory that doesn't exist returns an error status + + Note: the Repository classes should be prevented from ever + working on an empty directory by the _Source object. + + """ + stat = ExternalStatus() + self._repo._check_sync(stat, 'invalid_directory_name') + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR) + # check_dir should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_dir_exist_no_git_info(self): + """Test that an empty info string returns an unknown status + """ + stat = ExternalStatus() + # Now we over-ride the _git_branch method on the repo to return + # a known value without requiring access to git. + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._git_branch_vv = self._git_branch_empty + self._repo._check_sync(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + # ---------------------------------------------------------------- + # + # Tests where external description specifies a tag + # + # Perturbations of working dir state: on detached + # {tag|branch|hash}, tracking branch, untracked branch. 
+ # + # ---------------------------------------------------------------- + def test_sync_tag_on_detached_tag(self): + """Test expect tag on detached tag --> status ok + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = '' + self._repo._tag = 'tag1' + self._repo._git_branch_vv = self._git_branch_detached_tag + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_tag_on_diff_tag(self): + """Test expect tag on diff tag --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = '' + self._repo._tag = 'tag2' + self._repo._git_branch_vv = self._git_branch_detached_tag + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_tag_on_detached_hash(self): + """Test expect tag on detached hash --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = '' + self._repo._tag = 'tag1' + self._repo._git_branch_vv = self._git_branch_detached_hash + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_tag_on_detached_branch(self): + """Test expect tag on detached branch --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = '' + self._repo._tag = 'tag1' + self._repo._git_branch_vv = self._git_branch_detached_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_tag_on_tracking_branch(self): + """Test expect tag on tracking branch --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = '' + self._repo._tag = 'tag1' + self._repo._git_branch_vv = self._git_branch_tracked_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_tag_on_untracked_branch(self): + """Test expect tag on untracked branch --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = '' + self._repo._tag = 'tag1' + self._repo._git_branch_vv = self._git_branch_untracked_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + # 
---------------------------------------------------------------- + # + # Tests where external description specifies a branch + # + # Perturbations of working dir state: on detached + # {tag|branch|hash}, tracking branch, untracked branch. + # + # ---------------------------------------------------------------- + def test_sync_branch_on_detached_branch_same_remote(self): + """Test expect branch on detached branch with same remote --> status ok + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_detached_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_detached_branch_diff_remote(self): + """Test expect branch on detached branch, different remote --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._url = '/path/to/other/repo' + self._repo._git_branch_vv = self._git_branch_detached_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_detached_branch_diff_remote2(self): + """Test expect branch on detached branch, different remote --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._url = '/path/to/local/repo2' + self._repo._git_branch_vv = self._git_branch_detached_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_diff_branch(self): + """Test expect branch on diff branch --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'nice_new_feature' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_detached_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_detached_hash(self): + """Test expect branch on detached hash --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_detached_hash + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_detached_tag(self): + """Test expect branch on detached tag --> status modified + + """ + stat = ExternalStatus() + 
self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_detached_tag + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_tracking_branch_same_remote(self): + """Test expect branch on tracking branch with same remote --> status ok + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_tracked_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_tracking_branch_diff_remote(self): + """Test expect branch on tracking branch with different remote--> + status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._url = '/path/to/other/repo' + self._repo._git_branch_vv = self._git_branch_tracked_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_untracked_branch(self): + """Test expect branch on untracked branch --> status modified + + NOTE(bja, 2017-11) the externals description url is always a + remote repository. A local untracked branch only exists + locally, therefore it is always a modified state, even if this + is what the user wants. + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_untracked_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_unknown_remote(self): + """Test expect branch, but remote is unknown --> status modified + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature-2' + self._repo._tag = '' + self._repo._url = '/path/to/unknown/repo' + self._repo._git_branch_vv = self._git_branch_untracked_branch + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_sync_branch_on_untracked_local(self): + """Test expect branch, on untracked branch in local repo --> status ok + + Setting the externals description to '.' indicates that the + user only want's to consider the current local repo state + without fetching from remotes. This is required to preserve + the current branch of a repository during an update. 
+ + NOTE(bja, 2017-11) the externals description is always a + remote repository. A local untracked branch only exists + locally, therefore it is always a modified state, even if this + is what the user wants. + + """ + stat = ExternalStatus() + self._repo._git_remote_verbose = self._git_remote_origin_upstream + self._repo._branch = 'feature3' + self._repo._tag = '' + self._repo._git_branch_vv = self._git_branch_untracked_branch + self._repo._url = '.' + self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) + # check_sync should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + +class TestGitRegExp(unittest.TestCase): + """Test that the regular expressions in the GitRepository class + capture intended strings + + """ + + def setUp(self): + """Common constans + """ + self._detached_git_v2_tmpl = string.Template( + '* (HEAD detached at $ref) 36418b4 Work on feature-2') + + self._detached_git_v1_tmpl = string.Template( + '* (detached from $ref) 36418b4 Work on feature-2') + + self._tracking_tmpl = string.Template( + '* feature-2 36418b4 [$ref] Work on feature-2') + + # + # RE_DETACHED + # + def test_re_detached_alphnum(self): + """Test re correctly matches alphnumeric (basic debugging) + """ + value = 'feature2' + input_str = self._detached_git_v2_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + input_str = self._detached_git_v1_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_detached_underscore(self): + """Test re matches with underscore + """ + value = 'feature_2' + input_str = self._detached_git_v2_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + input_str = self._detached_git_v1_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_detached_hyphen(self): + """Test re matches - + """ + value = 'feature-2' + input_str = self._detached_git_v2_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + input_str = self._detached_git_v1_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_detached_period(self): + """Test re matches . 
+ """ + value = 'feature.2' + input_str = self._detached_git_v2_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + input_str = self._detached_git_v1_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_detached_slash(self): + """Test re matches / + """ + value = 'feature/2' + input_str = self._detached_git_v2_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + input_str = self._detached_git_v1_tmpl.substitute(ref=value) + match = GitRepository.RE_DETACHED.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + # + # RE_TRACKING + # + def test_re_tracking_alphnum(self): + """Test re matches alphanumeric for basic debugging + """ + value = 'feature2' + input_str = self._tracking_tmpl.substitute(ref=value) + match = GitRepository.RE_TRACKING.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_tracking_underscore(self): + """Test re matches _ + """ + value = 'feature_2' + input_str = self._tracking_tmpl.substitute(ref=value) + match = GitRepository.RE_TRACKING.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_tracking_hyphen(self): + """Test re matches - + """ + value = 'feature-2' + input_str = self._tracking_tmpl.substitute(ref=value) + match = GitRepository.RE_TRACKING.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_tracking_period(self): + """Test re match . + """ + value = 'feature.2' + input_str = self._tracking_tmpl.substitute(ref=value) + match = GitRepository.RE_TRACKING.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + def test_re_tracking_slash(self): + """Test re matches / + """ + value = 'feature/2' + input_str = self._tracking_tmpl.substitute(ref=value) + match = GitRepository.RE_TRACKING.search(input_str) + self.assertIsNotNone(match) + self.assertEqual(match.group(1), value) + + +class TestGitStatusPorcelain(unittest.TestCase): + """Test parsing of output from git status --porcelain=v1 -z + """ + GIT_STATUS_PORCELAIN_V1_ALL = ( + r' D INSTALL\0MM Makefile\0M README.md\0R cmakelists.txt\0' + r'CMakeLists.txt\0D commit-message-template.txt\0A stuff.txt\0' + r'?? junk.txt') + + GIT_STATUS_PORCELAIN_CLEAN = r'' + + def test_porcelain_status_dirty(self): + """Verify that git status output is considered dirty when there are + listed files. + + """ + git_output = self.GIT_STATUS_PORCELAIN_V1_ALL + is_dirty = GitRepository._status_v1z_is_dirty(git_output) + self.assertTrue(is_dirty) + + def test_porcelain_status_clean(self): + """Verify that git status output is considered clean when there are no + listed files. 
+ + """ + git_output = self.GIT_STATUS_PORCELAIN_CLEAN + is_dirty = GitRepository._status_v1z_is_dirty(git_output) + self.assertFalse(is_dirty) + + +class TestGitCreateRemoteName(unittest.TestCase): + """Test the create_remote_name method on the GitRepository class + """ + + def setUp(self): + """Common infrastructure for testing _create_remote_name + """ + self._rdata = {ExternalsDescription.PROTOCOL: 'git', + ExternalsDescription.REPO_URL: + 'empty', + ExternalsDescription.TAG: + 'very_useful_tag', + ExternalsDescription.BRANCH: EMPTY_STR, } + self._repo = GitRepository('test', self._rdata) + + def test_remote_git_proto(self): + """Test remote with git protocol + """ + self._repo._url = 'git@git.github.com:very_nice_org/useful_repo' + remote_name = self._repo._create_remote_name() + self.assertEqual(remote_name, 'very_nice_org_useful_repo') + + def test_remote_https_proto(self): + """Test remote with git protocol + """ + self._repo._url = 'https://www.github.com/very_nice_org/useful_repo' + remote_name = self._repo._create_remote_name() + self.assertEqual(remote_name, 'very_nice_org_useful_repo') + + def test_remote_local_abs(self): + """Test remote with git protocol + """ + self._repo._url = '/path/to/local/repositories/useful_repo' + remote_name = self._repo._create_remote_name() + self.assertEqual(remote_name, 'repositories_useful_repo') + + def test_remote_local_rel(self): + """Test remote with git protocol + """ + os.environ['TEST_VAR'] = '/my/path/to/repos' + self._repo._url = '${TEST_VAR}/../../useful_repo' + remote_name = self._repo._create_remote_name() + self.assertEqual(remote_name, 'path_useful_repo') + del os.environ['TEST_VAR'] + + +class TestVerifyTag(unittest.TestCase): + """Test logic verifying that a tag exists and is unique + + """ + + def setUp(self): + """Setup reusable git repository object + """ + self._name = 'component' + rdata = {ExternalsDescription.PROTOCOL: 'git', + ExternalsDescription.REPO_URL: + '/path/to/local/repo', + ExternalsDescription.TAG: 'tag1', + ExternalsDescription.BRANCH: EMPTY_STR + } + + data = {self._name: + { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: 'tmp', + ExternalsDescription.EXTERNALS: EMPTY_STR, + ExternalsDescription.REPO: rdata, + }, + } + + model = ExternalsDescriptionDict(data) + repo = model[self._name][ExternalsDescription.REPO] + self._repo = GitRepository('test', repo) + + @staticmethod + def _shell_true(url, remote=None): + _ = url + _ = remote + return 0 + + @staticmethod + def _shell_false(url, remote=None): + _ = url + _ = remote + return 1 + + def test_tag_not_tag_branch_commit(self): + """Verify a non-tag returns false + """ + self._repo._git_showref_tag = self._shell_false + self._repo._git_showref_branch = self._shell_false + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_false + self._repo._tag = 'something' + remote_name = 'origin' + received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name) + self.assertFalse(received) + + def test_tag_not_tag(self): + """Verify a non-tag, untracked remote returns false + """ + self._repo._git_showref_tag = self._shell_false + self._repo._git_showref_branch = self._shell_true + self._repo._git_lsremote_branch = self._shell_true + self._repo._git_revparse_commit = self._shell_false + self._repo._tag = 'tag1' + remote_name = 'origin' + received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name) + self.assertFalse(received) + + def test_tag_indeterminant(self): + """Verify an 
indeterminant tag/branch returns false + """ + self._repo._git_showref_tag = self._shell_true + self._repo._git_showref_branch = self._shell_true + self._repo._git_lsremote_branch = self._shell_true + self._repo._git_revparse_commit = self._shell_true + self._repo._tag = 'something' + remote_name = 'origin' + received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name) + self.assertFalse(received) + + def test_tag_is_unique(self): + """Verify a unique tag match returns true + """ + self._repo._git_showref_tag = self._shell_true + self._repo._git_showref_branch = self._shell_false + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_true + self._repo._tag = 'tag1' + remote_name = 'origin' + received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name) + self.assertTrue(received) + + def test_tag_is_commit(self): + """Verify a commit hash + """ + self._repo._git_showref_tag = self._shell_false + self._repo._git_showref_branch = self._shell_false + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_true + self._repo._tag = '97ebc0e0' + remote_name = 'origin' + received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name) + self.assertTrue(received) + + +class TestValidRef(unittest.TestCase): + """Test logic verifying that a reference is a valid tag, branch or sha1 + + """ + + def setUp(self): + """Setup reusable git repository object + """ + self._name = 'component' + rdata = {ExternalsDescription.PROTOCOL: 'git', + ExternalsDescription.REPO_URL: + '/path/to/local/repo', + ExternalsDescription.TAG: 'tag1', + ExternalsDescription.BRANCH: EMPTY_STR + } + + data = {self._name: + { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: 'tmp', + ExternalsDescription.EXTERNALS: EMPTY_STR, + ExternalsDescription.REPO: rdata, + }, + } + + model = ExternalsDescriptionDict(data) + repo = model[self._name][ExternalsDescription.REPO] + self._repo = GitRepository('test', repo) + + @staticmethod + def _shell_true(url, remote=None): + _ = url + _ = remote + return 0 + + @staticmethod + def _shell_false(url, remote=None): + _ = url + _ = remote + return 1 + + def test_valid_ref_is_invalid(self): + """Verify an invalid reference raises an exception + """ + self._repo._git_showref_tag = self._shell_false + self._repo._git_showref_branch = self._shell_false + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_false + self._repo._tag = 'invalid_ref' + with self.assertRaises(RuntimeError): + self._repo._check_for_valid_ref(self._repo._tag) + + def test_valid_tag(self): + """Verify a valid tag return true + """ + self._repo._git_showref_tag = self._shell_true + self._repo._git_showref_branch = self._shell_false + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_true + self._repo._tag = 'tag1' + received = self._repo._check_for_valid_ref(self._repo._tag) + self.assertTrue(received) + + def test_valid_branch(self): + """Verify a valid tag return true + """ + self._repo._git_showref_tag = self._shell_false + self._repo._git_showref_branch = self._shell_true + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_true + self._repo._tag = 'tag1' + received = self._repo._check_for_valid_ref(self._repo._tag) + self.assertTrue(received) + + def test_valid_hash(self): + """Verify a valid tag return true + """ + self._repo._git_showref_tag = 
self._shell_false + self._repo._git_showref_branch = self._shell_false + self._repo._git_lsremote_branch = self._shell_false + self._repo._git_revparse_commit = self._shell_true + self._repo._tag = '56cc0b5394' + received = self._repo._check_for_valid_ref(self._repo._tag) + self.assertTrue(received) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_unit_repository_svn.py b/test/test_unit_repository_svn.py new file mode 100644 index 0000000000..19616d42d4 --- /dev/null +++ b/test/test_unit_repository_svn.py @@ -0,0 +1,489 @@ +#!/usr/bin/env python + +"""Unit test driver for checkout_externals + +Note: this script assume the path to the checkout_externals.py module is +already in the python path. + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import unittest + +from manic.repository_svn import SvnRepository +from manic.externals_status import ExternalStatus +from manic.externals_description import ExternalsDescription +from manic.externals_description import ExternalsDescriptionDict +from manic.global_constants import EMPTY_STR + +# pylint: disable=W0212 + +SVN_INFO_MOSART = """Path: components/mosart +Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/mosart +URL: https://svn-ccsm-models.cgd.ucar.edu/mosart/trunk_tags/mosart1_0_26 +Relative URL: ^/mosart/trunk_tags/mosart1_0_26 +Repository Root: https://svn-ccsm-models.cgd.ucar.edu +Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5 +Revision: 86711 +Node Kind: directory +Schedule: normal +Last Changed Author: erik +Last Changed Rev: 86031 +Last Changed Date: 2017-07-07 12:28:10 -0600 (Fri, 07 Jul 2017) +""" +SVN_INFO_CISM = """ +Path: components/cism +Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/cism +URL: https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_37 +Relative URL: ^/glc/trunk_tags/cism2_1_37 +Repository Root: https://svn-ccsm-models.cgd.ucar.edu +Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5 +Revision: 86711 +Node Kind: directory +Schedule: normal +Last Changed Author: sacks +Last Changed Rev: 85704 +Last Changed Date: 2017-06-15 05:59:28 -0600 (Thu, 15 Jun 2017) +""" + + +class TestSvnRepositoryCheckURL(unittest.TestCase): + """Verify that the svn_check_url function is working as expected. + """ + + def setUp(self): + """Setup reusable svn repository object + """ + self._name = 'component' + rdata = {ExternalsDescription.PROTOCOL: 'svn', + ExternalsDescription.REPO_URL: + 'https://svn-ccsm-models.cgd.ucar.edu/', + ExternalsDescription.TAG: + 'mosart/trunk_tags/mosart1_0_26', + ExternalsDescription.BRANCH: '' + } + + data = {self._name: + { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: 'junk', + ExternalsDescription.EXTERNALS: '', + ExternalsDescription.REPO: rdata, + }, + } + + model = ExternalsDescriptionDict(data) + repo = model[self._name][ExternalsDescription.REPO] + self._repo = SvnRepository('test', repo) + + def test_check_url_same(self): + """Test that we correctly identify that the correct URL. + """ + svn_output = SVN_INFO_MOSART + expected_url = self._repo.url() + result = self._repo._check_url(svn_output, expected_url) + self.assertEqual(result, ExternalStatus.STATUS_OK) + + def test_check_url_different(self): + """Test that we correctly reject an incorrect URL. 
+ """ + svn_output = SVN_INFO_CISM + expected_url = self._repo.url() + result = self._repo._check_url(svn_output, expected_url) + self.assertEqual(result, ExternalStatus.MODEL_MODIFIED) + + def test_check_url_none(self): + """Test that we can handle an empty string for output, e.g. not an svn + repo. + + """ + svn_output = EMPTY_STR + expected_url = self._repo.url() + result = self._repo._check_url(svn_output, expected_url) + self.assertEqual(result, ExternalStatus.UNKNOWN) + + +class TestSvnRepositoryCheckSync(unittest.TestCase): + """Test whether the SvnRepository svn_check_sync functionality is + correct. + + """ + + def setUp(self): + """Setup reusable svn repository object + """ + self._name = "component" + rdata = {ExternalsDescription.PROTOCOL: 'svn', + ExternalsDescription.REPO_URL: + 'https://svn-ccsm-models.cgd.ucar.edu/', + ExternalsDescription.TAG: + 'mosart/trunk_tags/mosart1_0_26', + ExternalsDescription.BRANCH: EMPTY_STR + } + + data = {self._name: + { + ExternalsDescription.REQUIRED: False, + ExternalsDescription.PATH: 'junk', + ExternalsDescription.EXTERNALS: EMPTY_STR, + ExternalsDescription.REPO: rdata, + }, + } + + model = ExternalsDescriptionDict(data) + repo = model[self._name][ExternalsDescription.REPO] + self._repo = SvnRepository('test', repo) + + @staticmethod + def _svn_info_empty(*_): + """Return an empty info string. Simulates svn info failing. + """ + return '' + + @staticmethod + def _svn_info_synced(*_): + """Return an info sting that is synced with the setUp data + """ + return SVN_INFO_MOSART + + @staticmethod + def _svn_info_modified(*_): + """Return and info string that is modified from the setUp data + """ + return SVN_INFO_CISM + + def test_repo_dir_not_exist(self): + """Test that a directory that doesn't exist returns an error status + + Note: the Repository classes should be prevented from ever + working on an empty directory by the _Source object. + + """ + stat = ExternalStatus() + self._repo._check_sync(stat, 'junk') + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR) + # check_dir should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_repo_dir_exist_no_svn_info(self): + """Test that an empty info string returns an unknown status + """ + stat = ExternalStatus() + # Now we over-ride the _svn_info method on the repo to return + # a known value without requiring access to svn. + self._repo._svn_info = self._svn_info_empty + self._repo._check_sync(stat, '.') + self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN) + # check_dir should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_repo_dir_synced(self): + """Test that a valid info string that is synced to the repo in the + externals description returns an ok status. + + """ + stat = ExternalStatus() + # Now we over-ride the _svn_info method on the repo to return + # a known value without requiring access to svn. + self._repo._svn_info = self._svn_info_synced + self._repo._check_sync(stat, '.') + self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) + # check_dir should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + def test_repo_dir_modified(self): + """Test that a valid svn info string that is out of sync with the + externals description returns a modified status. 
+ + """ + stat = ExternalStatus() + # Now we over-ride the _svn_info method on the repo to return + # a known value without requiring access to svn. + self._repo._svn_info = self._svn_info_modified + self._repo._check_sync(stat, '.') + self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) + # check_dir should only modify the sync_state, not clean_state + self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) + + +class TestSVNStatusXML(unittest.TestCase): + """Test parsing of svn status xml output + """ + SVN_STATUS_XML_DIRTY_ALL = ''' + + + + + +sacks +2017-06-15T11:59:00.355419Z + + + + + + +sacks +2013-02-07T16:17:56.412878Z + + + + + + +sacks +2017-05-01T16:48:27.893741Z + + + + + + + + + + + + + + + + +''' + + SVN_STATUS_XML_DIRTY_MISSING = ''' + + + + + +sacks +2017-06-15T11:59:00.355419Z + + + + + + + + +''' + + SVN_STATUS_XML_DIRTY_MODIFIED = ''' + + + + + +sacks +2013-02-07T16:17:56.412878Z + + + + + + + + +''' + + SVN_STATUS_XML_DIRTY_DELETED = ''' + + + + + +sacks +2017-05-01T16:48:27.893741Z + + + + + + + + +''' + + SVN_STATUS_XML_DIRTY_UNVERSION = ''' + + + + + + + + + + + +''' + + SVN_STATUS_XML_DIRTY_ADDED = ''' + + + + + + + + + + + +''' + + SVN_STATUS_XML_CLEAN = ''' + + + + + + + +''' + + def test_xml_status_dirty_missing(self): + """Verify that svn status output is consindered dirty when there is a + missing file. + + """ + svn_output = self.SVN_STATUS_XML_DIRTY_MISSING + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertTrue(is_dirty) + + def test_xml_status_dirty_modified(self): + """Verify that svn status output is consindered dirty when there is a + modified file. + """ + svn_output = self.SVN_STATUS_XML_DIRTY_MODIFIED + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertTrue(is_dirty) + + def test_xml_status_dirty_deleted(self): + """Verify that svn status output is consindered dirty when there is a + deleted file. + """ + svn_output = self.SVN_STATUS_XML_DIRTY_DELETED + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertTrue(is_dirty) + + def test_xml_status_dirty_unversion(self): + """Verify that svn status output is consindered dirty when there is a + unversioned file. + """ + svn_output = self.SVN_STATUS_XML_DIRTY_UNVERSION + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertTrue(is_dirty) + + def test_xml_status_dirty_added(self): + """Verify that svn status output is consindered dirty when there is a + added file. + """ + svn_output = self.SVN_STATUS_XML_DIRTY_ADDED + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertTrue(is_dirty) + + def test_xml_status_dirty_all(self): + """Verify that svn status output is consindered dirty when there are + multiple dirty files.. + + """ + svn_output = self.SVN_STATUS_XML_DIRTY_ALL + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertTrue(is_dirty) + + def test_xml_status_dirty_clean(self): + """Verify that svn status output is consindered clean when there are + no dirty files. 
+ + """ + svn_output = self.SVN_STATUS_XML_CLEAN + is_dirty = SvnRepository.xml_status_is_dirty( + svn_output) + self.assertFalse(is_dirty) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_unit_utils.py b/test/test_unit_utils.py new file mode 100644 index 0000000000..53f486848c --- /dev/null +++ b/test/test_unit_utils.py @@ -0,0 +1,278 @@ +#!/usr/bin/env python + +"""Unit test driver for checkout_externals + +Note: this script assume the path to the checkout_externals.py module is +already in the python path. + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import os +import unittest + +from manic.utils import str_to_bool, execute_subprocess +from manic.utils import is_remote_url, split_remote_url, expand_local_url + + +class TestExecuteSubprocess(unittest.TestCase): + """Test the application logic of execute_subprocess wrapper + """ + + def test_exesub_return_stat_err(self): + """Test that execute_subprocess returns a status code when caller + requests and the executed subprocess fails. + + """ + cmd = ['false'] + status = execute_subprocess(cmd, status_to_caller=True) + self.assertEqual(status, 1) + + def test_exesub_return_stat_ok(self): + """Test that execute_subprocess returns a status code when caller + requests and the executed subprocess succeeds. + + """ + cmd = ['true'] + status = execute_subprocess(cmd, status_to_caller=True) + self.assertEqual(status, 0) + + def test_exesub_except_stat_err(self): + """Test that execute_subprocess raises an exception on error when + caller doesn't request return code + + """ + cmd = ['false'] + with self.assertRaises(RuntimeError): + execute_subprocess(cmd, status_to_caller=False) + + +class TestStrToBool(unittest.TestCase): + """Test the string to boolean conversion routine. + + """ + + def test_case_insensitive_true(self): + """Verify that case insensitive variants of 'true' returns the True + boolean. + + """ + values = ['true', 'TRUE', 'True', 'tRuE', 't', 'T', ] + for value in values: + received = str_to_bool(value) + self.assertTrue(received) + + def test_case_insensitive_false(self): + """Verify that case insensitive variants of 'false' returns the False + boolean. + + """ + values = ['false', 'FALSE', 'False', 'fAlSe', 'f', 'F', ] + for value in values: + received = str_to_bool(value) + self.assertFalse(received) + + def test_invalid_str_error(self): + """Verify that a non-true/false string generates a runtime error. + """ + values = ['not_true_or_false', 'A', '1', '0', + 'false_is_not_true', 'true_is_not_false'] + for value in values: + with self.assertRaises(RuntimeError): + str_to_bool(value) + + +class TestIsRemoteURL(unittest.TestCase): + """Crude url checking to determine if a url is local or remote. + + """ + + def test_url_remote_git(self): + """verify that a remote git url is identified. + """ + url = 'git@somewhere' + is_remote = is_remote_url(url) + self.assertTrue(is_remote) + + def test_url_remote_ssh(self): + """verify that a remote ssh url is identified. + """ + url = 'ssh://user@somewhere' + is_remote = is_remote_url(url) + self.assertTrue(is_remote) + + def test_url_remote_http(self): + """verify that a remote http url is identified. + """ + url = 'http://somewhere' + is_remote = is_remote_url(url) + self.assertTrue(is_remote) + + def test_url_remote_https(self): + """verify that a remote https url is identified. 
+ """ + url = 'https://somewhere' + is_remote = is_remote_url(url) + self.assertTrue(is_remote) + + def test_url_local_user(self): + """verify that a local path with '~/path/to/repo' gets rejected + + """ + url = '~/path/to/repo' + is_remote = is_remote_url(url) + self.assertFalse(is_remote) + + def test_url_local_var_curly(self): + """verify that a local path with env var '${HOME}' gets rejected + """ + url = '${HOME}/path/to/repo' + is_remote = is_remote_url(url) + self.assertFalse(is_remote) + + def test_url_local_var(self): + """verify that a local path with an env var '$HOME' gets rejected + """ + url = '$HOME/path/to/repo' + is_remote = is_remote_url(url) + self.assertFalse(is_remote) + + def test_url_local_abs(self): + """verify that a local abs path gets rejected + """ + url = '/path/to/repo' + is_remote = is_remote_url(url) + self.assertFalse(is_remote) + + def test_url_local_rel(self): + """verify that a local relative path gets rejected + """ + url = '../../path/to/repo' + is_remote = is_remote_url(url) + self.assertFalse(is_remote) + + +class TestSplitRemoteURL(unittest.TestCase): + """Crude url checking to determine if a url is local or remote. + + """ + + def test_url_remote_git(self): + """verify that a remote git url is identified. + """ + url = 'git@somewhere.com:org/repo' + received = split_remote_url(url) + self.assertEqual(received, "org/repo") + + def test_url_remote_ssh(self): + """verify that a remote ssh url is identified. + """ + url = 'ssh://user@somewhere.com/path/to/repo' + received = split_remote_url(url) + self.assertEqual(received, 'somewhere.com/path/to/repo') + + def test_url_remote_http(self): + """verify that a remote http url is identified. + """ + url = 'http://somewhere.org/path/to/repo' + received = split_remote_url(url) + self.assertEqual(received, 'somewhere.org/path/to/repo') + + def test_url_remote_https(self): + """verify that a remote http url is identified. + """ + url = 'http://somewhere.gov/path/to/repo' + received = split_remote_url(url) + self.assertEqual(received, 'somewhere.gov/path/to/repo') + + def test_url_local_url_unchanged(self): + """verify that a local path is unchanged + + """ + url = '/path/to/repo' + received = split_remote_url(url) + self.assertEqual(received, url) + + +class TestExpandLocalURL(unittest.TestCase): + """Crude url checking to determine if a url is local or remote. + + Remote should be unmodified. + + Local, should perform user and variable expansion. + + """ + + def test_url_local_user1(self): + """verify that a local path with '~/path/to/repo' gets expanded to an + absolute path. + + NOTE(bja, 2017-11) we can't test for something like: + '~user/path/to/repo' because the user has to be in the local + machine password directory and we don't know a user name that + is valid on every system....? + + """ + field = 'test' + url = '~/path/to/repo' + received = expand_local_url(url, field) + print(received) + self.assertTrue(os.path.isabs(received)) + + def test_url_local_expand_curly(self): + """verify that a local path with '${HOME}' gets expanded to an absolute path. + """ + field = 'test' + url = '${HOME}/path/to/repo' + received = expand_local_url(url, field) + self.assertTrue(os.path.isabs(received)) + + def test_url_local_expand_var(self): + """verify that a local path with '$HOME' gets expanded to an absolute path. 
+ """ + field = 'test' + url = '$HOME/path/to/repo' + received = expand_local_url(url, field) + self.assertTrue(os.path.isabs(received)) + + def test_url_local_env_missing(self): + """verify that a local path with env var that is missing gets left as-is + + """ + field = 'test' + url = '$TMP_VAR/path/to/repo' + received = expand_local_url(url, field) + print(received) + self.assertEqual(received, url) + + def test_url_local_expand_env(self): + """verify that a local path with another env var gets expanded to an + absolute path. + + """ + field = 'test' + os.environ['TMP_VAR'] = '/some/absolute' + url = '$TMP_VAR/path/to/repo' + received = expand_local_url(url, field) + del os.environ['TMP_VAR'] + print(received) + self.assertTrue(os.path.isabs(received)) + self.assertEqual(received, '/some/absolute/path/to/repo') + + def test_url_local_normalize_rel(self): + """verify that a local path with another env var gets expanded to an + absolute path. + + """ + field = 'test' + url = '/this/is/a/long/../path/to/a/repo' + received = expand_local_url(url, field) + print(received) + self.assertEqual(received, '/this/is/a/path/to/a/repo') + + +if __name__ == '__main__': + unittest.main() From 1c4c159ac56878e03bbb9b859b69b45d90058cef Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Fri, 22 Dec 2017 10:27:14 -0700 Subject: [PATCH 02/31] Add externals description files. Add externals description files CLM.cfg for CLM and CESM.cfg for standalone CLM checkouts. Testing: standalone CLM checkout - ok. git/manage_externals based CESM checkout - ok cime tests - not run. --- CESM.cfg | 37 +++++++++++++++++++++++++++++++++++++ CLM.cfg | 17 +++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 CESM.cfg create mode 100644 CLM.cfg diff --git a/CESM.cfg b/CESM.cfg new file mode 100644 index 0000000000..074b088532 --- /dev/null +++ b/CESM.cfg @@ -0,0 +1,37 @@ +[ctsm] +local_path = . 
+protocol = externals_only +externals = CLM.cfg +required = True + +[cism] +local_path = components/cism +protocol = svn +repo_url = https://svn-ccsm-models.cgd.ucar.edu/glc +tag = trunk_tags/cism2_1_40 +required = True + +[rtm] +local_path = components/rtm +protocol = git +repo_url = https://github.com/ESCOMP/rtm +tag = rtm1_0_63 +required = True + +[mosart] +local_path = components/mosart +protocol = git +repo_url = https://github.com/ESCOMP/mosart +tag = mosart1_0_28 +required = True + +[cime] +local_path = cime +protocol = git +repo_url = https://github.com/CESM-Development/cime +tag = billsacks/always_glcmec_n01 +required = True + +[externals_description] +schema_version = 1.0.0 + diff --git a/CLM.cfg b/CLM.cfg new file mode 100644 index 0000000000..968c5122ea --- /dev/null +++ b/CLM.cfg @@ -0,0 +1,17 @@ +[fates] +local_path = src/fates +protocol = git +repo_url = https://github.com/NCAR/fates-release +tag = fates_s1.3.0_a1.0.0_rev3 +required = True + +[PTCLM] +local_path = tools/PTCLM +protocol = git +repo_url = https://github.com/ESCOMP/ptclm +tag = PTCLM2_171216c +required = True + +[externals_description] +schema_version = 1.0.0 + From 3c7492fa4a4a5d9d09db73a5137734862fdaa3d9 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 22 Dec 2017 15:39:23 -0700 Subject: [PATCH 03/31] Point to a git version of cism Note that this is based off of cism2_1_44, rather than the previously-pointed-to cism2_1_40 --- CESM.cfg | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/CESM.cfg b/CESM.cfg index 074b088532..a8be5bc77f 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -6,9 +6,10 @@ required = True [cism] local_path = components/cism -protocol = svn -repo_url = https://svn-ccsm-models.cgd.ucar.edu/glc -tag = trunk_tags/cism2_1_40 +protocol = git +repo_url = https://github.com/ESCOMP/cism-wrapper +branch = add_manage_externals_file +externals = CISM.cfg required = True [rtm] From dbb562b0b8c28e9b7a2e99ea08eb8e6d6f6a461d Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Sat, 23 Dec 2017 10:27:02 -0700 Subject: [PATCH 04/31] Add .config_files.xml to override COMP_ROOT_DIR_LND This is the same as the default entry in cime/config/cesm/config_files.xml except for the value for clm: In a standalone clm checkout, COMP_ROOT_DIR_LND is $SRCROOT rather than $SRCROOT/components/clm. However, because of the way overrides are handled, we need to re-specify the full information here rather than just overriding the value for clm. 
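
To make the effect of this override concrete: the build-script changes in the
following commits stop hard-coding $SRCROOT/components/clm and instead ask the
case for COMP_ROOT_DIR_LND. A minimal sketch of that pattern is shown here; it
assumes a CIME case object with get_value(), as used in cime_config/buildlib,
and the helper name lnd_bld_dir is illustrative only, not part of the actual
scripts.

import os

def lnd_bld_dir(case):
    """Return the land component's bld directory for this case.

    Sketch only: COMP_ROOT_DIR_LND resolves to $SRCROOT in a standalone
    CTSM checkout and to $SRCROOT/components/clm in a full CESM checkout,
    so no 'components/clm' path component is assumed here.
    """
    lnd_root = case.get_value("COMP_ROOT_DIR_LND")
    return os.path.join(lnd_root, "bld")
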
--- .config_files.xml | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 .config_files.xml diff --git a/.config_files.xml b/.config_files.xml new file mode 100644 index 0000000000..8e4868b94f --- /dev/null +++ b/.config_files.xml @@ -0,0 +1,31 @@ + + + + + + + + + char + unset + + $SRCROOT + $CIMEROOT/src/components/data_comps/dlnd + $CIMEROOT/src/components/stub_comps/slnd + $CIMEROOT/src/components/xcpl_comps/xlnd + + case_comps + env_case.xml + Root directory of the case land model component + $CIMEROOT/config/xml_schemas/config_compsets.xsd + + + From 199f1de62363084328a02c8af025804651111571 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Sat, 23 Dec 2017 10:42:28 -0700 Subject: [PATCH 05/31] Fix scripts to use COMP_ROOT_DIR_LND We can no longer assume 'components/clm' in the directory structure --- cime_config/buildcpp | 4 ++-- cime_config/buildlib | 31 +++++++++++++++---------------- cime_config/buildnml | 6 +++--- 3 files changed, 20 insertions(+), 21 deletions(-) diff --git a/cime_config/buildcpp b/cime_config/buildcpp index 335da0edc9..adf1f389f8 100644 --- a/cime_config/buildcpp +++ b/cime_config/buildcpp @@ -16,7 +16,7 @@ def buildcpp(case): """ caseroot = case.get_value("CASEROOT") - srcroot = case.get_value("SRCROOT") + lnd_root = case.get_value("COMP_ROOT_DIR_LND") lnd_grid = case.get_value("LND_GRID") mask_grid = case.get_value("MASK_GRID") clm_usrdat_name = case.get_value("CLM_USRDAT_NAME") @@ -35,7 +35,7 @@ def buildcpp(case): if not os.path.isdir(clmconf): os.makedirs(clmconf) - cmd = os.path.join(srcroot,"components","clm","bld","configure") + cmd = os.path.join(lnd_root,"bld","configure") command = "%s %s %s -usr_src %s -comp_intf mct " \ %(cmd, config_opts, clm_config_opts, os.path.join(caseroot,"SourceMods","src.clm")) diff --git a/cime_config/buildlib b/cime_config/buildlib index 105e3ce1b4..6b86bb0b65 100755 --- a/cime_config/buildlib +++ b/cime_config/buildlib @@ -28,7 +28,7 @@ def _main_func(): with Case(caseroot) as case: casetools = case.get_value("CASETOOLS") - srcroot = case.get_value("SRCROOT") + lnd_root = case.get_value("COMP_ROOT_DIR_LND") gmake_j = case.get_value("GMAKE_J") gmake = case.get_value("GMAKE") mach = case.get_value("MACH") @@ -48,7 +48,7 @@ def _main_func(): # create Filepath file and clm_cppdefs for clm4_0 #------------------------------------------------------- # the call to configure here creates BOTH the Filepath file and the clm_cppdefs - cmd = os.path.join(os.path.join(srcroot,"components","clm","cime_config","buildcpp")) + cmd = os.path.join(os.path.join(lnd_root,"cime_config","buildcpp")) logger.info(" ...calling clm buildcpp to set build time options") try: mod = imp.load_source("buildcpp", cmd) @@ -68,22 +68,21 @@ def _main_func(): #------------------------------------------------------- filepath_file = os.path.join(bldroot,"Filepath") if not os.path.isfile(filepath_file): - srcroot = case.get_value("SRCROOT") caseroot = case.get_value("CASEROOT") paths = [os.path.join(caseroot,"SourceMods","src.clm"), - os.path.join(srcroot,"components","clm","src","main"), - os.path.join(srcroot,"components","clm","src","biogeophys"), - os.path.join(srcroot,"components","clm","src","biogeochem"), - os.path.join(srcroot,"components","clm","src","soilbiogeochem"), - os.path.join(srcroot,"components","clm","src","dyn_subgrid"), - os.path.join(srcroot,"components","clm","src","init_interp"), - os.path.join(srcroot,"components","clm","src","fates"), - os.path.join(srcroot,"components","clm","src","fates","main"), - 
os.path.join(srcroot,"components","clm","src","fates","biogeophys"), - os.path.join(srcroot,"components","clm","src","fates","biogeochem"), - os.path.join(srcroot,"components","clm","src","fates","fire"), - os.path.join(srcroot,"components","clm","src","utils"), - os.path.join(srcroot,"components","clm","src","cpl")] + os.path.join(lnd_root,"src","main"), + os.path.join(lnd_root,"src","biogeophys"), + os.path.join(lnd_root,"src","biogeochem"), + os.path.join(lnd_root,"src","soilbiogeochem"), + os.path.join(lnd_root,"src","dyn_subgrid"), + os.path.join(lnd_root,"src","init_interp"), + os.path.join(lnd_root,"src","fates"), + os.path.join(lnd_root,"src","fates","main"), + os.path.join(lnd_root,"src","fates","biogeophys"), + os.path.join(lnd_root,"src","fates","biogeochem"), + os.path.join(lnd_root,"src","fates","fire"), + os.path.join(lnd_root,"src","utils"), + os.path.join(lnd_root,"src","cpl")] with open(filepath_file, "w") as filepath: filepath.write("\n".join(paths)) filepath.write("\n") diff --git a/cime_config/buildnml b/cime_config/buildnml index 682510e07a..dde7cbb126 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -36,7 +36,7 @@ def buildnml(case, caseroot, compname): if compname != "clm": raise AttributeError - srcroot = case.get_value("SRCROOT") + lnd_root = case.get_value("COMP_ROOT_DIR_LND") din_loc_root = case.get_value("DIN_LOC_ROOT") ccsm_co2_ppmv = case.get_value("CCSM_CO2_PPMV") clm_co2_type = case.get_value("CLM_CO2_TYPE") @@ -105,7 +105,7 @@ def buildnml(case, caseroot, compname): if not filecmp.cmp(file1, file2): call_buildcpp = True if call_buildcpp: - cmd = os.path.join(os.path.join(srcroot,"components","clm","cime_config","buildcpp")) + cmd = os.path.join(os.path.join(lnd_root,"cime_config","buildcpp")) logger.info(" ...calling clm buildcpp to set build time options") try: mod = imp.load_source("buildcpp", cmd) @@ -225,7 +225,7 @@ def buildnml(case, caseroot, compname): create_namelist_infile(case, user_nl_file, namelist_infile, "\n".join(infile_lines)) - cmd = os.path.join(srcroot, "components","clm","bld","build-namelist") + cmd = os.path.join(lnd_root,"bld","build-namelist") command = ("%s -infile %s -csmdata %s -inputdata %s %s -namelist \"&clm_inparm start_ymd=%s %s/ \" " "%s %s -res %s %s -clm_start_type %s -envxml_dir %s -l_ncpl %s " From 333a28f509cfd2dd1c12560ecea23614c9b5ed63 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Sat, 23 Dec 2017 11:19:48 -0700 Subject: [PATCH 06/31] Point to new tag on always_glcmec branch This cherry-picks the two PRs that are needed for setting COMP_ROOT_DIR_LND This is needed now that we're running CTSM with the new directory structure, in git. I'm not quite ready to update to the latest cime master, so I'm just cherry-picking the specific changes we need. 
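
The CESM.cfg entries updated in this and the following commits are plain INI
sections, so they can be inspected with nothing more than Python's standard
configparser. The sketch below is illustrative only and is not the parser used
by manage_externals itself; list_externals is a hypothetical helper name.

try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2 fallback

def list_externals(cfg_path):
    """Return {name: (protocol, repo_url, ref)} for each external in cfg_path."""
    config = configparser.ConfigParser()
    config.read(cfg_path)
    externals = {}
    for section in config.sections():
        if section == 'externals_description':
            continue  # metadata only (schema_version), not an external
        protocol = config.get(section, 'protocol')
        url = None
        if config.has_option(section, 'repo_url'):
            url = config.get(section, 'repo_url')
        ref = None
        for key in ('tag', 'branch'):
            if config.has_option(section, key):
                ref = config.get(section, key)
        externals[section] = (protocol, url, ref)
    return externals

Run against the CESM.cfg shown above, this would report, for example, the
[cime] entry with protocol 'git' and whatever tag the current commit points
it at.
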
--- CESM.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CESM.cfg b/CESM.cfg index a8be5bc77f..af5b19f67c 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -30,7 +30,7 @@ required = True local_path = cime protocol = git repo_url = https://github.com/CESM-Development/cime -tag = billsacks/always_glcmec_n01 +tag = billsacks/always_glcmec_n02 required = True [externals_description] From 69a8f3528f97238281974c909993b51a40c2d5a0 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 27 Dec 2017 05:41:08 -0700 Subject: [PATCH 07/31] Add some things to .gitignore --- .gitignore | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index bcd4cec71d..61903b95d5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,11 @@ +# directories checked out by manage_externals, and other files created +# by manage_externals +manage_externals.log +src/fates/ +tools/PTCLM/ +cime/ +components/ + # ignore svn directories **/.svn/** .svn/ @@ -25,7 +33,4 @@ CMakeFiles/ core.* *.gz *.log !run.log - - - - +cime_config/buildnmlc From b07e50326edac9df592b3b7cd191255800cf60c0 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 27 Dec 2017 10:40:52 -0700 Subject: [PATCH 08/31] Point to CISM tag --- CESM.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CESM.cfg b/CESM.cfg index af5b19f67c..f2be2dfc18 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -8,7 +8,7 @@ required = True local_path = components/cism protocol = git repo_url = https://github.com/ESCOMP/cism-wrapper -branch = add_manage_externals_file +tag = cism2_1_45 externals = CISM.cfg required = True From 0f6f4021b85ae70dd94aeedbb89d95bfe181b0da Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 29 Dec 2017 12:43:09 -0700 Subject: [PATCH 09/31] Point to new cime version This cherry-picks a commit that's needed to pick up system tests defined by ctsm --- CESM.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CESM.cfg b/CESM.cfg index f2be2dfc18..2ac8ed208a 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -30,7 +30,7 @@ required = True local_path = cime protocol = git repo_url = https://github.com/CESM-Development/cime -tag = billsacks/always_glcmec_n02 +tag = billsacks/always_glcmec_n03 required = True [externals_description] From 62ab3818fcafddcdfe1d81838797b6399225da4e Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 29 Dec 2017 13:51:08 -0700 Subject: [PATCH 10/31] Do not build shr_cal_mod as part of the unit test build This wasn't actually needed now, but will be needed when we update cime --- src/CMakeLists.txt | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 4d392b1fb1..3a14c4cb18 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -54,6 +54,24 @@ foreach (sourcefile ${share_sources}) endif() endforeach() +# Remove shr_cal_mod from share_sources. +# +# shr_cal_mod depends on ESMF (or the lightweight esmf wrf timemgr, at +# least). Since CTSM doesn't currently use shr_cal_mod, we're avoiding +# the extra overhead of including esmf_wrf_timemgr sources in this +# build. +# +# TODO: like above, this should be moved into a general-purpose function +# in Sourcelist_utils. 
Then this block of code could be replaced with a +# single call, like: remove_source_file(${share_sources} +# "shr_cal_mod.F90") +foreach (sourcefile ${share_sources}) + string(REGEX MATCH "shr_cal_mod.F90" match_found ${sourcefile}) + if(match_found) + list(REMOVE_ITEM share_sources ${sourcefile}) + endif() +endforeach() + # Build libraries containing stuff needed for the unit tests. # Eventually, these add_library calls should probably be distributed into the correct location, rather than being in this top-level CMakeLists.txt file. add_library(csm_share ${share_sources} ${drv_sources_needed}) From bf54f33c43701a8cc05d80c1733b1cb07dfa4182 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 29 Dec 2017 14:11:50 -0700 Subject: [PATCH 11/31] Allow unit tests to run with a flexible directory structure Required a change in cime (for which I'll submit a PR to master) as well as minor changes to CLM's CMakeLists.txt. --- CESM.cfg | 2 +- src/CMakeLists.txt | 13 ++++++------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/CESM.cfg b/CESM.cfg index 2ac8ed208a..cba625798f 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -30,7 +30,7 @@ required = True local_path = cime protocol = git repo_url = https://github.com/CESM-Development/cime -tag = billsacks/always_glcmec_n03 +tag = billsacks/always_glcmec_n04 required = True [externals_description] diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 3a14c4cb18..3e4ab93151 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -8,7 +8,6 @@ project(clm45_tests Fortran C) include(CIME_utils) set(CLM_ROOT "..") -set(CESM_ROOT "${CLM_ROOT}/../../") # This definition is needed to avoid having ESMF depend on mpi add_definitions(-DHIDE_MPI) @@ -17,10 +16,10 @@ add_definitions(-DHIDE_MPI) # done first, so that in case of name collisions, the CLM versions take # precedence (when there are two files with the same name, the one added later # wins). 
-add_subdirectory(${CESM_ROOT}/cime/src/share/util csm_share) -add_subdirectory(${CESM_ROOT}/cime/src/share/unit_test_stubs/util csm_share_stubs) -add_subdirectory(${CESM_ROOT}/cime/src/share/esmf_wrf_timemgr esmf_wrf_timemgr) -add_subdirectory(${CESM_ROOT}/cime/src/drivers/mct/shr drv_share) +add_subdirectory(${CIMEROOT}/src/share/util csm_share) +add_subdirectory(${CIMEROOT}/src/share/unit_test_stubs/util csm_share_stubs) +add_subdirectory(${CIMEROOT}/src/share/esmf_wrf_timemgr esmf_wrf_timemgr) +add_subdirectory(${CIMEROOT}/src/drivers/mct/shr drv_share) # Extract just the files we need from drv_share set (drv_sources_needed_base @@ -83,11 +82,11 @@ add_dependencies(esmf_wrf_timemgr csm_share) add_dependencies(clm csm_share esmf_wrf_timemgr) # We need to look for header files here, in order to pick up shr_assert.h -include_directories(${CESM_ROOT}/cime/src/share/include) +include_directories(${CIMEROOT}/src/share/include) # And we need to look for header files here, for some include files needed by # the esmf_wrf_timemgr code -include_directories(${CESM_ROOT}/cime/src/share/esmf_wrf_timemgr) +include_directories(${CIMEROOT}/src/share/esmf_wrf_timemgr) # Tell cmake to look for libraries & mod files here, because this is where we built libraries include_directories(${CMAKE_CURRENT_BINARY_DIR}) From a82cda856ee5bd02f066d7bd7ae23ed3edad0f25 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 17 Jan 2018 12:19:17 -0700 Subject: [PATCH 12/31] Use cimeroot rather than hard-coded relative path This is needed to support a standalone checkout, where cime appears in a different location relative to the bld directory --- bld/CLMBuildNamelist.pm | 12 +++++++----- bld/configure | 9 ++++++--- cime_config/buildcpp | 5 +++-- cime_config/buildnml | 4 ++-- 4 files changed, 18 insertions(+), 12 deletions(-) diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index 01118dab67..562f21d2fe 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -59,6 +59,7 @@ SYNOPSIS Create the namelist for CLM REQUIRED OPTIONS + -cimeroot "directory" Path to cime directory -config "filepath" Read the given CLM configuration cache file. Default: "config_cache.xml". -d "directory" Directory where output namelist file will be written @@ -245,7 +246,8 @@ sub process_commandline { # the array! 
$nl_flags->{'cmdline'} = "@ARGV"; - my %opts = ( config => "config_cache.xml", + my %opts = ( cimeroot => undef, + config => "config_cache.xml", csmdata => undef, clm_usr_name => undef, co2_type => undef, @@ -282,6 +284,7 @@ sub process_commandline { ); GetOptions( + "cimeroot=s" => \$opts{'cimeroot'}, "clm_demand=s" => \$opts{'clm_demand'}, "co2_ppmv=f" => \$opts{'co2_ppmv'}, "co2_type=s" => \$opts{'co2_type'}, @@ -345,9 +348,9 @@ sub check_for_perl_utils { my $cfgdir = shift; my $opts_ref = shift; - # Determine CESM root directory and perl5lib root directory - my $cesmroot = abs_path( "$cfgdir/../../../"); - my $perl5lib_dir = "$cesmroot/cime/utils/perl5lib"; + # Determine CIME root directory and perl5lib root directory + my $cimeroot = $opts_ref->{'cimeroot'}; + my $perl5lib_dir = "$cimeroot/utils/perl5lib"; #----------------------------------------------------------------------------- # Add $perl5lib_dir to the list of paths that Perl searches for modules @@ -4381,7 +4384,6 @@ sub main { my $cfg = read_configure_definition($cfgdir, \%opts); my $physv = config_files::clm_phys_vers->new( $cfg->get('phys') ); - my $cesmroot = abs_path( "$cfgdir/../../../"); my $definition = read_namelist_definition($cfgdir, \%opts, \%nl_flags, $physv); my $defaults = read_namelist_defaults($cfgdir, \%opts, \%nl_flags, $cfg, $physv); diff --git a/bld/configure b/bld/configure index 099d738818..b333259a98 100755 --- a/bld/configure +++ b/bld/configure @@ -52,6 +52,7 @@ OPTIONS -cache Name of output cache file (default: config_cache.xml). -cachedir Name of directory where output cache file is written (default: CLM build directory). + -cimeroot

REQUIRED: Path to cime directory -clm_root Root directory of clm source code (default: directory above location of this script) -cppdefs A string of user specified CPP defines. Appended to @@ -122,6 +123,7 @@ my %opts = ( phys => "clm4_0", nofire => undef, noio => undef, + cimeroot => undef, clm_root => undef, spinup => "normal", ); @@ -131,6 +133,7 @@ GetOptions( "cache=s" => \$opts{'cache'}, "cachedir=s" => \$opts{'cachedir'}, "snicar_frc=s" => \$opts{'snicar_frc'}, + "cimeroot=s" => \$opts{'cimeroot'}, "clm_root=s" => \$opts{'clm_root'}, "cppdefs=s" => \$opts{'cppdefs'}, "comp_intf=s" => \$opts{'comp_intf'}, @@ -176,9 +179,9 @@ my %cfg = (); # build configuration # Make sure we can find required perl modules and configuration files. # Look for them in the directory that contains the configure script. -my $cesmroot = abs_path( "$cfgdir/../../../"); -my $casecfgdir = "$cesmroot/cime/scripts/Tools"; -my $perl5lib = "$cesmroot/cime/utils/perl5lib/"; +my $cimeroot = $opts{'cimeroot'}; +my $casecfgdir = "$cimeroot/scripts/Tools"; +my $perl5lib = "$cimeroot/utils/perl5lib/"; # The Build::Config module provides utilities to store and manipulate the configuration. my $file = "$perl5lib/Build/Config.pm"; diff --git a/cime_config/buildcpp b/cime_config/buildcpp index adf1f389f8..7a3bf0083e 100644 --- a/cime_config/buildcpp +++ b/cime_config/buildcpp @@ -16,6 +16,7 @@ def buildcpp(case): """ caseroot = case.get_value("CASEROOT") + cimeroot = case.get_value("CIMEROOT") lnd_root = case.get_value("COMP_ROOT_DIR_LND") lnd_grid = case.get_value("LND_GRID") mask_grid = case.get_value("MASK_GRID") @@ -36,8 +37,8 @@ def buildcpp(case): os.makedirs(clmconf) cmd = os.path.join(lnd_root,"bld","configure") - command = "%s %s %s -usr_src %s -comp_intf mct " \ - %(cmd, config_opts, clm_config_opts, os.path.join(caseroot,"SourceMods","src.clm")) + command = "%s -cimeroot %s %s %s -usr_src %s -comp_intf mct " \ + %(cmd, cimeroot, config_opts, clm_config_opts, os.path.join(caseroot,"SourceMods","src.clm")) run_cmd_no_fail(command, from_dir=clmconf) diff --git a/cime_config/buildnml b/cime_config/buildnml index dde7cbb126..dc11d2735b 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -227,11 +227,11 @@ def buildnml(case, caseroot, compname): cmd = os.path.join(lnd_root,"bld","build-namelist") - command = ("%s -infile %s -csmdata %s -inputdata %s %s -namelist \"&clm_inparm start_ymd=%s %s/ \" " + command = ("%s -cimeroot %s -infile %s -csmdata %s -inputdata %s %s -namelist \"&clm_inparm start_ymd=%s %s/ \" " "%s %s -res %s %s -clm_start_type %s -envxml_dir %s -l_ncpl %s " "-lnd_frac %s -glc_nec %s -co2_ppmv %s -co2_type %s -config %s " "%s %s %s" - %(cmd, infile, din_loc_root, inputdata_file, ignore, start_ymd, clm_namelist_opts, + %(cmd, _CIMEROOT, infile, din_loc_root, inputdata_file, ignore, start_ymd, clm_namelist_opts, nomeg, usecase, lnd_grid, clmusr, start_type, caseroot, lnd_ncpl, lndfrac_file, glc_nec, ccsm_co2_ppmv, clm_co2_type, config_cache_file, clm_bldnml_opts, spinup, tuning)) From 6e30f2390e19b479e308f74efcc78da2f584d0df Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 19 Jan 2018 16:05:03 -0700 Subject: [PATCH 13/31] Remove no longer needed SVN_EXTERNAL_DIRECTORIES files --- SVN_EXTERNAL_DIRECTORIES | 2 -- SVN_EXTERNAL_DIRECTORIES.standalone | 5 ----- 2 files changed, 7 deletions(-) delete mode 100644 SVN_EXTERNAL_DIRECTORIES delete mode 100644 SVN_EXTERNAL_DIRECTORIES.standalone diff --git a/SVN_EXTERNAL_DIRECTORIES b/SVN_EXTERNAL_DIRECTORIES deleted file mode 100644 index 
40ac793f7f..0000000000 --- a/SVN_EXTERNAL_DIRECTORIES +++ /dev/null @@ -1,2 +0,0 @@ -src/fates https://github.com/NCAR/fates-release/tags/fates_s1.3.0_a1.0.0_rev3 -tools/PTCLM https://github.com/ESCOMP/ptclm/tags/PTCLM2_171216c diff --git a/SVN_EXTERNAL_DIRECTORIES.standalone b/SVN_EXTERNAL_DIRECTORIES.standalone deleted file mode 100644 index ff91f394af..0000000000 --- a/SVN_EXTERNAL_DIRECTORIES.standalone +++ /dev/null @@ -1,5 +0,0 @@ -cime https://github.com/CESM-Development/cime/tags/billsacks/always_glcmec_n01 -components/clm/tools/gen_domain https://github.com/CESM-Development/cime/tags/billsacks/always_glcmec_n01/tools/mapping/gen_domain_files -components/cism https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_40 -components/rtm https://github.com/ESCOMP/rtm/tags/rtm1_0_63 -components/mosart https://github.com/ESCOMP/mosart/tags/mosart1_0_28 From 4c368cd92e9d4fb7f2c01561c25c260d3a548bc9 Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Fri, 19 Jan 2018 17:26:53 -0700 Subject: [PATCH 14/31] Squashed 'manage_externals/' changes from d29760f7..6c3d4dec 6c3d4dec Change status symbol and improve error message 9fee6a0c Fix lint issues on branch. bfd95977 Change 'm' to 's' and improve message when externals are dirty 013b82c6 Ignore untracked files when running checkout 7c8638b2 If we're already in sync, do not show output from the checkout command 8067c59c Refine verbose output b2ee78f0 Implement intermediate verbosity level f535defc Rename verbose status functions. 002de3c2 Better error messages for failed commands 1dcb2f40 Bugfix: initialize logging during system tests 290c205f Fixes from autopep8 and fixes for pylint 2c98c94f Fix error in docstring 5cf62214 Abort if we find an svn external in dirty state following switch fc093a7b Give more complete error message for failed command 2e0de468 Add command line option to disable logging 2739ebbd Update style/lint rules in test makefile bdb29ca5 Bugfix: fetch all tags from a remote 15f60d2d Update documentation git-subtree-dir: manage_externals git-subtree-split: 6c3d4dec7fcceb734b1d740d92d32901f0ced659 --- README.md | 19 ++- checkout_externals | 6 - manic/checkout.py | 72 +++++---- manic/externals_status.py | 44 +++++- manic/global_constants.py | 4 + manic/repository.py | 10 +- manic/repository_git.py | 92 ++++++------ manic/repository_svn.py | 96 ++++++++---- manic/sourcetree.py | 96 ++++++------ manic/utils.py | 97 ++++++++++--- test/Makefile | 5 +- .../24/4386e788c9bc608613e127a329c742450a60e4 | Bin 0 -> 164 bytes .../4d/837135915ed93eed6fff6b439f284ce317296f | Bin 0 -> 89 bytes .../5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae | Bin 0 -> 93 bytes .../a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 | 3 + .../refs/tags/abandoned-feature | 1 + test/test_sys_checkout.py | 137 +++++++++++++++--- test/test_unit_repository_git.py | 1 + test/test_unit_repository_svn.py | 28 +++- test/test_unit_utils.py | 72 +++++++++ 20 files changed, 575 insertions(+), 208 deletions(-) create mode 100644 test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 create mode 100644 test/repos/simple-ext-fork.git/objects/4d/837135915ed93eed6fff6b439f284ce317296f create mode 100644 test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae create mode 100644 test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 create mode 100644 test/repos/simple-ext-fork.git/refs/tags/abandoned-feature diff --git a/README.md b/README.md index 90ed2d1459..a847b7a2cc 100644 --- a/README.md +++ b/README.md @@ -68,8 
+68,8 @@ The root of the source tree will be referred to as `${SRC_ROOT}` below. If there are *any* modifications to *any* working copy according to the git or svn 'status' command, checkout_externals will not update any external repositories. Modifications - include: modified files, added files, removed files, missing - files or untracked files, + include: modified files, added files, removed files, or missing + files. * Checkout all required components from a user specified externals description file: @@ -83,7 +83,7 @@ The root of the source tree will be referred to as `${SRC_ROOT}` below. $ ./manage_externals/checkout_externals --status ./cime - m ./components/cism + s ./components/cism ./components/mosart e-o ./components/rtm M ./src/fates @@ -95,17 +95,18 @@ The root of the source tree will be referred to as `${SRC_ROOT}` below. * column two indicates whether the working copy has modified files. * column three shows how the repository is managed, optional or required - Colunm one will be one of these values: - * m : modified : repository is modefied compared to the externals description + Column one will be one of these values: + * s : out-of-sync : repository is checked out at a different commit + compared with the externals description * e : empty : directory does not exist - checkout_externals has not been run * ? : unknown : directory exists but .git or .svn directories are missing - Colunm two will be one of these values: - * M : Modified : untracked, modified, added, deleted or missing files + Column two will be one of these values: + * M : Modified : modified, added, deleted or missing files * : blank / space : clean * - : dash : no meaningful state, for empty repositories - Colunm three will be one of these values: + Column three will be one of these values: * o : optional : optionally repository * : blank / space : required repository @@ -154,6 +155,8 @@ The root of the source tree will be referred to as `${SRC_ROOT}` below. * tag (string) : tag to checkout + This can also be a git SHA-1 + * branch (string) : branch to checkout Note: either tag or branch must be supplied, but not both. 
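To make the field list above concrete, here is an illustrative externals description entry. The section name, URL and tag are made up; per the note above, exactly one of tag or branch would be given, and the tag value may also be a full git SHA-1:

```
[example-component]
local_path = components/example
protocol = git
repo_url = https://github.com/ESCOMP/example-component
tag = example-component_v1.0.0
required = True
```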
diff --git a/checkout_externals b/checkout_externals index 0167049bff..a0698baef0 100755 --- a/checkout_externals +++ b/checkout_externals @@ -10,7 +10,6 @@ from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function -import logging import sys import traceback @@ -26,11 +25,6 @@ if sys.hexversion < 0x02070000: if __name__ == '__main__': - logging.basicConfig(filename=manic.global_constants.LOG_FILE_NAME, - format='%(levelname)s : %(asctime)s : %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', - level=logging.DEBUG) - ARGS = manic.checkout.commandline_arguments() try: RET_STATUS, _ = manic.checkout.main(ARGS) diff --git a/manic/checkout.py b/manic/checkout.py index 6d0108b656..be5a4864a4 100755 --- a/manic/checkout.py +++ b/manic/checkout.py @@ -15,14 +15,13 @@ import os import os.path import sys -import textwrap from manic.externals_description import create_externals_description from manic.externals_description import read_externals_description_file from manic.externals_status import check_safe_to_update_repos from manic.sourcetree import SourceTree from manic.utils import printlog -from manic.global_constants import VERSION_SEPERATOR +from manic.global_constants import VERSION_SEPERATOR, LOG_FILE_NAME if sys.hexversion < 0x02070000: print(70 * '*') @@ -93,8 +92,8 @@ def commandline_arguments(args=None): If there are *any* modifications to *any* working copy according to the git or svn 'status' command, %(prog)s will not update any external repositories. Modifications - include: modified files, added files, removed files, missing - files or untracked files, + include: modified files, added files, removed files, or missing + files. * Checkout all required components from a user specified externals description file: @@ -108,7 +107,7 @@ def commandline_arguments(args=None): $ ./manage_externals/%(prog)s --status ./cime - m ./components/cism + s ./components/cism ./components/mosart e-o ./components/rtm M ./src/fates @@ -121,17 +120,18 @@ def commandline_arguments(args=None): * column two indicates whether the working copy has modified files. * column three shows how the repository is managed, optional or required - Colunm one will be one of these values: - * m : modified : repository is modefied compared to the externals description + Column one will be one of these values: + * s : out-of-sync : repository is checked out at a different commit + compared with the externals description * e : empty : directory does not exist - %(prog)s has not been run * ? : unknown : directory exists but .git or .svn directories are missing - Colunm two will be one of these values: - * M : Modified : untracked, modified, added, deleted or missing files + Column two will be one of these values: + * M : Modified : modified, added, deleted or missing files * : blank / space : clean * - : dash : no meaningful state, for empty repositories - Colunm three will be one of these values: + Column three will be one of these values: * o : optional : optionally repository * : blank / space : required repository @@ -180,6 +180,8 @@ def commandline_arguments(args=None): * tag (string) : tag to checkout + This can also be a git SHA-1 + * branch (string) : branch to checkout Note: either tag or branch must be supplied, but not both. @@ -214,9 +216,11 @@ def commandline_arguments(args=None): '%(prog)s. By default only summary information ' 'is provided. 
Use verbose output to see details.') - parser.add_argument('-v', '--verbose', action='store_true', default=False, + parser.add_argument('-v', '--verbose', action='count', default=0, help='Output additional information to ' - 'the screen and log file.') + 'the screen and log file. This flag can be ' + 'used up to two times, increasing the ' + 'verbosity level each time.') # # developer options @@ -229,6 +233,9 @@ def commandline_arguments(args=None): help='DEVELOPER: output additional debugging ' 'information to the screen and log file.') + parser.add_argument('--no-logging', action='store_true', + help='DEVELOPER: disable logging.') + if args: options = parser.parse_args(args) else: @@ -247,7 +254,14 @@ def main(args): Parse externals file and load required repositories or all repositories if the --all option is passed. """ - logging.info('Begining of checkout_externals') + if not args.no_logging: + logging.basicConfig(filename=LOG_FILE_NAME, + format='%(levelname)s : %(asctime)s : %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + level=logging.DEBUG) + + program_name = os.path.basename(sys.argv[0]) + logging.info('Beginning of %s', program_name) load_all = False if args.optional: @@ -265,31 +279,37 @@ def main(args): if args.status: # user requested status-only for comp in sorted(tree_status.keys()): - msg = str(tree_status[comp]) - printlog(msg) - if args.verbose: - # user requested verbose status dump of the git/svn status commands - source_tree.verbose_status() + tree_status[comp].log_status_message(args.verbose) else: # checkout / update the external repositories. safe_to_update = check_safe_to_update_repos(tree_status) if not safe_to_update: # print status for comp in sorted(tree_status.keys()): - msg = str(tree_status[comp]) - printlog(msg) + tree_status[comp].log_status_message(args.verbose) # exit gracefully - msg = textwrap.fill( - 'Some external repositories that are not in a clean ' - 'state. Please ensure all external repositories are clean ' - 'before updating.') + msg = """The external repositories labeled with 'M' above are not in a clean state. + +The following are two options for how to proceed: + +(1) Go into each external that is not in a clean state and issue either + an 'svn status' or a 'git status' command. Either revert or commit + your changes so that all externals are in a clean state. (Note, + though, that it is okay to have untracked files in your working + directory.) Then rerun {program_name}. + +(2) Alternatively, you do not have to rely on {program_name}. Instead, you + can manually update out-of-sync externals (labeled with 's' above) + as described in the configuration file {config_file}. 
+""".format(program_name=program_name, config_file=args.externals) + printlog('-' * 70) printlog(msg) printlog('-' * 70) else: - source_tree.checkout(load_all) + source_tree.checkout(args.verbose, load_all) printlog('') - logging.info('checkout_externals completed without exceptions.') + logging.info('%s completed without exceptions.', program_name) # NOTE(bja, 2017-11) tree status is used by the systems tests return 0, tree_status diff --git a/manic/externals_status.py b/manic/externals_status.py index bae2cfd6f1..d3d238f289 100644 --- a/manic/externals_status.py +++ b/manic/externals_status.py @@ -9,6 +9,8 @@ from __future__ import print_function from .global_constants import EMPTY_STR +from .utils import printlog, indent_string +from .global_constants import VERBOSITY_VERBOSE, VERBOSITY_DUMP class ExternalStatus(object): @@ -30,7 +32,7 @@ class ExternalStatus(object): DEFAULT = '-' UNKNOWN = '?' EMPTY = 'e' - MODEL_MODIFIED = 'm' + MODEL_MODIFIED = 's' # a.k.a. out-of-sync DIRTY = 'M' STATUS_OK = ' ' @@ -46,12 +48,48 @@ def __init__(self): self.clean_state = self.DEFAULT self.source_type = self.DEFAULT self.path = EMPTY_STR + self.current_version = EMPTY_STR + self.expected_version = EMPTY_STR + self.status_output = EMPTY_STR - def __str__(self): + def log_status_message(self, verbosity): + """Write status message to the screen and log file + """ + self._default_status_message() + if verbosity >= VERBOSITY_VERBOSE: + self._verbose_status_message() + if verbosity >= VERBOSITY_DUMP: + self._dump_status_message() + + def _default_status_message(self): + """Return the default terse status message string + """ msg = '{sync}{clean}{src_type} {path}'.format( sync=self.sync_state, clean=self.clean_state, src_type=self.source_type, path=self.path) - return msg + printlog(msg) + + def _verbose_status_message(self): + """Return the verbose status message string + """ + clean_str = self.DEFAULT + if self.clean_state == self.STATUS_OK: + clean_str = 'clean sandbox' + elif self.clean_state == self.DIRTY: + clean_str = 'modified sandbox' + + sync_str = 'on {0}'.format(self.current_version) + if self.sync_state != self.STATUS_OK: + sync_str = '{current} --> {expected}'.format( + current=self.current_version, expected=self.expected_version) + msg = ' {clean}, {sync}'.format(clean=clean_str, sync=sync_str) + printlog(msg) + + def _dump_status_message(self): + """Return the dump status message string + """ + msg = indent_string(self.status_output, 12) + printlog(msg) def safe_to_update(self): """Report if it is safe to update a repository. Safe is defined as: diff --git a/manic/global_constants.py b/manic/global_constants.py index c71c29071a..0e91cffc90 100644 --- a/manic/global_constants.py +++ b/manic/global_constants.py @@ -12,3 +12,7 @@ VERSION_SEPERATOR = '.' 
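The new log_status_message method replaces __str__ and is keyed off the integer verbosity constants added to global_constants.py in the next hunk. A small usage sketch, assuming the manic package is importable; the path and version strings are invented:

```
from manic.externals_status import ExternalStatus
from manic.global_constants import VERBOSITY_VERBOSE

stat = ExternalStatus()                 # normally filled in by a repository class
stat.path = './components/example'
stat.sync_state = ExternalStatus.MODEL_MODIFIED   # 's', out-of-sync
stat.clean_state = ExternalStatus.STATUS_OK
stat.current_version = 'tag_v1.0'
stat.expected_version = 'tag_v1.1'

# Verbosity 0 prints only the terse one-line summary; VERBOSITY_VERBOSE (1)
# adds a 'clean sandbox, tag_v1.0 --> tag_v1.1' detail line; VERBOSITY_DUMP (2)
# would additionally dump the raw status output saved in status_output.
stat.log_status_message(VERBOSITY_VERBOSE)
```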
LOG_FILE_NAME = 'manage_externals.log' PPRINTER = pprint.PrettyPrinter(indent=4) + +VERBOSITY_DEFAULT = 0 +VERBOSITY_VERBOSE = 1 +VERBOSITY_DUMP = 2 diff --git a/manic/repository.py b/manic/repository.py index 8025eb6ac8..9baa0667ca 100644 --- a/manic/repository.py +++ b/manic/repository.py @@ -30,7 +30,7 @@ def __init__(self, component_name, repo): if self._tag is not EMPTY_STR and self._branch is not EMPTY_STR: fatal_error('repo cannot have both a tag and a branch element') - def checkout(self, base_dir_path, repo_dir_name): # pylint: disable=unused-argument + def checkout(self, base_dir_path, repo_dir_name, verbosity): # pylint: disable=unused-argument """ If the repo destination directory exists, ensure it is correct (from correct URL, correct branch or tag), and possibly update the source. @@ -49,14 +49,6 @@ def status(self, stat, repo_dir_path): # pylint: disable=unused-argument 'repository classes! {0}'.format(self.__class__.__name__)) fatal_error(msg) - def verbose_status(self, repo_dir_path): # pylint: disable=unused-argument - """Display the raw repo status to the user. - - """ - msg = ('DEV_ERROR: status method must be implemented in all ' - 'repository classes! {0}'.format(self.__class__.__name__)) - fatal_error(msg) - def url(self): """Public access of repo url. """ diff --git a/manic/repository_git.py b/manic/repository_git.py index be330ae724..d1198796ed 100644 --- a/manic/repository_git.py +++ b/manic/repository_git.py @@ -10,10 +10,11 @@ import re from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR +from .global_constants import VERBOSITY_VERBOSE from .repository import Repository from .externals_status import ExternalStatus from .utils import expand_local_url, split_remote_url, is_remote_url -from .utils import log_process_output, fatal_error +from .utils import fatal_error, printlog from .utils import execute_subprocess @@ -56,7 +57,7 @@ def __init__(self, component_name, repo): # Public API, defined by Repository # # ---------------------------------------------------------------- - def checkout(self, base_dir_path, repo_dir_name): + def checkout(self, base_dir_path, repo_dir_name, verbosity): """ If the repo destination directory exists, ensure it is correct (from correct URL, correct branch or tag), and possibly update the source. @@ -65,38 +66,31 @@ def checkout(self, base_dir_path, repo_dir_name): """ repo_dir_path = os.path.join(base_dir_path, repo_dir_name) if not os.path.exists(repo_dir_path): - self._clone_repo(base_dir_path, repo_dir_name) - self._checkout_ref(repo_dir_path) + self._clone_repo(base_dir_path, repo_dir_name, verbosity) + self._checkout_ref(repo_dir_path, verbosity) def status(self, stat, repo_dir_path): """ If the repo destination directory exists, ensure it is correct (from correct URL, correct branch or tag), and possibly update the source. - If the repo destination directory does not exist, checkout the correce + If the repo destination directory does not exist, checkout the correct branch or tag. """ self._check_sync(stat, repo_dir_path) if os.path.exists(repo_dir_path): self._status_summary(stat, repo_dir_path) - def verbose_status(self, repo_dir_path): - """Display the raw repo status to the user. 
- - """ - if os.path.exists(repo_dir_path): - self._status_verbose(repo_dir_path) - # ---------------------------------------------------------------- # # Internal work functions # # ---------------------------------------------------------------- - def _clone_repo(self, base_dir_path, repo_dir_name): + def _clone_repo(self, base_dir_path, repo_dir_name, verbosity): """Prepare to execute the clone by managing directory location """ cwd = os.getcwd() os.chdir(base_dir_path) - self._git_clone(self._url, repo_dir_name) + self._git_clone(self._url, repo_dir_name, verbosity) os.chdir(cwd) def _current_ref_from_branch_command(self, git_output): @@ -163,6 +157,7 @@ def _current_ref_from_branch_command(self, git_output): current_ref = match.group(1) except BaseException: msg = 'DEV_ERROR: regex to detect tracking branch failed.' + fatal_error(msg) else: # assumed local branch current_ref = ref.split()[1] @@ -214,25 +209,31 @@ def compare_refs(current_ref, expected_ref): cwd = os.getcwd() os.chdir(repo_dir_path) + git_output = self._git_branch_vv() current_ref = self._current_ref_from_branch_command(git_output) - if current_ref == EMPTY_STR: - stat.sync_state = ExternalStatus.UNKNOWN - elif self._branch: + + if self._branch: if self._url == LOCAL_PATH_INDICATOR: expected_ref = self._branch - stat.sync_state = compare_refs(current_ref, expected_ref) else: remote_name = self._determine_remote_name() if not remote_name: # git doesn't know about this remote. by definition - # this is a modefied state. - stat.sync_state = ExternalStatus.MODEL_MODIFIED + # this is a modified state. + expected_ref = "unknown_remote/{0}".format(self._branch) else: expected_ref = "{0}/{1}".format(remote_name, self._branch) - stat.sync_state = compare_refs(current_ref, expected_ref) else: - stat.sync_state = compare_refs(current_ref, self._tag) + expected_ref = self._tag + + stat.sync_state = compare_refs(current_ref, expected_ref) + if current_ref == EMPTY_STR: + stat.sync_state = ExternalStatus.UNKNOWN + + stat.current_version = current_ref + stat.expected_version = expected_ref + os.chdir(cwd) def _determine_remote_name(self): @@ -305,19 +306,19 @@ def _create_remote_name(self): remote_name = "{0}_{1}".format(base_name, repo_name) return remote_name - def _checkout_ref(self, repo_dir): + def _checkout_ref(self, repo_dir, verbosity): """Checkout the user supplied reference """ # import pdb; pdb.set_trace() cwd = os.getcwd() os.chdir(repo_dir) if self._url.strip() == LOCAL_PATH_INDICATOR: - self._checkout_local_ref() + self._checkout_local_ref(verbosity) else: - self._checkout_external_ref() + self._checkout_external_ref(verbosity) os.chdir(cwd) - def _checkout_local_ref(self): + def _checkout_local_ref(self, verbosity): """Checkout the reference considering the local repo only. Do not fetch any additional remotes or specify the remote when checkout out the ref. 
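The _check_sync hunk earlier in this diff is easier to follow in isolation. A paraphrase of how the expected reference is now formed, leaving out the LOCAL_PATH_INDICATOR special case; the branch, tag and remote names are examples only:

```
branch = 'feature2'        # example values; one of branch/tag is empty in practice
tag = ''
remote_name = 'origin'

if branch:
    if remote_name:
        expected_ref = '{0}/{1}'.format(remote_name, branch)   # 'origin/feature2'
    else:
        # git does not know about the remote, so the sandbox is by
        # definition out of sync with the externals description
        expected_ref = 'unknown_remote/{0}'.format(branch)
else:
    expected_ref = tag      # e.g. a tag name or SHA-1

print(expected_ref)
```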
@@ -328,9 +329,9 @@ def _checkout_local_ref(self): else: ref = self._branch self._check_for_valid_ref(ref) - self._git_checkout_ref(ref) + self._git_checkout_ref(ref, verbosity) - def _checkout_external_ref(self): + def _checkout_external_ref(self, verbosity): """Checkout the reference from a remote repository """ remote_name = self._determine_remote_name() @@ -348,7 +349,7 @@ def _checkout_external_ref(self): ref = self._tag else: ref = '{0}/{1}'.format(remote_name, self._branch) - self._git_checkout_ref(ref) + self._git_checkout_ref(ref, verbosity) def _check_for_valid_ref(self, ref): """Try some basic sanity checks on the user supplied reference so we @@ -500,23 +501,16 @@ def _status_summary(self, stat, repo_dir_path): cwd = os.getcwd() os.chdir(repo_dir_path) git_output = self._git_status_porcelain_v1z() - os.chdir(cwd) is_dirty = self._status_v1z_is_dirty(git_output) if is_dirty: stat.clean_state = ExternalStatus.DIRTY else: stat.clean_state = ExternalStatus.STATUS_OK - def _status_verbose(self, repo_dir_path): - """Display raw git status output to the user - - """ - cwd = os.getcwd() - os.chdir(repo_dir_path) - git_output = self._git_status_verbose() + # Now save the verbose status output incase the user wants to + # see it. + stat.status_output = self._git_status_verbose() os.chdir(cwd) - log_process_output(git_output) - print(git_output) @staticmethod def _status_v1z_is_dirty(git_output): @@ -526,11 +520,13 @@ def _status_v1z_is_dirty(git_output): * modified files * missing files * added files - * untracked files * removed * renamed * unmerged + Whether untracked files are considered depends on how the status + command was run (i.e., whether it was run with the '-u' option). + NOTE: Based on the above definition, the porcelain status should be an empty string to be considered 'clean'. Of course this assumes we only get an empty string from an status @@ -606,11 +602,13 @@ def _git_revparse_commit(ref): def _git_status_porcelain_v1z(): """Run git status to obtain repository information. - The machine parable format that is guarenteed not to change + This is run with '--untracked=no' to ignore untracked files. + + The machine-portable format that is guaranteed not to change between git versions or *user configuration*. """ - cmd = ['git', 'status', '--porcelain', '-z'] + cmd = ['git', 'status', '--untracked-files=no', '--porcelain', '-z'] git_output = execute_subprocess(cmd, output_to_caller=True) return git_output @@ -636,10 +634,12 @@ def _git_remote_verbose(): # # ---------------------------------------------------------------- @staticmethod - def _git_clone(url, repo_dir_name): + def _git_clone(url, repo_dir_name, verbosity): """Run git clone for the side effect of creating a repository. 
""" cmd = ['git', 'clone', url, repo_dir_name] + if verbosity >= VERBOSITY_VERBOSE: + printlog(' {0}'.format(' '.join(cmd))) execute_subprocess(cmd) @staticmethod @@ -653,11 +653,11 @@ def _git_remote_add(name, url): def _git_fetch(remote_name): """Run the git fetch command to for the side effect of updating the repo """ - cmd = ['git', 'fetch', remote_name] + cmd = ['git', 'fetch', '--tags', remote_name] execute_subprocess(cmd) @staticmethod - def _git_checkout_ref(ref): + def _git_checkout_ref(ref, verbosity): """Run the git checkout command to for the side effect of updating the repo Param: ref is a reference to a local or remote object in the @@ -665,4 +665,6 @@ def _git_checkout_ref(ref): """ cmd = ['git', 'checkout', ref] + if verbosity >= VERBOSITY_VERBOSE: + printlog(' {0}'.format(' '.join(cmd))) execute_subprocess(cmd) diff --git a/manic/repository_svn.py b/manic/repository_svn.py index 27617ca0c9..b11d36e662 100644 --- a/manic/repository_svn.py +++ b/manic/repository_svn.py @@ -9,9 +9,10 @@ import re import xml.etree.ElementTree as ET +from .global_constants import EMPTY_STR, VERBOSITY_VERBOSE from .repository import Repository from .externals_status import ExternalStatus -from .utils import fatal_error, log_process_output +from .utils import fatal_error, indent_string, printlog from .utils import execute_subprocess @@ -54,7 +55,7 @@ def __init__(self, component_name, repo): # Public API, defined by Repository # # ---------------------------------------------------------------- - def checkout(self, base_dir_path, repo_dir_name): + def checkout(self, base_dir_path, repo_dir_name, verbosity): """Checkout or update the working copy If the repo destination directory exists, switch the sandbox to @@ -68,10 +69,15 @@ def checkout(self, base_dir_path, repo_dir_name): if os.path.exists(repo_dir_path): cwd = os.getcwd() os.chdir(repo_dir_path) - self._svn_switch(self._url) + self._svn_switch(self._url, verbosity) + # svn switch can lead to a conflict state, but it gives a + # return code of 0. So now we need to make sure that we're + # in a clean (non-conflict) state. + self._abort_if_dirty(repo_dir_path, + "Expected clean state following switch") os.chdir(cwd) else: - self._svn_checkout(self._url, repo_dir_path) + self._svn_checkout(self._url, repo_dir_path, verbosity) def status(self, stat, repo_dir_path): """ @@ -80,14 +86,6 @@ def status(self, stat, repo_dir_path): self._check_sync(stat, repo_dir_path) if os.path.exists(repo_dir_path): self._status_summary(stat, repo_dir_path) - return stat - - def verbose_status(self, repo_dir_path): - """Display the raw repo status to the user. - - """ - if os.path.exists(repo_dir_path): - self._status_verbose(repo_dir_path) # ---------------------------------------------------------------- # @@ -110,7 +108,40 @@ def _check_sync(self, stat, repo_dir_path): # directory removed or incomplete checkout? stat.sync_state = ExternalStatus.UNKNOWN else: - stat.sync_state = self._check_url(svn_output, self._url) + stat.sync_state, stat.current_version = \ + self._check_url(svn_output, self._url) + stat.expected_version = '/'.join(self._url.split('/')[3:]) + + def _abort_if_dirty(self, repo_dir_path, message): + """Check if the repo is in a dirty state; if so, abort with a + helpful message. 
+ + """ + + stat = ExternalStatus() + self._status_summary(stat, repo_dir_path) + if stat.clean_state != ExternalStatus.STATUS_OK: + status = self._svn_status_verbose(repo_dir_path) + status = indent_string(status, 4) + errmsg = """In directory + {cwd} + +svn status now shows: +{status} + +ERROR: {message} + +One possible cause of this problem is that there may have been untracked +files in your working directory that had the same name as tracked files +in the new revision. + +To recover: Clean up the above directory (resolving conflicts, etc.), +then rerun checkout_externals. +""".format(cwd=repo_dir_path, + message=message, + status=status) + + fatal_error(errmsg) @staticmethod def _check_url(svn_output, expected_url): @@ -129,7 +160,13 @@ def _check_url(svn_output, expected_url): status = ExternalStatus.STATUS_OK else: status = ExternalStatus.MODEL_MODIFIED - return status + + if url: + current_version = '/'.join(url.split('/')[3:]) + else: + current_version = EMPTY_STR + + return status, current_version def _status_summary(self, stat, repo_dir_path): """Report whether the svn repository is in-sync with the model @@ -143,13 +180,9 @@ def _status_summary(self, stat, repo_dir_path): else: stat.clean_state = ExternalStatus.STATUS_OK - def _status_verbose(self, repo_dir_path): - """Display the raw svn status output to the user. - - """ - svn_output = self._svn_status_verbose(repo_dir_path) - log_process_output(svn_output) - print(svn_output) + # Now save the verbose status output incase the user wants to + # see it. + stat.status_output = self._svn_status_verbose(repo_dir_path) @staticmethod def xml_status_is_dirty(svn_output): @@ -160,13 +193,15 @@ def xml_status_is_dirty(svn_output): * added files * deleted files * missing files - * unversioned files - The only acceptable state returned from svn is 'external' + Unversioned files do not affect the clean/dirty status. + + 'external' is also an acceptable state """ # pylint: disable=invalid-name SVN_EXTERNAL = 'external' + SVN_UNVERSIONED = 'unversioned' # pylint: enable=invalid-name is_dirty = False @@ -176,8 +211,13 @@ def xml_status_is_dirty(svn_output): for entry in entries: status = entry.find('./wc-status') item = status.get('item') - if item != SVN_EXTERNAL: + if item == SVN_EXTERNAL: + continue + if item == SVN_UNVERSIONED: + continue + else: is_dirty = True + break return is_dirty # ---------------------------------------------------------------- @@ -216,17 +256,21 @@ def _svn_status_xml(repo_dir_path): # # ---------------------------------------------------------------- @staticmethod - def _svn_checkout(url, repo_dir_path): + def _svn_checkout(url, repo_dir_path, verbosity): """ Checkout a subversion repository (repo_url) to checkout_dir. 
""" cmd = ['svn', 'checkout', url, repo_dir_path] + if verbosity >= VERBOSITY_VERBOSE: + printlog(' {0}'.format(' '.join(cmd))) execute_subprocess(cmd) @staticmethod - def _svn_switch(url): + def _svn_switch(url, verbosity): """ Switch branches for in an svn sandbox """ cmd = ['svn', 'switch', url] + if verbosity >= VERBOSITY_VERBOSE: + printlog(' {0}'.format(' '.join(cmd))) execute_subprocess(cmd) diff --git a/manic/sourcetree.py b/manic/sourcetree.py index 20d124ea74..1f2d5a59e4 100644 --- a/manic/sourcetree.py +++ b/manic/sourcetree.py @@ -14,6 +14,7 @@ from .externals_status import ExternalStatus from .utils import fatal_error, printlog from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR +from .global_constants import VERBOSITY_VERBOSE class _External(object): @@ -41,6 +42,7 @@ def __init__(self, root_dir, name, ext_description): self._repo = None self._externals = EMPTY_STR self._externals_sourcetree = None + self._stat = ExternalStatus() # Parse the sub-elements # _path : local path relative to the containing source tree @@ -85,29 +87,37 @@ def status(self): If load_all is True, also load all of the the externals sub-externals. """ - stat = ExternalStatus() - stat.path = self.get_local_path() + self._stat.path = self.get_local_path() if not self._required: - stat.source_type = ExternalStatus.OPTIONAL + self._stat.source_type = ExternalStatus.OPTIONAL elif self._local_path == LOCAL_PATH_INDICATOR: # LOCAL_PATH_INDICATOR, '.' paths, are standalone # component directories that are not managed by # checkout_externals. - stat.source_type = ExternalStatus.STANDALONE + self._stat.source_type = ExternalStatus.STANDALONE else: # managed by checkout_externals - stat.source_type = ExternalStatus.MANAGED + self._stat.source_type = ExternalStatus.MANAGED ext_stats = {} if not os.path.exists(self._repo_dir_path): - stat.sync_state = ExternalStatus.EMPTY + self._stat.sync_state = ExternalStatus.EMPTY msg = ('status check: repository directory for "{0}" does not ' 'exist.'.format(self._name)) logging.info(msg) + self._stat.current_version = 'not checked out' + # NOTE(bja, 2018-01) directory doesn't exist, so we cannot + # use repo to determine the expected version. We just take + # a best-guess based on the assumption that only tag or + # branch should be set, but not both. + if not self._repo: + self._stat.expected_version = 'unknown' + else: + self._stat.expected_version = self._repo.tag() + self._repo.branch() else: if self._repo: - self._repo.status(stat, self._repo_dir_path) + self._repo.status(self._stat, self._repo_dir_path) if self._externals and self._externals_sourcetree: # we expect externals and they exist @@ -124,30 +134,14 @@ def status(self): if self._local_path != LOCAL_PATH_INDICATOR: # store the stats under tha local_path, not comp name so # it will be sorted correctly - all_stats[stat.path] = stat + all_stats[self._stat.path] = self._stat if ext_stats: all_stats.update(ext_stats) return all_stats - def verbose_status(self): - """Display the verbose status to the user. This is just the raw output - from the repository 'status' command. 
- - """ - if not os.path.exists(self._repo_dir_path): - msg = ('status check: repository directory for "{0}" does not ' - 'exist!'.format(self._name)) - logging.info(msg) - else: - cwd = os.getcwd() - os.chdir(self._repo_dir_path) - if self._repo: - self._repo.verbose_status(self._repo_dir_path) - os.chdir(cwd) - - def checkout(self, load_all): + def checkout(self, verbosity, load_all): """ If the repo destination directory exists, ensure it is correct (from correct URL, correct branch or tag), and possibly update the external. @@ -170,16 +164,32 @@ def checkout(self, load_all): self._base_dir_path) fatal_error(msg) - if self._repo: - self._repo.checkout(self._base_dir_path, self._repo_dir_name) + if self._stat.source_type != ExternalStatus.STANDALONE: + if verbosity >= VERBOSITY_VERBOSE: + # NOTE(bja, 2018-01) probably do not want to pass + # verbosity in this case, because if (verbosity == + # VERBOSITY_DUMP), then the previous status output would + # also be dumped, adding noise to the output. + self._stat.log_status_message(VERBOSITY_VERBOSE) - def checkout_externals(self, load_all): + if self._repo: + if self._stat.sync_state == ExternalStatus.STATUS_OK: + # If we're already in sync, avoid showing verbose output + # from the checkout command, unless the verbosity level + # is 2 or more. + checkout_verbosity = verbosity - 1 + else: + checkout_verbosity = verbosity + self._repo.checkout(self._base_dir_path, + self._repo_dir_name, checkout_verbosity) + + def checkout_externals(self, verbosity, load_all): """Checkout the sub-externals for this object """ if self._externals: if not self._externals_sourcetree: self._create_externals_sourcetree() - self._externals_sourcetree.checkout(load_all) + self._externals_sourcetree.checkout(verbosity, load_all) def _create_externals_sourcetree(self): """ @@ -259,16 +269,7 @@ def status(self, relative_path_base=LOCAL_PATH_INDICATOR): return summary - def verbose_status(self): - """Display verbose status to the user. This is just the raw output of - the git and svn status commands. - - """ - load_comps = self._all_components.keys() - for comp in load_comps: - self._all_components[comp].verbose_status() - - def checkout(self, load_all, load_comp=None): + def checkout(self, verbosity, load_all, load_comp=None): """ Checkout or update indicated components into the the configured subdirs. @@ -277,7 +278,11 @@ def checkout(self, load_all, load_comp=None): If load_all is False, load_comp is an optional set of components to load. If load_all is True and load_comp is None, only load the required externals. """ - printlog('Checking out externals: ', end='') + if verbosity >= VERBOSITY_VERBOSE: + printlog('Checking out externals: ') + else: + printlog('Checking out externals: ', end='') + if load_all: load_comps = self._all_components.keys() elif load_comp is not None: @@ -287,10 +292,15 @@ def checkout(self, load_all, load_comp=None): # checkout the primary externals for comp in load_comps: - printlog('{0}, '.format(comp), end='') - self._all_components[comp].checkout(load_all) + if verbosity < VERBOSITY_VERBOSE: + printlog('{0}, '.format(comp), end='') + else: + # verbose output handled by the _External object, just + # output a newline + printlog(EMPTY_STR) + self._all_components[comp].checkout(verbosity, load_all) printlog('') # now give each external an opportunitity to checkout it's externals. 
for comp in load_comps: - self._all_components[comp].checkout_externals(load_all) + self._all_components[comp].checkout_externals(verbosity, load_all) diff --git a/manic/utils.py b/manic/utils.py index c63abcba0d..04f037fd70 100644 --- a/manic/utils.py +++ b/manic/utils.py @@ -10,7 +10,6 @@ import logging import os -import string import subprocess import sys @@ -18,7 +17,7 @@ # --------------------------------------------------------------------- # -# screen and logging output +# screen and logging output and functions to massage text for output # # --------------------------------------------------------------------- @@ -48,11 +47,57 @@ def printlog(msg, **kwargs): sys.stdout.flush() +def last_n_lines(the_string, n_lines, truncation_message=None): + """Returns the last n lines of the given string + + Args: + the_string: str + n_lines: int + truncation_message: str, optional + + Returns a string containing the last n lines of the_string + + If truncation_message is provided, the returned string begins with + the given message if and only if the string is greater than n lines + to begin with. + """ + + lines = the_string.splitlines(True) + if len(lines) <= n_lines: + return the_string + else: + lines_subset = lines[-n_lines:] + str_truncated = ''.join(lines_subset) + if truncation_message: + str_truncated = truncation_message + '\n' + str_truncated + return str_truncated + + +def indent_string(the_string, indent_level): + """Indents the given string by a given number of spaces + + Args: + the_string: str + indent_level: int + + Returns a new string that is the same as the_string, except that + each line is indented by 'indent_level' spaces. + + In python3, this can be done with textwrap.indent. + """ + + lines = the_string.splitlines(True) + padding = ' ' * indent_level + lines_indented = [padding + line for line in lines] + return ''.join(lines_indented) + # --------------------------------------------------------------------- # # error handling # # --------------------------------------------------------------------- + + def fatal_error(message): """ Error output function @@ -199,10 +244,12 @@ def execute_subprocess(commands, status_to_caller=False, # responsibility determine if an error occurred and handle it # appropriately. if not return_to_caller: + msg_context = ('Process did not run successfully; ' + 'returned status {0}'.format(error.returncode)) msg = failed_command_msg( - 'Called process did not run successfully.\n' - 'Returned status: {0}'.format(error.returncode), - commands) + msg_context, + commands, + output=error.output) logging.error(error) logging.error(msg) log_process_output(error.output) @@ -221,17 +268,33 @@ def execute_subprocess(commands, status_to_caller=False, return ret_value -def failed_command_msg(msg_context, command): +def failed_command_msg(msg_context, command, output=None): """Template for consistent error messages from subprocess calls. + + If 'output' is given, it should provide the output from the failed + command """ - error_msg = string.Template("""$context -Failed command: - $command -Please check the log file "$log" for more details.""") - values = { - 'context': msg_context, - 'command': ' '.join(command), - 'log': LOG_FILE_NAME, - } - msg = error_msg.substitute(values) - return msg + + if output: + output_truncated = last_n_lines(output, 20, + truncation_message='[... 
Output truncated for brevity ...]') + errmsg = ('Failed with output:\n' + + indent_string(output_truncated, 4) + + '\nERROR: ') + else: + errmsg = '' + + command_str = ' '.join(command) + errmsg += """In directory + {cwd} +{context}: + {command} +""".format(cwd=os.getcwd(), context=msg_context, command=command_str) + + if output: + errmsg += 'See above for output from failed command.\n' + + errmsg += 'Please check the log file {log} for more details.'.format( + log=LOG_FILE_NAME) + + return errmsg diff --git a/test/Makefile b/test/Makefile index 6acb8194a9..0507597ab3 100644 --- a/test/Makefile +++ b/test/Makefile @@ -85,9 +85,12 @@ style : FORCE $(AUTOPEP8) $(AUTOPEP8_ARGS) --recursive $(SRC) $(TEST_DIR)/test_*.py .PHONY : lint -lint : style +lint : FORCE $(PYLINT) $(PYLINT_ARGS) $(SRC) $(TEST_DIR)/test_*.py +.PHONY : stylint +stylint : style lint + .PHONY : coverage coverage : FORCE $(PYPATH) $(COVERAGE) erase diff --git a/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 b/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 new file mode 100644 index 0000000000000000000000000000000000000000..b6284f8413d5d18a7a3efc746b340b0f6e877452 GIT binary patch literal 164 zcmV;V09*ff0hNwR4gw(%1zmHBzJP`X8ivFe^(K589hJclCSKq12<~1gsme>O)_H(Z zem6QZm^^8RnmiI`ubHzgrPye+FKRN0H9F;O5%17>8Q`NMJ?ehWT|!t)2i0Np3Z=u$N9svC-|`;J-!jY5fUp SfzGuJhQeX2oy8Y4sYkDN{z{Sn literal 0 HcmV?d00001 diff --git a/test/repos/simple-ext-fork.git/objects/4d/837135915ed93eed6fff6b439f284ce317296f b/test/repos/simple-ext-fork.git/objects/4d/837135915ed93eed6fff6b439f284ce317296f new file mode 100644 index 0000000000000000000000000000000000000000..7f3bcb88c4de3df16345072464740e276036c399 GIT binary patch literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn%LNPZurX=iP=VtPL2T>`g? zkh3=;83|{%kTn0{lH8#Nev_`XVPmImRbRpwOIgehnBL{IWwXg literal 0 HcmV?d00001 diff --git a/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 b/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 new file mode 100644 index 0000000000..d8ba654548 --- /dev/null +++ b/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 @@ -0,0 +1,3 @@ +xUÌ[ +Â0…aŸ³ŠÙ@%Is+ˆ¨;™¤c/˜DÂq÷VðÅ×Ã>Æ ”w‡WJ Ú˜>8ò!¤!&'ƒS=)í±×CòF+ÑI2‚ßO‚Ts^Xðn`Ä2ÖBcw'ä­Ñw¨Á +\ËØNqÝ›F—)ãò8îç3(«¬Œ2:é¥ÿü0x-<×!6,i ª9 \ No newline at end of file diff --git a/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature b/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature new file mode 100644 index 0000000000..8a18bf08e9 --- /dev/null +++ b/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature @@ -0,0 +1 @@ +a42fe9144f5707bc1e9515ce1b44681f7aba6f95 diff --git a/test/test_sys_checkout.py b/test/test_sys_checkout.py index d988d83c87..adf6470cdf 100644 --- a/test/test_sys_checkout.py +++ b/test/test_sys_checkout.py @@ -34,6 +34,7 @@ from __future__ import unicode_literals from __future__ import print_function +import logging import os import os.path import shutil @@ -44,7 +45,8 @@ from manic.externals_status import ExternalStatus from manic.repository_git import GitRepository from manic.utils import printlog, execute_subprocess -from manic.global_constants import LOCAL_PATH_INDICATOR +from manic.global_constants import LOCAL_PATH_INDICATOR, VERBOSITY_DEFAULT +from manic.global_constants import LOG_FILE_NAME from manic import checkout # ConfigParser was renamed in python2 to configparser. 
In python2, @@ -88,6 +90,10 @@ def setUpModule(): # pylint: disable=C0103 """Setup for all tests in this module. It is called once per module! """ + logging.basicConfig(filename=LOG_FILE_NAME, + format='%(levelname)s : %(asctime)s : %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + level=logging.DEBUG) repo_root = os.path.join(os.getcwd(), TMP_REPO_DIR_NAME) repo_root = os.path.abspath(repo_root) # delete if it exists from previous runs @@ -486,18 +492,31 @@ def setup_test_repo(self, parent_repo_name): dest_dir = os.path.join(os.environ[MANIC_TEST_TMP_REPO_ROOT], test_dir_name) # pylint: disable=W0212 - GitRepository._git_clone(parent_repo_dir, dest_dir) + GitRepository._git_clone(parent_repo_dir, dest_dir, VERBOSITY_DEFAULT) return dest_dir @staticmethod - def _add_file_to_repo(under_test_dir, filename): + def _add_file_to_repo(under_test_dir, filename, tracked): """Add a file to the repository so we can put it into a dirty state """ - dirty_path = os.path.join(under_test_dir, filename) - with open(dirty_path, 'w') as tmp: + cwd = os.getcwd() + os.chdir(under_test_dir) + with open(filename, 'w') as tmp: tmp.write('Hello, world!') + if tracked: + # NOTE(bja, 2018-01) brittle hack to obtain repo dir and + # file name + path_data = filename.split('/') + repo_dir = os.path.join(path_data[0], path_data[1]) + os.chdir(repo_dir) + tracked_file = path_data[2] + cmd = ['git', 'add', tracked_file] + execute_subprocess(cmd) + + os.chdir(cwd) + @staticmethod def execute_cmd_in_dir(under_test_dir, args): """Extecute the checkout command in the appropriate repo dir with the @@ -785,7 +804,9 @@ def test_container_simple_dirty(self): self._check_container_simple_required_checkout(overall, tree) # add a file to the repo - self._add_file_to_repo(under_test_dir, 'externals/simp_tag/tmp.txt') + tracked = True + self._add_file_to_repo(under_test_dir, 'externals/simp_tag/tmp.txt', + tracked) # checkout: pre-checkout status should be dirty, did not # modify working copy. @@ -798,6 +819,35 @@ def test_container_simple_dirty(self): self.status_args) self._check_container_simple_optional_st_dirty(overall, tree) + def test_container_simple_untracked(self): + """Verify that a container with simple subrepos and a untracked files + is not considered 'dirty' and will attempt an update. + + """ + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # add a file to the repo + tracked = False + self._add_file_to_repo(under_test_dir, 'externals/simp_tag/tmp.txt', + tracked) + + # checkout: pre-checkout status should be clean, ignoring the + # untracked file. + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_post_checkout(overall, tree) + + # verify status is still clean + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + def test_container_remote_branch(self): """Verify that a container with remote branch change works @@ -831,9 +881,11 @@ def test_container_remote_branch(self): self.status_args) self._check_container_simple_required_post_checkout(overall, tree) - def test_container_remote_tag(self): + def test_container_remote_tag_same_branch(self): """Verify that a container with remote tag change works. 
The new tag - should not be in the original repo, only the new remote fork. + should not be in the original repo, only the new remote + fork. The new tag is automatically fetched because it is on + the branch. """ # create repo @@ -866,6 +918,44 @@ def test_container_remote_tag(self): self.status_args) self._check_container_simple_required_post_checkout(overall, tree) + def test_container_remote_tag_fetch_all(self): + """Verify that a container with remote tag change works. The new tag + should not be in the original repo, only the new remote + fork. It should also not be on a branch that will be fetch, + and therefore not fetched by default with 'git fetch'. It will + only be retreived by 'git fetch --tags' + + """ + # create repo + under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME) + self._generator.container_simple_required(under_test_dir) + + # checkout + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_checkout(overall, tree) + + # update the config file to point to a different remote with + # the tag instead of branch. Tag MUST NOT be in the original + # repo! + self._generator.update_tag(under_test_dir, 'simp_branch', + 'abandoned-feature', SIMPLE_FORK_NAME) + + # status of simp_branch should be out of sync + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_sb_modified(overall, tree) + + # checkout new externals + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_required_sb_modified(overall, tree) + + # status should be synced + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.status_args) + self._check_container_simple_required_post_checkout(overall, tree) + def test_container_preserve_dot(self): """Verify that after inital checkout, modifying an external git repo url to '.' and the current branch will leave it unchanged. @@ -1010,6 +1100,12 @@ def _check_container_simple_svn_sb_dirty_st_mod(self, overall, tree): self._check_svn_tag_modified(tree) self._check_svn_branch_dirty(tree) + def _check_container_simple_svn_sb_clean_st_mod(self, overall, tree): + self.assertEqual(overall, 0) + self._check_simple_tag_ok(tree) + self._check_svn_tag_modified(tree) + self._check_svn_branch_ok(tree) + @staticmethod def have_svn_access(): """Check if we have svn access so we can enable tests that use svn. @@ -1062,26 +1158,33 @@ def test_container_simple_svn(self): self.status_args) self._check_container_simple_svn_post_checkout(overall, tree) - # add a file to the repo + # add an untracked file to the repo + tracked = False self._add_file_to_repo(under_test_dir, - 'externals/svn_branch/tmp.txt') + 'externals/svn_branch/tmp.txt', tracked) + + # run a no-op checkout: pre-checkout status should be clean, + # ignoring the untracked file. + overall, tree = self.execute_cmd_in_dir(under_test_dir, + self.checkout_args) + self._check_container_simple_svn_post_checkout(overall, tree) # update description file to make the branch into a tag and - # trigger a modified status + # trigger a modified sync status self._generator.update_svn_branch(under_test_dir, 'svn_tag', 'tags/cesm2.0.beta07') - # checkout: pre-checkout status should be dirty and modified, - # did not modify working copy. + # checkout: pre-checkout status should be clean and modified, + # will modify working copy. 
overall, tree = self.execute_cmd_in_dir(under_test_dir, self.checkout_args) - self._check_container_simple_svn_sb_dirty_st_mod(overall, tree) + self._check_container_simple_svn_sb_clean_st_mod(overall, tree) - # verify status is still dirty and modified with verbose, last - # checkout did not modify working dir state. + # verify status is still clean and unmodified, last + # checkout modified the working dir state. overall, tree = self.execute_cmd_in_dir(under_test_dir, self.verbose_args) - self._check_container_simple_svn_sb_dirty_st_mod(overall, tree) + self._check_container_simple_svn_post_checkout(overall, tree) class TestSysCheckoutErrors(BaseTestSysCheckout): diff --git a/test/test_unit_repository_git.py b/test/test_unit_repository_git.py index 2060911ab4..44a1353b05 100644 --- a/test/test_unit_repository_git.py +++ b/test/test_unit_repository_git.py @@ -749,6 +749,7 @@ def test_re_tracking_slash(self): class TestGitStatusPorcelain(unittest.TestCase): """Test parsing of output from git status --porcelain=v1 -z """ + # pylint: disable=C0103 GIT_STATUS_PORCELAIN_V1_ALL = ( r' D INSTALL\0MM Makefile\0M README.md\0R cmakelists.txt\0' r'CMakeLists.txt\0D commit-message-template.txt\0A stuff.txt\0' diff --git a/test/test_unit_repository_svn.py b/test/test_unit_repository_svn.py index 19616d42d4..f49e9898b8 100644 --- a/test/test_unit_repository_svn.py +++ b/test/test_unit_repository_svn.py @@ -84,16 +84,20 @@ def test_check_url_same(self): """ svn_output = SVN_INFO_MOSART expected_url = self._repo.url() - result = self._repo._check_url(svn_output, expected_url) + result, current_version = \ + self._repo._check_url(svn_output, expected_url) self.assertEqual(result, ExternalStatus.STATUS_OK) + self.assertEqual(current_version, 'mosart/trunk_tags/mosart1_0_26') def test_check_url_different(self): """Test that we correctly reject an incorrect URL. """ svn_output = SVN_INFO_CISM expected_url = self._repo.url() - result = self._repo._check_url(svn_output, expected_url) + result, current_version = \ + self._repo._check_url(svn_output, expected_url) self.assertEqual(result, ExternalStatus.MODEL_MODIFIED) + self.assertEqual(current_version, 'glc/trunk_tags/cism2_1_37') def test_check_url_none(self): """Test that we can handle an empty string for output, e.g. not an svn @@ -102,8 +106,10 @@ def test_check_url_none(self): """ svn_output = EMPTY_STR expected_url = self._repo.url() - result = self._repo._check_url(svn_output, expected_url) + result, current_version = \ + self._repo._check_url(svn_output, expected_url) self.assertEqual(result, ExternalStatus.UNKNOWN) + self.assertEqual(current_version, '') class TestSvnRepositoryCheckSync(unittest.TestCase): @@ -415,6 +421,13 @@ class TestSVNStatusXML(unittest.TestCase): props="none"> + + + + ''' @@ -447,13 +460,14 @@ def test_xml_status_dirty_deleted(self): self.assertTrue(is_dirty) def test_xml_status_dirty_unversion(self): - """Verify that svn status output is consindered dirty when there is a - unversioned file. + """Verify that svn status output ignores unversioned files when making + the clean/dirty decision. 
+ """ svn_output = self.SVN_STATUS_XML_DIRTY_UNVERSION is_dirty = SvnRepository.xml_status_is_dirty( svn_output) - self.assertTrue(is_dirty) + self.assertFalse(is_dirty) def test_xml_status_dirty_added(self): """Verify that svn status output is consindered dirty when there is a @@ -476,7 +490,7 @@ def test_xml_status_dirty_all(self): def test_xml_status_dirty_clean(self): """Verify that svn status output is consindered clean when there are - no dirty files. + no 'dirty' files. This means accepting untracked and externals. """ svn_output = self.SVN_STATUS_XML_CLEAN diff --git a/test/test_unit_utils.py b/test/test_unit_utils.py index 53f486848c..c994e58ebe 100644 --- a/test/test_unit_utils.py +++ b/test/test_unit_utils.py @@ -14,6 +14,7 @@ import os import unittest +from manic.utils import last_n_lines, indent_string from manic.utils import str_to_bool, execute_subprocess from manic.utils import is_remote_url, split_remote_url, expand_local_url @@ -50,6 +51,77 @@ def test_exesub_except_stat_err(self): execute_subprocess(cmd, status_to_caller=False) +class TestLastNLines(unittest.TestCase): + """Test the last_n_lines function. + + """ + + def test_last_n_lines_short(self): + """With a message with <= n lines, result of last_n_lines should + just be the original message. + + """ + mystr = """three +line +string +""" + + mystr_truncated = last_n_lines( + mystr, 3, truncation_message='[truncated]') + self.assertEqual(mystr, mystr_truncated) + + def test_last_n_lines_long(self): + """With a message with > n lines, result of last_n_lines should + be a truncated string. + + """ + mystr = """a +big +five +line +string +""" + expected = """[truncated] +five +line +string +""" + + mystr_truncated = last_n_lines( + mystr, 3, truncation_message='[truncated]') + self.assertEqual(expected, mystr_truncated) + + +class TestIndentStr(unittest.TestCase): + """Test the indent_string function. + + """ + + def test_indent_string_singleline(self): + """Test the indent_string function with a single-line string + + """ + mystr = 'foo' + result = indent_string(mystr, 4) + expected = ' foo' + self.assertEqual(expected, result) + + def test_indent_string_multiline(self): + """Test the indent_string function with a multi-line string + + """ + mystr = """hello +hi +goodbye +""" + result = indent_string(mystr, 2) + expected = """ hello + hi + goodbye +""" + self.assertEqual(expected, result) + + class TestStrToBool(unittest.TestCase): """Test the string to boolean conversion routine. 
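The two helpers tested above are used together by the new failed_command_msg: the output of a failed command is truncated to its last 20 lines and then indented before being embedded in the error message. A standalone sketch of that pattern (the sample output string is invented):

```
from manic.utils import last_n_lines, indent_string

# Invented stand-in for output captured from a failed command.
output = '\n'.join('line {0}'.format(i) for i in range(30)) + '\n'

truncated = last_n_lines(output, 20,
                         truncation_message='[... Output truncated for brevity ...]')
# Prints the truncation message plus the last 20 lines, indented four spaces.
print(indent_string(truncated, 4))
```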
From 4a2838b20fdce493529f95d995622d102e0e7bb2 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Mon, 22 Jan 2018 10:26:19 -0700 Subject: [PATCH 15/31] Add some files to .gitignore These files are generated by builds --- .gitignore | 1 + src/dyn_subgrid/.gitignore | 4 ++++ src/unit_test_shr/.gitignore | 2 ++ src/unit_test_stubs/main/.gitignore | 3 +++ src/unit_test_stubs/utils/.gitignore | 2 ++ src/utils/.gitignore | 2 ++ 6 files changed, 14 insertions(+) create mode 100644 src/dyn_subgrid/.gitignore create mode 100644 src/unit_test_shr/.gitignore create mode 100644 src/unit_test_stubs/main/.gitignore create mode 100644 src/unit_test_stubs/utils/.gitignore create mode 100644 src/utils/.gitignore diff --git a/.gitignore b/.gitignore index 61903b95d5..1806df3561 100644 --- a/.gitignore +++ b/.gitignore @@ -33,4 +33,5 @@ CMakeFiles/ core.* *.gz *.log !run.log +*.pyc cime_config/buildnmlc diff --git a/src/dyn_subgrid/.gitignore b/src/dyn_subgrid/.gitignore new file mode 100644 index 0000000000..52f24730b5 --- /dev/null +++ b/src/dyn_subgrid/.gitignore @@ -0,0 +1,4 @@ +# files generated by genf90 in the unit test build +dynVarMod.F90 +dynVarTimeInterpMod.F90 +dynVarTimeUninterpMod.F90 diff --git a/src/unit_test_shr/.gitignore b/src/unit_test_shr/.gitignore new file mode 100644 index 0000000000..70bacd9621 --- /dev/null +++ b/src/unit_test_shr/.gitignore @@ -0,0 +1,2 @@ +# files generated by genf90 in the unit test build +unittestArrayMod.F90 diff --git a/src/unit_test_stubs/main/.gitignore b/src/unit_test_stubs/main/.gitignore new file mode 100644 index 0000000000..a457979e10 --- /dev/null +++ b/src/unit_test_stubs/main/.gitignore @@ -0,0 +1,3 @@ +# files generated by genf90 in the unit test build +ncdio_pio_fake.F90 +ncdio_var.F90 diff --git a/src/unit_test_stubs/utils/.gitignore b/src/unit_test_stubs/utils/.gitignore new file mode 100644 index 0000000000..9e11aa7f64 --- /dev/null +++ b/src/unit_test_stubs/utils/.gitignore @@ -0,0 +1,2 @@ +# files generated by genf90 in the unit test build +restUtilMod_stub.F90 diff --git a/src/utils/.gitignore b/src/utils/.gitignore new file mode 100644 index 0000000000..79fcba6e26 --- /dev/null +++ b/src/utils/.gitignore @@ -0,0 +1,2 @@ +# files generated by genf90 in the unit test build +array_utils.F90 From a3917e348d9cc3ce72059b5b05ee5160bccb6229 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Mon, 22 Jan 2018 15:54:38 -0700 Subject: [PATCH 16/31] Point to new cime tag This cime tag merges together Erik's changes with mine --- CESM.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CESM.cfg b/CESM.cfg index a2f171c614..acfb4f26f7 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -30,7 +30,7 @@ required = True local_path = cime protocol = git repo_url = https://github.com/CESM-Development/cime -tag = clm4518/n02/cime5.4.0-alpha.03 +tag = clm4518/n03/cime5.4.0-alpha.03 required = True [externals_description] From e25d5aab69342d225a4b0a34a24100581a00cf76 Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Mon, 22 Jan 2018 14:58:44 -0700 Subject: [PATCH 17/31] Rename ctsm to clm in standalone externals configuration file. Rename 'ctsm' back to 'clm' in the standalone externals configuration file for consistency with cesm. Will be changed back to ctsm in a future rebranding. Testing: manually run checkout_externals. --- CESM.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CESM.cfg b/CESM.cfg index acfb4f26f7..c960bd2635 100644 --- a/CESM.cfg +++ b/CESM.cfg @@ -1,4 +1,4 @@ -[ctsm] +[clm] local_path = . 
protocol = externals_only externals = CLM.cfg From f0e575e7d77f7aa8c89c9f892ed665d6cf412079 Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Mon, 22 Jan 2018 15:01:54 -0700 Subject: [PATCH 18/31] Update copyright date in license file. Testing: none --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index cb8a9e9f5a..2ba2f9c2d8 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2005-2017, University Corporation for Atmospheric Research (UCAR) +Copyright (c) 2005-2018, University Corporation for Atmospheric Research (UCAR) All rights reserved. Developed by: From bde155a38e9ba2b9ba7c3a948316ab6d7aa49590 Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Mon, 22 Jan 2018 15:17:55 -0700 Subject: [PATCH 19/31] Rename externals description files Rename the externals description files to correspond to the new default naming convention in an in manage_externals v0.8.0. Testing: status and checkout ok with 'checkout_externals --externals Externals.cfg' --- CESM.cfg => Externals.cfg | 2 +- CLM.cfg => Externals_CLM.cfg | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename CESM.cfg => Externals.cfg (95%) rename CLM.cfg => Externals_CLM.cfg (100%) diff --git a/CESM.cfg b/Externals.cfg similarity index 95% rename from CESM.cfg rename to Externals.cfg index c960bd2635..0719004d6f 100644 --- a/CESM.cfg +++ b/Externals.cfg @@ -1,7 +1,7 @@ [clm] local_path = . protocol = externals_only -externals = CLM.cfg +externals = Externals_CLM.cfg required = True [cism] diff --git a/CLM.cfg b/Externals_CLM.cfg similarity index 100% rename from CLM.cfg rename to Externals_CLM.cfg From c843f07448cfe28badc439021a68b37d4add91cb Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Mon, 22 Jan 2018 17:20:09 -0700 Subject: [PATCH 20/31] Squashed 'manage_externals/' changes from 6c3d4dec..5fc8709a 5fc8709a Change default externals description filename to Externals.cfg f56be929 Update copyright year in license git-subtree-dir: manage_externals git-subtree-split: 5fc8709a77f2408e29a59a1de750a9ef919675e4 --- LICENSE.txt | 2 +- README.md | 9 ++++++--- manic/checkout.py | 3 ++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/LICENSE.txt b/LICENSE.txt index 69d97201e3..665ee03fbc 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2017, University Corporation for Atmospheric Research (UCAR) +Copyright (c) 2017-2018, University Corporation for Atmospheric Research (UCAR) All rights reserved. Developed by: diff --git a/README.md b/README.md index a847b7a2cc..b699a26081 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ ``` usage: checkout_externals [-h] [-e [EXTERNALS]] [-o] [-S] [-v] [--backtrace] - [-d] + [-d] [--no-logging] checkout_externals manages checking out CESM externals from revision control based on a externals description file. By default only the required @@ -19,7 +19,8 @@ synchronize the working copy with the externals description. optional arguments: -h, --help show this help message and exit -e [EXTERNALS], --externals [EXTERNALS] - The externals description filename. Default: CESM.cfg. + The externals description filename. Default: + Externals.cfg. -o, --optional By default only the required externals are checked out. This flag will also checkout the optional externals. @@ -28,11 +29,13 @@ optional arguments: information is provided. Use verbose output to see details. -v, --verbose Output additional information to the screen and log - file. + file. 
This flag can be used up to two times, + increasing the verbosity level each time. --backtrace DEVELOPER: show exception backtraces as extra debugging output -d, --debug DEVELOPER: output additional debugging information to the screen and log file. + --no-logging DEVELOPER: disable logging. ``` NOTE: checkout_externals *MUST* be run from the root of the source tree it diff --git a/manic/checkout.py b/manic/checkout.py index be5a4864a4..42854e3d59 100755 --- a/manic/checkout.py +++ b/manic/checkout.py @@ -202,7 +202,8 @@ def commandline_arguments(args=None): # # user options # - parser.add_argument('-e', '--externals', nargs='?', default='CESM.cfg', + parser.add_argument('-e', '--externals', nargs='?', + default='Externals.cfg', help='The externals description filename. ' 'Default: %(default)s.') From 23d4df44db092ca8312452ad822f3cc1d03a2d85 Mon Sep 17 00:00:00 2001 From: Ben Andre Date: Mon, 22 Jan 2018 17:22:33 -0700 Subject: [PATCH 21/31] Update the externals documentation Update the externals documentation to describe manage_externals by copying the cesm readme file into readme_externals.rst. Testing: none --- README_EXTERNALS | 56 ------------- README_EXTERNALS.rst | 183 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 183 insertions(+), 56 deletions(-) delete mode 100644 README_EXTERNALS create mode 100644 README_EXTERNALS.rst diff --git a/README_EXTERNALS b/README_EXTERNALS deleted file mode 100644 index 66afc744f6..0000000000 --- a/README_EXTERNALS +++ /dev/null @@ -1,56 +0,0 @@ -Some guidelines on working with externals in CLM: - -Also see: - - https://wiki.ucar.edu/display/ccsm/Creating+a+CLM+Tag - - https://wiki.ucar.edu/display/ccsm/Using+SVN+to+Work+with+CLM+Development+Branches - -Example taken from bulletin board forum for "Subversion Issues" in the -thread for "Introduction to Subversion"...(070208) - -Working with externals: - -checkout the HEAD of clm's trunk into working copy directory -> svn co $SVN/clm2/trunk clm_trunk_head_wc - -view the property set for clm's external definitions -> svn propget svn:externals clm_trunk_head_wc - -view revision, URL and other useful information specific to external files -> cd clm_trunk_head_wc/components/clm/src -> svn info main - -create new clm branch for mods required of clm -> svn copy $SVN/clm2/trunk_tags/ $SVN/clm2/branches/ -m "appropriate message" - -have external directories in working copy refer to new clm branch to make changes -> svn switch $SVN/clm2/branches//src/main main - ---make changes to clm files-- - -when satisfied with changes and testing, commit to HEAD of clm branch -> svn commit main -m "appropriate message" - -tag new version of clm branch - review naming conventions! -> svn copy $SVN/clm2/branches/ $SVN/clm2/branch_tags/_tags/ -m "appropriate message" - -have external directories in working copy refer to new clm tag -> svn switch $SVN/clm2/branch_tags/_tags//src/main main - -modify clm's property for external definitions in working copy -> vi clm_trunk_head_wc/SVN_EXTERNAL_DIRECTORIES - ---point definition to URL of new-tag-name-- - -set the property - don't forget the 'dot' at the end! 
-> svn propset svn:externals -F SVN_EXTERNAL_DIRECTORIES clm_trunk_head_wc - ---continue with other clm mods-- - -commit changes from working copy directory to HEAD of clm trunk - NOTE: a commit from here will *NOT* recurse to external directories -> cd clm_trunk_head_wc -> svn commit -m "appropriate message" - -tag new version of clm trunk -> svn copy $SVN/clm2/trunk $SVN/clm2/trunk_tags/ -m "appropriate message" diff --git a/README_EXTERNALS.rst b/README_EXTERNALS.rst new file mode 100644 index 0000000000..5d328619ff --- /dev/null +++ b/README_EXTERNALS.rst @@ -0,0 +1,183 @@ +================================== + The Community Earth System Model +================================== + +See the CESM web site for documentation and information: + +http://www.cesm.ucar.edu + +This repository provides tools for managing the external components that +make up a CESM tag - alpha, beta and release. CESM tag creation should +be coordinated through CSEG at NCAR. + +.. sectnum:: + +.. contents:: + +Obtaining the full model code and associated scripting infrastructure +===================================================================== + +CESM2.0 is now released via github. You will need some familiarity with git in order +to modify the code and commit these changes. However, to simply checkout and run the +code, no git knowledge is required other than what is documented in the following steps. + +To obtain the CESM2.0 code you need to do the following: + +#. Clone the repository. :: + + git clone https://github.com/escomp/cesm.git my_cesm_sandbox + + This will create a directory ``my_cesm_sandbox/`` in your current working directory. + +#. Go into the newly created CESM repository and determine what version of CESM you want. + To see what cesm tags are available, simply issue the **git tag** command. :: + + cd my_cesm_sandbox + git tag + +#. Do a git checkout of the tag you want. If you want to checkout cesm2.0.beta07, you would issue the following. :: + + git checkout cesm2.0.beta07 + + (It is normal and expected to get a message about being in 'detached + HEAD' state. For now you can ignore this, but it becomes important if + you want to make changes to your Externals.cfg file and commit those + changes to a branch.) + +#. Run the script **manage_externals/checkout_externals**. :: + + ./manage_externals/checkout_externals + + The **checkout_externals** script is a package manager that will + populate the cesm directory with the relevant versions of each of the + components along with the CIME infrastructure code. + +At this point you have a working version of CESM. + +To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ . + +More details on checkout_externals +---------------------------------- + +The file **Externals.cfg** in your top-level CESM directory tells +**checkout_externals** which tag/branch of each component should be +brought in to generate your sandbox. (This file serves the same purpose +as SVN_EXTERNAL_DIRECTORIES when CESM was in a subversion repository.) + +NOTE: Just like svn externals, checkout_externals will always attempt +to make the working copy exactly match the externals description. If +you manually modify an external without updating Externals.cfg, e.g. switch +to a different tag, then rerunning checkout_externals will switch you +back to the external described in Externals.cfg. See below +documentation `Customizing your CESM sandbox`_ for more details. 
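+
+For example, to see how your working copy currently compares against the
+externals description, without changing anything, you can use the status
+flag listed in the tool's help output::
+
+    ./manage_externals/checkout_externals -S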
+ +**You need to rerun checkout_externals whenever Externals.cfg has +changed** (unless you have already manually updated the relevant +external(s) to have the correct branch/tag checked out). Common times +when this is needed are: + +* After checking out a new CESM branch/tag + +* After merging some other CESM branch/tag into your currently + checked-out branch + +**checkout_externals** must be run from the root of the source +tree. For example, if you cloned CESM with:: + + git clone https://github.com/escomp/cesm.git my_cesm_sandbox + +then you must run **checkout_externals** from +``/path/to/my_cesm_sandbox``. + +To see more details of **checkout_externals**, issue :: + + ./manage_externals/checkout_externals --help + +Customizing your CESM sandbox +============================= + +There are several use cases to consider when you want to customize or modify your CESM sandbox. + +Switching to a different CESM tag +--------------------------------- + +If you have already checked out a tag and **HAVE NOT MADE ANY +MODIFICATIONS** it is simple to change your sandbox. Say that you +checked out cesm2.0.beta07 but really wanted to have cesm2.0.beta08; +you would simply do the following:: + + git checkout cesm2.0.beta08 + ./manage_externals/checkout_externals + +You should **not** use this method if you have made any source code +changes, or if you have any ongoing CESM cases that were created from +this sandbox. In these cases, it is often easiest to do a second **git +clone**. + +Pointing to a different version of a component +---------------------------------------------- + +Each entry in **Externals.cfg** has the following form (we use CAM as an +example below):: + + [cam] + tag = trunk_tags/cam5_4_143/components/cam + protocol = svn + repo_url = https://svn-ccsm-models.cgd.ucar.edu/cam1 + local_path = components/cam + required = True + +Each entry specifies either a tag or a branch. To point to a new tag: + +#. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing + ``cam5_4_143`` to ``cam5_4_144`` above) + +#. Checkout the new component(s):: + + ./manage_externals/checkout_externals + +Keep in mind that changing individual components from a tag may result +in an invalid model (won't compile, won't run, not scientifically +meaningful) and is unsupported. + +Committing your change to Externals.cfg +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After making this change, it's a good idea to commit the change in your +local CESM git repository. First create a CESM branch in your local +repository, then commit it. (Unlike with subversion, branches are stored +locally unless you explicitly push them up to github. Feel free to +create whatever local branches you'd like.) For example:: + + git checkout -b my_cesm_branch + git add Externals.cfg + git commit -m "Update CAM to cam5_4_144" + +Modifying a component +--------------------- + +If you'd like to modify a component via a branch and point to that +branch in your CESM sandbox, use the following procedure (again, using +CAM as an example): + +#. Create a CAM branch. Since CAM originates from a subversion + repository, you will first need to create a branch in that + repository. Let's assume you have created this branch and called it + **my_branch**. + +#. Update **Externals.cfg** to point to your branch. 
You can replace the + **tag** entry with a **branch** entry, as follows:: + + [cam] + branch = branches/my_branch/components/cam + protocol = svn + repo_url = https://svn-ccsm-models.cgd.ucar.edu/cam1 + local_path = components/cam + required = True + +#. Checkout your branch:: + + ./manage_externals/checkout_externals + +It's a good idea to commit your **Externals.cfg** file changes. See the above +documentation, `Committing your change to Externals.cfg`_. From bbdbd94a50c35259d0f6c12205572f989f2a1800 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Tue, 23 Jan 2018 10:28:18 -0700 Subject: [PATCH 22/31] Update unit testing instructions --- src/README.unit_testing | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/README.unit_testing b/src/README.unit_testing index 60dd187364..de503ca8d3 100644 --- a/src/README.unit_testing +++ b/src/README.unit_testing @@ -1,6 +1,4 @@ -# To run all CLM unit tests on caldera, run the following command: -# -# Note that, on yellowstone/caldera, this requires 'module load all-python-libs' +# To run all CTSM unit tests on caldera, run the following command: # # The creation of a temporary directory ensures that you are doing a completely # clean build of the unit tests. (The use of the --clean flag to run_tests.py @@ -8,5 +6,8 @@ # rerunning the tests after an incremental change, you can instead use an # existing build directory. -../../../cime/scripts/fortran_unit_testing/run_tests.py --build-dir `mktemp -d --tmpdir=. unit_tests.XXXXXXXX` +# From a standalone CTSM checkout: +../cime/scripts/fortran_unit_testing/run_tests.py --build-dir `mktemp -d --tmpdir=. unit_tests.XXXXXXXX` +# If you are within a full CESM checkout, you would instead do: +# ../../../cime/scripts/fortran_unit_testing/run_tests.py --build-dir `mktemp -d --tmpdir=. unit_tests.XXXXXXXX` From cdf5835a31e3831bb628f868b1c83d2d7830deaf Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Tue, 23 Jan 2018 14:52:36 -0700 Subject: [PATCH 23/31] Update CISM version This just pulls the latest manage_externals into CISM, along with renaming the externals cfg files. 
--- Externals.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 0719004d6f..dffafb9483 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,8 +8,8 @@ required = True local_path = components/cism protocol = git repo_url = https://github.com/ESCOMP/cism-wrapper -tag = cism2_1_45 -externals = CISM.cfg +tag = cism2_1_46 +externals = Externals_CISM.cfg required = True [rtm] From c913af78f3640728e29cafd7e8682775ec0e42a5 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Tue, 23 Jan 2018 15:15:43 -0700 Subject: [PATCH 24/31] Edit README_EXTERNALS.rst - Talk about this in the context of CTSM rather than CESM - Cut documentation on modifying a component, because we currently don't have a great workflow for doing this with components in git (due to https://github.com/NCAR/manage_externals/issues/34) --- README_EXTERNALS.rst | 127 ++++++++++++------------------------------- 1 file changed, 34 insertions(+), 93 deletions(-) diff --git a/README_EXTERNALS.rst b/README_EXTERNALS.rst index 5d328619ff..01c8f669aa 100644 --- a/README_EXTERNALS.rst +++ b/README_EXTERNALS.rst @@ -1,136 +1,105 @@ -================================== - The Community Earth System Model -================================== - -See the CESM web site for documentation and information: - -http://www.cesm.ucar.edu - -This repository provides tools for managing the external components that -make up a CESM tag - alpha, beta and release. CESM tag creation should -be coordinated through CSEG at NCAR. - -.. sectnum:: - -.. contents:: - Obtaining the full model code and associated scripting infrastructure ===================================================================== -CESM2.0 is now released via github. You will need some familiarity with git in order +CTSM is released via github. You will need some familiarity with git in order to modify the code and commit these changes. However, to simply checkout and run the code, no git knowledge is required other than what is documented in the following steps. -To obtain the CESM2.0 code you need to do the following: +To obtain the CTSM code you need to do the following: #. Clone the repository. :: - git clone https://github.com/escomp/cesm.git my_cesm_sandbox - - This will create a directory ``my_cesm_sandbox/`` in your current working directory. - -#. Go into the newly created CESM repository and determine what version of CESM you want. - To see what cesm tags are available, simply issue the **git tag** command. :: - - cd my_cesm_sandbox - git tag - -#. Do a git checkout of the tag you want. If you want to checkout cesm2.0.beta07, you would issue the following. :: + git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox - git checkout cesm2.0.beta07 - - (It is normal and expected to get a message about being in 'detached - HEAD' state. For now you can ignore this, but it becomes important if - you want to make changes to your Externals.cfg file and commit those - changes to a branch.) + This will create a directory ``my_ctsm_sandbox/`` in your current working directory. #. Run the script **manage_externals/checkout_externals**. :: ./manage_externals/checkout_externals The **checkout_externals** script is a package manager that will - populate the cesm directory with the relevant versions of each of the + populate the ctsm directory with the relevant versions of each of the components along with the CIME infrastructure code. -At this point you have a working version of CESM. +At this point you have a working version of CTSM. 
To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ . More details on checkout_externals ---------------------------------- -The file **Externals.cfg** in your top-level CESM directory tells +The file **Externals.cfg** in your top-level CTSM directory tells **checkout_externals** which tag/branch of each component should be brought in to generate your sandbox. (This file serves the same purpose -as SVN_EXTERNAL_DIRECTORIES when CESM was in a subversion repository.) +as SVN_EXTERNAL_DIRECTORIES when CLM was in a subversion repository.) NOTE: Just like svn externals, checkout_externals will always attempt to make the working copy exactly match the externals description. If you manually modify an external without updating Externals.cfg, e.g. switch to a different tag, then rerunning checkout_externals will switch you back to the external described in Externals.cfg. See below -documentation `Customizing your CESM sandbox`_ for more details. +documentation `Customizing your CTSM sandbox`_ for more details. **You need to rerun checkout_externals whenever Externals.cfg has changed** (unless you have already manually updated the relevant external(s) to have the correct branch/tag checked out). Common times when this is needed are: -* After checking out a new CESM branch/tag +* After checking out a new CTSM branch/tag -* After merging some other CESM branch/tag into your currently +* After merging some other CTSM branch/tag into your currently checked-out branch **checkout_externals** must be run from the root of the source -tree. For example, if you cloned CESM with:: +tree. For example, if you cloned CTSM with:: - git clone https://github.com/escomp/cesm.git my_cesm_sandbox + git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox then you must run **checkout_externals** from -``/path/to/my_cesm_sandbox``. +``/path/to/my_ctsm_sandbox``. To see more details of **checkout_externals**, issue :: ./manage_externals/checkout_externals --help -Customizing your CESM sandbox +Customizing your CTSM sandbox ============================= -There are several use cases to consider when you want to customize or modify your CESM sandbox. +There are several use cases to consider when you want to customize or modify your CTSM sandbox. -Switching to a different CESM tag ---------------------------------- +Switching to a different CTSM branch or tag +------------------------------------------- -If you have already checked out a tag and **HAVE NOT MADE ANY +If you have already checked out a branch or tag and **HAVE NOT MADE ANY MODIFICATIONS** it is simple to change your sandbox. Say that you -checked out cesm2.0.beta07 but really wanted to have cesm2.0.beta08; +checked out ctsm1.0.0 but really wanted to have ctsm1.1.0; you would simply do the following:: - git checkout cesm2.0.beta08 + git checkout ctsm1.1.0 ./manage_externals/checkout_externals You should **not** use this method if you have made any source code -changes, or if you have any ongoing CESM cases that were created from +changes, or if you have any ongoing CTSM cases that were created from this sandbox. In these cases, it is often easiest to do a second **git clone**. 
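+
+For example, assuming the same repository URL as above and a
+hypothetical directory name, a second sandbox can be created with::
+
+    git clone https://github.com/escomp/ctsm.git my_second_ctsm_sandbox
+
+and then populated with externals in the usual way.
+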
Pointing to a different version of a component ---------------------------------------------- -Each entry in **Externals.cfg** has the following form (we use CAM as an +Each entry in **Externals.cfg** has the following form (we use CIME as an example below):: - - [cam] - tag = trunk_tags/cam5_4_143/components/cam - protocol = svn - repo_url = https://svn-ccsm-models.cgd.ucar.edu/cam1 - local_path = components/cam + + [cime] + local_path = cime + protocol = git + repo_url = https://github.com/CESM-Development/cime + tag = cime5.4.0-alpha.20 required = True Each entry specifies either a tag or a branch. To point to a new tag: #. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing - ``cam5_4_143`` to ``cam5_4_144`` above) + ``cime5.4.0-alpha.20`` to ``cime5.4.0-alpha.21`` above) #. Checkout the new component(s):: @@ -141,43 +110,15 @@ in an invalid model (won't compile, won't run, not scientifically meaningful) and is unsupported. Committing your change to Externals.cfg -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ After making this change, it's a good idea to commit the change in your -local CESM git repository. First create a CESM branch in your local +local CTSM git repository. First create a branch in your local repository, then commit it. (Unlike with subversion, branches are stored locally unless you explicitly push them up to github. Feel free to create whatever local branches you'd like.) For example:: - git checkout -b my_cesm_branch + git checkout -b my_ctsm_branch git add Externals.cfg - git commit -m "Update CAM to cam5_4_144" - -Modifying a component ---------------------- - -If you'd like to modify a component via a branch and point to that -branch in your CESM sandbox, use the following procedure (again, using -CAM as an example): - -#. Create a CAM branch. Since CAM originates from a subversion - repository, you will first need to create a branch in that - repository. Let's assume you have created this branch and called it - **my_branch**. - -#. Update **Externals.cfg** to point to your branch. You can replace the - **tag** entry with a **branch** entry, as follows:: - - [cam] - branch = branches/my_branch/components/cam - protocol = svn - repo_url = https://svn-ccsm-models.cgd.ucar.edu/cam1 - local_path = components/cam - required = True - -#. Checkout your branch:: - - ./manage_externals/checkout_externals + git commit -m "Update CIME to cime5.4.0-alpha.20" -It's a good idea to commit your **Externals.cfg** file changes. See the above -documentation, `Committing your change to Externals.cfg`_. From 51b19f244a31370c5804e34555fc8d661c329139 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Tue, 23 Jan 2018 15:37:03 -0700 Subject: [PATCH 25/31] Rework UpdateChangelog to just work with files in the doc directory We'll no longer maintain separate copies of ChangeLog/ChangeSum in the root directory and the doc directory: we'll just use the doc directory. Accordingly, move the UpdateChangelog.pl script and the .ChangeLog_template file into the doc/ directory. 
--- .../.ChangeLog_template | 0 UpDateChangeLog.pl => doc/UpdateChangelog.pl | 51 ------------------- 2 files changed, 51 deletions(-) rename .ChangeLog_template => doc/.ChangeLog_template (100%) rename UpDateChangeLog.pl => doc/UpdateChangelog.pl (70%) diff --git a/.ChangeLog_template b/doc/.ChangeLog_template similarity index 100% rename from .ChangeLog_template rename to doc/.ChangeLog_template diff --git a/UpDateChangeLog.pl b/doc/UpdateChangelog.pl similarity index 70% rename from UpDateChangeLog.pl rename to doc/UpdateChangelog.pl index 27e4803122..49e5a67f13 100755 --- a/UpDateChangeLog.pl +++ b/doc/UpdateChangelog.pl @@ -28,10 +28,6 @@ sub usage { $ProgName [options] OPTIONS - -compbrnch version Enter clm branch version to compare to (under branch_tags in repo). - [or -cb] - -comptrunk version Enter clm trunk version to compare to (under trunk_tags in repo). - [or -ct] -help [or -h] Help on this script. -update [or -u] Just update the date/time for the latest tag In this case no other arguments should be given. @@ -46,24 +42,16 @@ sub usage { To document a new tag $ProgName clm4_5_2_r097 "Description of this tag" - - To document a new tag and compare expected fails to previous tag. - - $ProgName clm4_5_2_r097 "Description of this tag" -ct clm4_5_2_r096 EOF } my %opts = { help => 0, update => 0, - comptrunk => undef, - compbrnch => undef, }; GetOptions( "h|help" => \$opts{'help'}, "u|update" => \$opts{'update'}, - "ct|comptrunk=s" => \$opts{'comptrunk'}, - "cb|compbrnch=s" => \$opts{'compbrnch'}, ); if ( $opts{'help'} ) { usage(); @@ -138,17 +126,6 @@ sub usage { } elsif ( $_ =~ /One-line Summary:/ ) { chomp( $_ ); print $fh "$_ $sum\n"; - } elsif ( $_ =~ /CLM tag used for the baseline comparison tests if applicable:/ ) { - chomp( $_ ); - if ( defined($opts{'comptrunk'}) ) { - print $fh "$_ $opts{'comptrunk'}\n"; - &AddExpectedFailDiff( $fh, "trunk_tags/$opts{'comptrunk'}" ); - } elsif ( defined($opts{'compbrnch'}) ) { - print $fh "$_ $opts{'compbrnch'}\n"; - &AddExpectedFailDiff( $fh, "branch_tags/$opts{'compbrnch'}" ); - } else { - print $fh "$_\n"; - } } else { print $fh $_; } @@ -228,31 +205,3 @@ sub usage { system( "$EDITOR $changelog" ); system( "$EDITOR $changesum" ); } -system( "/bin/cp -fp $changelog components/clm/doc/." ); -system( "/bin/cp -fp $changesum components/clm/doc/." ); -system( "/bin/chmod 0444 components/clm/doc/$changelog" ); -system( "/bin/chmod 0444 components/clm/doc/$changesum" ); - -sub AddExpectedFailDiff { -# -# Add information about the expected fail difference -# - my $fh = shift; - my $version = shift; - - my $SVN_MOD_URL = "https://svn-ccsm-models.cgd.ucar.edu/clm2/"; - my $expectedFail = `find . 
-name 'expected*Fail*.xml' -print`; - if ( $expectedFail eq "" ) { - die "ERROR:: expectedFails file NOT found here\n"; - } - - `svn ls $SVN_MOD_URL/$version` || die "ERROR:: Bad version to compare to: $version\n"; - `svn ls $SVN_MOD_URL/$version/$expectedFail` || die "ERROR:: expectedFails file NOT found in: $version\n"; - print $fh "\nDifference in expected fails from testing:\n\n"; - my $diff = `svn diff --old $SVN_MOD_URL/$version/$expectedFail \ \n --new $expectedFail`; - if ( $diff eq "" ) { - print $fh " No change in expected failures in testing\n"; - } else { - print $fh $diff; - } -} From 062634fafff5df05e09c15c373718bcfff57bfed Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 24 Jan 2018 15:40:05 -0700 Subject: [PATCH 26/31] Rework top-level readme --- README.rst | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index a67ec63abc..90f6eae015 100644 --- a/README.rst +++ b/README.rst @@ -1,10 +1,12 @@ -=== -CLM -=== +==== +CTSM +==== -The Community Land Model, CLM, is part of the Community Earth System Model. +The Community Terrestrial Systems Model. -See the CESM web site for documentation and information: +This includes the Community Land Model (CLM5 and CLM4.5) of the Community Earth System Model. -http://www.cesm.ucar.edu +For documentation, quick start, diagnostics, model output and +references, see +http://www.cesm.ucar.edu/models/cesm2.0/land/ From f8ec605471693234ffd8ada42d2864bf5b68d6e7 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Wed, 24 Jan 2018 16:32:07 -0700 Subject: [PATCH 27/31] Add an item in the trunk checklist --- .CLMTrunkChecklist | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.CLMTrunkChecklist b/.CLMTrunkChecklist index 6d76a47a12..8774800891 100644 --- a/.CLMTrunkChecklist +++ b/.CLMTrunkChecklist @@ -42,6 +42,8 @@ This should show no diffs (8) Make the trunk tag +(9) Send an email to clm-dev with the contents of the latest ChangeLog +entry (until we have automated this for the git repo) NOTES: From b52611d8b2cda5f1de14f8a086625cbd41e23dad Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Thu, 25 Jan 2018 21:06:24 -0700 Subject: [PATCH 28/31] Add buildcppc to .gitignore And track .gitignore in a separate file in the cime_config directory rather than in the top-level .gitignore. (I don't have strong feelings about this, but this is consistent with what I've done elsewhere.) --- .gitignore | 1 - cime_config/.gitignore | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 cime_config/.gitignore diff --git a/.gitignore b/.gitignore index 1806df3561..7ec31341da 100644 --- a/.gitignore +++ b/.gitignore @@ -34,4 +34,3 @@ core.* *.gz *.log !run.log *.pyc -cime_config/buildnmlc diff --git a/cime_config/.gitignore b/cime_config/.gitignore new file mode 100644 index 0000000000..5bf1840ccf --- /dev/null +++ b/cime_config/.gitignore @@ -0,0 +1,3 @@ +buildnmlc +buildcppc + From ba5dfb67ec6e1c0cf5b019e7ebd6749a01a11b4a Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 26 Jan 2018 12:56:08 -0700 Subject: [PATCH 29/31] Organize expected fails by category This way we can easily see which tests are expected to fail from aux_clm, to facilitate keeping the expected fail list up to date. 
Also, remove an old yellowstone test from the list --- cime_config/testdefs/ExpectedTestFails.xml | 53 ++++++++++++---------- 1 file changed, 28 insertions(+), 25 deletions(-) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index 2d51f524f2..df0c2815c8 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -1,29 +1,32 @@ - FAIL ERP_D_Lm9.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN - FAIL SMS_D_Lm13.f10_f10_musgs.I1850Clm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN - FAIL SMS_D_Lm13.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN - FAIL ERP_D_Ld10.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_decStart SHAREDLIB_BUILD - FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst - FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst - FAIL ERI_N2_Ld9.f19_g17.I2000Clm50BgcCrop.cheyenne_intel.clm-default RUN - FAIL SMS_Ld5_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN - FAIL ERP_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN - FAIL ERP_D.f10_f10_musgs.IHistClm50Bgc.cheyenne_gnu.clm-decStart RUN - FAIL ERS_Ly5_P72x1.f10_f10_musgs.IHistClm45BgcCrop.cheyenne_intel.clm-cropMonthOutput RUN - FAIL ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropGs.cheyenne_gnu.clm-monthly RUN - FAIL SMS_D_Ld5.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN - FAIL SMS_D_Lm6.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN - FAIL SMS_D_Lm6_P144x1.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN - FAIL ERP_Ld5.f10_f10_musgs.I2000Clm50Vic.cheyenne_gnu.clm-decStart COMPARE_base_rest - FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.hobart_nag.clm-FatesAllVars COMPARE_base_rest - FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesLogging COMPARE_base_rest - FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-Fates COMPARE_base_rest - FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesNoFire COMPARE_base_rest - FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesPPhys COMPARE_base_rest - FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesST3 COMPARE_base_rest - FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesAllVars COMPARE_base_rest - FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_gnu.clm-Fates COMPARE_base_rest - FAIL ERS_D_Mmpi-serial_Ld5.1x1_brazil.I2000Clm50FatesGs.yellowstone_pgi.clm-fates COMPARE_base_rest + + FAIL ERP_D_Lm9.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN + FAIL SMS_D_Lm13.f10_f10_musgs.I1850Clm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN + FAIL SMS_D_Lm13.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN + FAIL ERP_D_Ld10.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_decStart SHAREDLIB_BUILD + FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst + FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst + FAIL ERI_N2_Ld9.f19_g17.I2000Clm50BgcCrop.cheyenne_intel.clm-default RUN + FAIL SMS_Ld5_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN + FAIL ERP_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN + FAIL ERP_D.f10_f10_musgs.IHistClm50Bgc.cheyenne_gnu.clm-decStart RUN + FAIL ERS_Ly5_P72x1.f10_f10_musgs.IHistClm45BgcCrop.cheyenne_intel.clm-cropMonthOutput RUN + FAIL 
ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropGs.cheyenne_gnu.clm-monthly RUN + FAIL SMS_D_Ld5.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN + FAIL SMS_D_Lm6.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN + FAIL SMS_D_Lm6_P144x1.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN + + + FAIL ERP_Ld5.f10_f10_musgs.I2000Clm50Vic.cheyenne_gnu.clm-decStart COMPARE_base_rest + FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.hobart_nag.clm-FatesAllVars COMPARE_base_rest + FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesLogging COMPARE_base_rest + FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-Fates COMPARE_base_rest + FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesNoFire COMPARE_base_rest + FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesPPhys COMPARE_base_rest + FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesST3 COMPARE_base_rest + FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesAllVars COMPARE_base_rest + FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_gnu.clm-Fates COMPARE_base_rest + From 77679ef21e657b23a8401848bb8642f8eef502dc Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 26 Jan 2018 15:26:39 -0700 Subject: [PATCH 30/31] Update ChangeLog --- doc/.ChangeLog_template | 4 +- doc/ChangeLog | 179 ++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 3 files changed, 182 insertions(+), 2 deletions(-) diff --git a/doc/.ChangeLog_template b/doc/.ChangeLog_template index 5c11f0f6b7..527502df56 100644 --- a/doc/.ChangeLog_template +++ b/doc/.ChangeLog_template @@ -13,7 +13,7 @@ Purpose of changes Bugs fixed or introduced ------------------------ -Issues fixed (include CCTSM Issue #): [If none, remove this line] +Issues fixed (include CTSM Issue #): [If none, remove this line] CIME Issues fixed (include issue #): [If none, remove this line] Known bugs introduced in this tag (include github issue ID): [If none, remove this line] @@ -112,7 +112,7 @@ Changes answers relative to baseline: Detailed list of changes ------------------------ -List any svn externals directories updated (cime, rtm, mosart, cism, etc.): +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): List all files eliminated: diff --git a/doc/ChangeLog b/doc/ChangeLog index 9aa280dfc1..0e10c18ffe 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,183 @@ =============================================================== +Tag name: clm4_5_18_r273 +Originator(s): sacks (Bill Sacks) +Date: Fri Jan 26 15:26:06 MST 2018 +One-line Summary: Support a standalone checkout from git + +Purpose of changes +------------------ + +This PR allows a standalone git-based checkout to work. Starting with +this tag, we'll be using git rather than svn for CLM/CTSM development. + +The biggest changes here are pulling in manage_externals, which serves +the role of svn externals. After cloning the repository, you can get all +of the necessary externals by running: + +./manage_externals/checkout_externals + +Other than that, this PR mostly involves changes to allow you to run +from the new directory structure of a standalone checkout: Now all of +the CLM/CTSM directories appear at the top level of the checkout, rather +than nested under components/clm. 
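+
+The externals themselves are listed in the new top-level Externals.cfg
+file. For reference, a typical entry (shown here with the cime values
+used by this tag) looks like:
+
+  [cime]
+  local_path = cime
+  protocol = git
+  repo_url = https://github.com/CESM-Development/cime
+  tag = clm4518/n03/cime5.4.0-alpha.03
+  required = True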
+ +Bugs fixed or introduced +------------------------ + +Known bugs introduced in this tag (include github issue ID): + +- https://github.com/ESCOMP/ctsm/issues/220 For new standalone + organization, need to generalize paths for tools and testing + + +Notes of particular relevance for users +--------------------------------------- + +Caveats for users (e.g., need to interpolate initial conditions): + +- The svn trunk is now frozen. All future development will occur in the + master branch of https://github.com/ESCOMP/ctsm + +- A standalone git checkout will have all CLM directories (src, bld, + etc.) at the top level, rather than under components/clm. + +- To get all externals after cloning the git repository, run + ./manage_externals/checkout_externals + +- More details on working with the git repository will be coming soon + +Changes to CLM's user interface (e.g., new/renamed XML or namelist variables): none + +Changes made to namelist defaults (e.g., changed parameter values): none + +Changes to the datasets (e.g., parameter, surface or initial files): none + +Substantial timing or memory changes: none + +Notes of particular relevance for developers: (including Code reviews and testing) +--------------------------------------------- + +Caveats for developers (e.g., code that is duplicated that requires double maintenance): + +- build-namelist unit tests currently do not run (#220) + +- tools tests, and the tools builds themselves, are expected not to work (#220) + +Changes to tests or testing: none + +Code reviewed by: Ben Andre, Erik Kluzek + +Did you follow the steps in .CLMTrunkChecklist: yes + +CLM testing: + + [PASS means all tests PASS and OK means tests PASS other than expected fails.] + + build-namelist tests: + + cheyenne - FAIL (#220) + + unit-tests (components/clm/src): + + cheyenne - pass + + tools-tests (components/clm/test/tools): + + cheyenne - NOT RUN; EXPECTED TO FAIL (#220) + + PTCLM testing (components/clm/tools/shared/PTCLM/test): + + cheyenne - NOT RUN; MAY FAIL DUE TO #220 + + regular tests (aux_clm): + + cheyenne_intel ---- pass + cheyenne_gnu ------ pass + hobart_nag -------- pass + hobart_pgi -------- pass + hobart_intel ------ pass + +CLM tag used for the baseline comparisons: clm4_5_18_r272 +Answer changes +-------------- + +Changes answers relative to baseline: NO + +Detailed list of changes +------------------------ + +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): + +- cime: clm4518/n02/cime5.4.0-alpha.03 -> clm4518/n03/cime5.4.0-alpha.03 + Has some changes needed for new standalone directory structure to work + (these changes are now on cime master, though see #225 before updating + to cime master) + +- cism: cism2_1_40 -> cism2_1_46 + Points to cism in git + cism2_1_41, 42, 43 and 44 contained very minor changes + cism2_1_45 and 46 are the git migration + +List all files eliminated: + +========= Handle externals via manage_externals rather than svn externals +D README_EXTERNALS +D SVN_EXTERNAL_DIRECTORIES +D SVN_EXTERNAL_DIRECTORIES.standalone + +List all files added and what they do: + +========= Added this directory and everything below it. Pulled in via + git subtree from + https://github.com/NCAR/manage_externals/. This is the new + tool for pulling in and updating externals (replacing svn + externals). 
+A manage_externals/ + +========= Handle externals via manage_externals rather than svn externals +A Externals.cfg +A Externals_CLM.cfg +A README_EXTERNALS.rst + +========= Handle new directory structure of standalone checkouts +A .config_files.xml + +========= Add some .gitignore files +A src/dyn_subgrid/.gitignore +A src/unit_test_shr/.gitignore +A src/unit_test_stubs/main/.gitignore +A src/unit_test_stubs/utils/.gitignore +A src/utils/.gitignore +A cime_config/.gitignore + +List all existing files that have been modified, and describe the changes: + +========= Handle new directory structure of standalone checkouts +M bld/CLMBuildNamelist.pm +M bld/configure +M cime_config/buildcpp +M cime_config/buildlib +M cime_config/buildnml +M src/CMakeLists.txt + +========= Documentation updates for move to git +M .CLMTrunkChecklist +M .gitignore +M LICENSE +M README.rst +M src/README.unit_testing + +========= Separately document expected fails for aux_clm vs fates test + lists +M cime_config/testdefs/ExpectedTestFails.xml + +========= ChangeLog now only stored in doc directory, not in top-level + directory +R095 .ChangeLog_template doc/.ChangeLog_template +R070 UpDateChangeLog.pl doc/UpdateChangelog.pl + +=============================================================== +=============================================================== Tag name: clm4_5_18_r272 Originator(s): erik (Erik Kluzek) Date: Thu Jan 25 01:17:01 MST 2018 diff --git a/doc/ChangeSum b/doc/ChangeSum index d0df950aaf..1db9102fd6 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + clm4_5_18_r273 sacks 01/26/2018 Support a standalone checkout from git clm4_5_18_r272 erik 01/25/2018 Bring in latest FATES release version to CLM trunk: fates_s1.4.1_a3.0.0_rev2 clm4_5_18_r271 erik 01/20/2018 Update testlist to v2 and remove yellowstone clm4_5_18_r270 sacks 12/20/2017 Always use multiple elevation classes for glacier, even with stub glc From e056b60183db31e76dc84e1bc04d6af643f36f54 Mon Sep 17 00:00:00 2001 From: Bill Sacks Date: Fri, 26 Jan 2018 15:32:14 -0700 Subject: [PATCH 31/31] add a bit to changelog --- doc/ChangeLog | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index 0e10c18ffe..d31fbf27e9 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -16,6 +16,9 @@ of the necessary externals by running: ./manage_externals/checkout_externals +See the file README_EXTERNALS.rst at the top level of the repository for +more details on using this tool. + Other than that, this PR mostly involves changes to allow you to run from the new directory structure of a standalone checkout: Now all of the CLM/CTSM directories appear at the top level of the checkout, rather @@ -44,6 +47,9 @@ Caveats for users (e.g., need to interpolate initial conditions): - To get all externals after cloning the git repository, run ./manage_externals/checkout_externals + See the file README_EXTERNALS.rst at the top level of the repository + for more details on using this tool. + - More details on working with the git repository will be coming soon Changes to CLM's user interface (e.g., new/renamed XML or namelist variables): none