Merge pull request #75 from CDAT/release_process
Release process
Showing 4 changed files with 355 additions and 241 deletions.
@@ -11,11 +11,16 @@

from Utils import run_cmd, run_cmds, run_cmd_capture_output
from Utils import SUCCESS, FAILURE
from release_tools import prep_conda_env, check_if_conda_forge_pkg, clone_feedstock
from release_tools import clone_repo, prepare_recipe_in_local_feedstock_repo
from release_tools import prepare_recipe_in_local_repo, rerender, do_build
from release_tools import rerender_in_local_feedstock, build_in_local_feedstock
from release_tools import rerender_in_local_repo, build_in_local_repo

p = subprocess.Popen(["git", "rev-parse", "--short", "HEAD"], stdout=subprocess.PIPE)
git_rev_parse = p.stdout.read().decode('utf-8')
git_rev = "g{0}".format(git_rev_parse).strip()
print("XXX XXX XXX git_rev: {g}".format(g=git_rev))
print("git_rev: {g}".format(g=git_rev))
if "VERSION" in os.environ.keys():
    last_stable = os.environ['VERSION']
else:

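As a quick illustration (not part of this diff), "git rev-parse --short HEAD" prints an abbreviated commit hash such as 1a2b3c4 (hypothetical), and the snippet above turns it into the nightly suffix g1a2b3c4; a minimal equivalent sketch using subprocess.run:

import subprocess

# Sketch only: same logic as the Popen/stdout.read() pattern above.
short_hash = subprocess.run(
    ["git", "rev-parse", "--short", "HEAD"],
    stdout=subprocess.PIPE, check=True,
).stdout.decode("utf-8")
git_rev = "g{0}".format(short_hash).strip()  # e.g. "g1a2b3c4" (hypothetical hash)
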
@@ -106,244 +111,6 @@

shell_cmd = False
verbose = True

def prep_conda_env():
    if do_conda_clean:
        cmd = "conda clean --all"
        ret = run_cmd(cmd, join_stderr, shell_cmd, verbose, workdir)

    pkgs = "conda-build anaconda-client conda-smithy conda-verify conda-forge-pinning conda-forge-build-setup conda-forge-ci-setup"
    cmds = [
        #"conda update -y -q conda",
        "conda config --add channels conda-forge --force",
        "conda config --set channel_priority strict",
        "conda install -n base -c conda-forge {p}".format(p=pkgs),
        "conda config --set anaconda_upload no"
    ]
    ret = run_cmds(cmds)
    return ret

def check_if_conda_forge_pkg(pkg_name):
    url = "https://www.github.com/conda-forge/{pkg}-feedstock".format(pkg=pkg_name)
    try:
        request = requests.get(url)
        if request.status_code == 200:
            print("{p} is a conda-forge package".format(p=pkg_name))
            return True
        else:
            print("{p} is not a conda-forge package".format(p=pkg_name))
            return False
    except requests.ConnectionError:
        print("Web site does not exist")
        print("{p} is not a conda-forge package".format(p=pkg_name))
        return False

def clone_feedstock(pkg_name, workdir):
    pkg_feedstock = "{p}-feedstock".format(p=pkg_name)
    conda_forge_pkg_feedstock = "conda-forge/{p}".format(p=pkg_feedstock)

    feedstock_repo_dir = os.path.join(workdir, pkg_feedstock)
    if os.path.exists(feedstock_repo_dir):
        print("REMOVING existing {d}".format(d=feedstock_repo_dir))
        shutil.rmtree(feedstock_repo_dir)

    cmd = "git clone [email protected]:{c}.git".format(c=conda_forge_pkg_feedstock)
    ret = run_cmd(cmd, join_stderr, shell_cmd, verbose, workdir)

    return ret

def clone_repo(repo_name, branch, workdir):
    repo_dir = os.path.join(workdir, repo_name)
    if os.path.exists(repo_dir):
        shutil.rmtree(repo_dir)

    repo_url = "https://github.com/{o}/{r}.git\n\n".format(o=organization,
                                                           r=repo_name)
    if branch == "master":
        cmd = "git clone {u}".format(u=repo_url)
    else:
        cmd = "git clone -b {b} {u}".format(b=branch, u=repo_url)
    ret = run_cmd(cmd, join_stderr, shell_cmd, verbose, workdir)

    return ret, repo_dir

def prepare_recipe_in_local_feedstock_repo(pkg_name, repo_name, branch, pkg_version, repo_dir, workdir):
    repo_url = "https://github.com/{o}/{r}.git\n\n".format(o=organization, r=repo_name)

    pkg_feedstock = "{p}-feedstock".format(p=pkg_name)
    feedstock_dir = os.path.join(workdir, pkg_feedstock)
    recipe_file = os.path.join(feedstock_dir, 'recipe', 'meta.yaml')

    #
    # If the repo has a recipe/meta.yaml.in, this means the branch is updating
    # the recipe; use this recipe to build.
    # NOTE: when we build the package for conda-forge, we will need to
    # merge this recipe into the feedstock and delete the recipe from the repo.
    #
    repo_recipe = os.path.join(repo_dir, "recipe", "meta.yaml.in")
    if os.path.isfile(repo_recipe):
        print("\nNOTE: {r} exists, we will build using this recipe.\n".format(r=repo_recipe))
        recipe_file_source = repo_recipe
    else:
        print("\nNOTE: building with feedstock recipe with modified package source\n")
        recipe_file_source = os.path.join(feedstock_dir, 'recipe', 'meta.yaml.SRC')

    cmd = "mv {src} {dest}".format(src=recipe_file, dest=recipe_file_source)
    ret = run_cmd(cmd, join_stderr, shell_cmd, verbose)
    if ret != SUCCESS:
        return ret

    orig_fh = open(recipe_file_source, "r")
    output_fh = open(recipe_file, "w")

    output_fh.write("package:\n")
    output_fh.write(" name: {n}\n".format(n=pkg_name))
    output_fh.write(" version: {v}\n\n".format(v=pkg_version))

    output_fh.write("source:\n")
    output_fh.write(" git_rev: {b}\n".format(b=branch))
    output_fh.write(" git_url: {r}\n".format(r=repo_url))

    start_copy = False
    lines = orig_fh.readlines()
    for l in lines:
        match_obj = re.match("build:", l)
        if match_obj:
            start_copy = True

        match_build_number = re.match("\s+number:", l)
        if match_build_number:
            output_fh.write(" number: {b}\n".format(b=build))
            continue
        if start_copy:
            output_fh.write(l)
        else:
            continue
    output_fh.close()
    orig_fh.close()

    cmd = "cat {f}".format(f=recipe_file)
    #ret = run_cmd(cmd, join_stderr, shell_cmd, verbose)
    print("CMD: {c}".format(c=cmd))
    os.system(cmd)

    return SUCCESS

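For illustration only (not part of this commit), the header that prepare_recipe_in_local_feedstock_repo writes to recipe/meta.yaml, before copying the rest of the source recipe from its build: section onward, looks roughly like the string below; the package name, version, branch, and URL are hypothetical.

# Sketch of the generated meta.yaml header (hypothetical values).
expected_header = (
    "package:\n"
    " name: cdms2\n"
    " version: 3.1.5\n"
    "\n"
    "source:\n"
    " git_rev: my_branch\n"
    " git_url: https://github.com/CDAT/cdms2.git\n"
)
print(expected_header)
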
def prepare_recipe_in_local_repo(branch, build, version, repo_dir):

    recipe_in_file = os.path.join(repo_dir, "recipe", "meta.yaml.in")
    recipe_file = os.path.join(repo_dir, "recipe", "meta.yaml")
    if not os.path.isfile(recipe_in_file):
        print("Cannot find {r} file".format(r=recipe_in_file))
        return FAILURE

    with open(recipe_in_file, "r") as recipe_in_fh:
        s = recipe_in_fh.read()
        s = s.replace("@UVCDAT_BRANCH@", branch)
        s = s.replace("@BUILD_NUMBER@", build)
        s = s.replace("@VERSION@", version)

    # write it out to recipe/meta.yaml file
    with open(recipe_file, "w") as f:
        f.write(s)

    cmd = "cat {f}".format(f=recipe_file)
    print("CMD: {c}".format(c=cmd))
    os.system(cmd)

    return SUCCESS

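A minimal sketch (not part of the commit) of the substitution prepare_recipe_in_local_repo performs, using a toy meta.yaml.in fragment; only the @...@ placeholders come from the code above, the package name and values are hypothetical.

# Sketch only: toy template containing the placeholders the function replaces.
template = (
    "package:\n"
    "  name: cdms2\n"
    "  version: @VERSION@\n"
    "\n"
    "source:\n"
    "  git_rev: @UVCDAT_BRANCH@\n"
    "\n"
    "build:\n"
    "  number: @BUILD_NUMBER@\n"
)
rendered = (template.replace("@UVCDAT_BRANCH@", "my_branch")
                    .replace("@BUILD_NUMBER@", "0")
                    .replace("@VERSION@", "3.1.5"))
print(rendered)
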
def rerender(dir):
    # pkg_feedstock = "{p}-feedstock".format(p=pkg_name)
    # repo_dir = "{w}/{p}".format(w=workdir, p=pkg_feedstock)

    cmd = "conda smithy rerender"
    ret = run_cmd(cmd, join_stderr, shell_cmd, verbose, dir)
    if ret != SUCCESS:
        return ret
    return ret

def do_build(dir, py_version):
    ret = SUCCESS
    variant_files_dir = os.path.join(dir, ".ci_support")
    if py_version == "noarch":
        variant_file = os.path.join(variant_files_dir, "linux_.yaml")
        cmd = "conda build -m {v} recipe/".format(v=variant_file)
        ret = run_cmd(cmd, join_stderr, shell_cmd, verbose, dir)
    else:
        if sys.platform == 'darwin':
            variant_files = glob.glob("{d}/.ci_support/osx*{v}*.yaml".format(d=dir, v=py_version))
        else:
            variant_files = glob.glob("{d}/.ci_support/linux*{v}*.yaml".format(d=dir, v=py_version))

        for variant_file in variant_files:
            cmd = "conda build -m {v} recipe/".format(v=variant_file)
            ret = run_cmd(cmd, join_stderr, shell_cmd, verbose, dir)
            if ret != SUCCESS:
                print("FAIL: {c}".format(c=cmd))
                break

    return ret

def rerender_in_local_feedstock(pkg_name, workdir):
    pkg_feedstock = "{p}-feedstock".format(p=pkg_name)
    repo_dir = os.path.join(workdir, pkg_feedstock)

    ret = rerender(repo_dir)
    if ret != SUCCESS:
        print("FAIL...rerender in {d}".format(d=repo_dir))
    return ret

def build_in_local_feedstock(pkg_name, workdir, py_version):
    pkg_feedstock = "{p}-feedstock".format(p=pkg_name)
    repo_dir = os.path.join(workdir, pkg_feedstock)

    ret = do_build(repo_dir, py_version)
    return ret

def update_variant_files(dir):
    if sys.platform == 'darwin':
        variant_config_files = glob.glob("{d}/.ci_support/osx*.yaml".format(d=dir))
    else:
        variant_config_files = glob.glob("{d}/.ci_support/linux*.yaml".format(d=dir))
    for f in variant_config_files:
        tmp_f = "{fname}.tmp".format(fname=f)
        tmp_fh = open(tmp_f, "w")
        orig_fh = open(f, "r")
        channel_source = False
        for l in orig_fh:
            match_obj = re.match("channel_sources:", l)
            if match_obj:
                channel_source = True
                tmp_fh.write(l)
                continue
            if channel_source:
                tmp_fh.write("- cdat/label/nightly,conda-forge,defaults\n")
                channel_source = False
                continue
            tmp_fh.write(l)
        orig_fh.close()
        tmp_fh.close()
        shutil.move(tmp_f, f)
    return SUCCESS
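A small sketch (not part of the commit) of the rewrite update_variant_files applies to each .ci_support/*.yaml: the line following channel_sources: is swapped so nightly cdat packages take priority. The "before" content below is an assumption about a typical conda-forge variant file.

# Sketch only: apply the same line-level rewrite to an in-memory sample.
sample = "channel_sources:\n- conda-forge,defaults\nchannel_targets:\n- conda-forge main\n"
out_lines = []
replace_next = False
for line in sample.splitlines(keepends=True):
    if line.startswith("channel_sources:"):
        replace_next = True
        out_lines.append(line)
        continue
    if replace_next:
        out_lines.append("- cdat/label/nightly,conda-forge,defaults\n")
        replace_next = False
        continue
    out_lines.append(line)
print("".join(out_lines))
# channel_sources:
# - cdat/label/nightly,conda-forge,defaults
# channel_targets:
# - conda-forge main
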
|
||
def rerender_in_local_repo(repo_dir): | ||
|
||
conda_forge_yml = os.path.join(repo_dir, "conda-forge.yml") | ||
fh = open(conda_forge_yml, "w") | ||
fh.write("recipe_dir: recipe\n") | ||
fh.close() | ||
|
||
ret = rerender(repo_dir) | ||
return ret | ||
|
||
ret = update_variant_files(repo_dir) | ||
return ret | ||
|
||
def build_in_local_repo(repo_dir, py_version): | ||
|
||
ret = do_build(repo_dir, py_version) | ||
return ret | ||
|
||
# | ||
# main | ||
# | ||
|
@@ -355,7 +122,7 @@ def build_in_local_repo(repo_dir, py_version): | |
if status != SUCCESS: | ||
sys.exit(status) | ||
|
||
ret, repo_dir = clone_repo(repo_name, branch, workdir) | ||
ret, repo_dir = clone_repo(organization, repo_name, branch, workdir) | ||
if ret != SUCCESS: | ||
sys.exit(ret) | ||
|
||
|
@@ -365,7 +132,7 @@ def build_in_local_repo(repo_dir, py_version): | |
if status != SUCCESS: | ||
sys.exit(status) | ||
|
||
status = prepare_recipe_in_local_feedstock_repo(pkg_name, repo_name, branch, version, repo_dir, workdir) | ||
status = prepare_recipe_in_local_feedstock_repo(pkg_name, organization, repo_name, branch, version, build, repo_dir, workdir) | ||
if status != SUCCESS: | ||
sys.exit(status) | ||
|
||
|
@@ -0,0 +1,34 @@
import sys
import json
import argparse

parser = argparse.ArgumentParser(
    description='conda build upload',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)

parser.add_argument("-r", "--cdat_release",
                    help="cdat_release_version, ex: 8.2.1")
parser.add_argument("-t", "--package_type",
                    help="'cdat' or 'conda-forge' package")
parser.add_argument("-p", "--package_name",
                    help="Package name to build")
parser.add_argument("-f", "--release_info_json_file_name",
                    help="release_info.json full path name")

args = parser.parse_args(sys.argv[1:])

cdat_release = args.cdat_release
package_type = args.package_type
package_name = args.package_name
release_info_file = args.release_info_json_file_name

with open(release_info_file) as json_file:
    release_info = json.load(json_file)
    pkg_info = release_info[cdat_release][package_type][package_name]
    version = pkg_info['version']
    build = pkg_info['build']
    type = pkg_info['type']
    ret_info = "{v}:{b}:{t}".format(v=version, b=build, t=type)
    print(ret_info)

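To illustrate the new helper above (not part of the commit), a sketch of the release_info.json layout it expects and how it might be invoked; the package name, version numbers, script filename, and 'type' value are hypothetical, only the key nesting (release, then package type, then package name, then version/build/type) comes from the code.

import json

# Hypothetical release_info.json content matching the lookups above.
release_info = {
    "8.2.1": {                       # cdat_release (-r / --cdat_release)
        "cdat": {                    # package_type (-t): 'cdat' or 'conda-forge'
            "cdms2": {               # package_name (-p), hypothetical
                "version": "3.1.5",  # hypothetical
                "build": "0",
                "type": "noarch",    # hypothetical
            }
        }
    }
}

with open("release_info.json", "w") as f:
    json.dump(release_info, f, indent=2)

# Hypothetical invocation (script name assumed):
#   python get_package_info.py -r 8.2.1 -t cdat -p cdms2 -f release_info.json
# would print: 3.1.5:0:noarch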