Commit

Make some files compatible with Python3
Also enlarge the set of files tested with flake8 under Python3
nsoranzo committed Jun 3, 2016
1 parent 30fd1d6 commit cc336f6
Showing 25 changed files with 325 additions and 242 deletions.
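Across the files below, the same handful of Python 2/3 compatibility idioms recur: a from __future__ import print_function line at the top of each module, and imports routed through six.moves so that renamed standard-library modules resolve on both interpreters. A minimal, self-contained sketch of the pattern (the config section and query values are illustrative, not taken from Galaxy):

from __future__ import print_function

from six.moves import configparser
from six.moves.urllib.parse import urlencode

# print() is a function on Python 3 and, with the __future__ import, on Python 2 too.
print("query string:", urlencode({"db": "hg19"}))

# ConfigParser (Python 2) vs configparser (Python 3): six.moves hides the rename.
config = configparser.RawConfigParser()
config.add_section("server")
config.set("server", "port", "8080")
print("port =", config.get("server", "port"))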
52 changes: 48 additions & 4 deletions .ci/py3_sources.txt
@@ -1,5 +1,19 @@
lib/galaxy/util/
lib/galaxy/jobs/runners/util/
contrib/
cron/
lib/galaxy/actions/
lib/galaxy/auth/
lib/galaxy/config.py
lib/galaxy/dependencies/
lib/galaxy/eggs/
lib/galaxy/exceptions/
lib/galaxy/external_services/
lib/galaxy/forms/
lib/galaxy/jobs/
lib/galaxy/objectstore/
lib/galaxy/openid/
lib/galaxy/quota/
lib/galaxy/sample_tracking/
lib/galaxy/tags/
lib/galaxy/tools/cwl/
lib/galaxy/tools/parser/
lib/galaxy/tools/lint.py
@@ -10,8 +24,38 @@ lib/galaxy/tools/linters/
lib/galaxy/tools/deps/
lib/galaxy/tools/toolbox/
lib/galaxy/tools/parser/
lib/galaxy/jobs/metrics/
lib/galaxy/objectstore/
lib/galaxy/tours/
lib/galaxy/util/
lib/galaxy/work/
lib/galaxy_ext/
lib/galaxy_utils/
lib/log_tempfile.py
lib/psyco_full.py
lib/tool_shed/capsule/
lib/tool_shed/dependencies/
lib/tool_shed/grids/
lib/tool_shed/managers/
lib/tool_shed/metadata/
lib/tool_shed/repository_types/
lib/tool_shed/tools/
lib/tool_shed/util/
lib/tool_shed/utility_containers/
scripts/api/common.py
scripts/api/display.py
scripts/api/workflow_execute_parameters.py
scripts/auth/
scripts/cleanup_datasets/admin_cleanup_datasets.py
scripts/cleanup_datasets/cleanup_datasets.py
test/api/test_workflows_from_yaml.py
test/base/
test/casperjs/
test/functional/
test/integration/
test/manual/
test/unit/tools/test_actions.py
test/unit/workflows/test_run_parameters.py
tool_list.py
tools/data_source/
tools/evolution/
tools/sr_mapping/
tools/visualization/
11 changes: 7 additions & 4 deletions contrib/galaxy_config_merger.py
@@ -23,11 +23,14 @@
Script for merging specific local Galaxy config galaxy.ini.cri with default Galaxy galaxy.ini.sample
'''
import ConfigParser
from __future__ import print_function

import logging
import optparse
import sys

from six.moves import configparser


def main():
# logging configuration
@@ -42,15 +45,15 @@ def main():

for option in ['sample', 'config']:
if getattr(options, option) is None:
print "Please supply a --%s parameter.\n" % (option)
print("Please supply a --%s parameter.\n" % (option))
parser.print_help()
sys.exit()

config_sample = ConfigParser.RawConfigParser()
config_sample = configparser.RawConfigParser()
config_sample.read(options.sample)
config_sample_content = open(options.sample, 'r').read()

config = ConfigParser.RawConfigParser()
config = configparser.RawConfigParser()
config.read(options.config)

logging.info("Merging your own config file %s into the sample one %s." % (options.config, options.sample))
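The change above replaces Python 2's ConfigParser module with six.moves.configparser, which resolves to ConfigParser on Python 2 and configparser on Python 3. A minimal sketch of the same read-and-inspect pattern; the file name here is hypothetical, and read() simply skips it if it does not exist:

from __future__ import print_function

from six.moves import configparser

config_sample = configparser.RawConfigParser()
config_sample.read("galaxy.ini.sample")  # missing files are silently ignored
for section in config_sample.sections():
    for name, value in config_sample.items(section):
        print("%s.%s = %s" % (section, name, value))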
17 changes: 9 additions & 8 deletions contrib/nagios/check_galaxy.py
@@ -3,6 +3,7 @@
check_galaxy can be run by hand, although it is meant to run from cron
via the check_galaxy.sh script in Galaxy's cron/ directory.
"""
from __future__ import print_function

import filecmp
import formatter
@@ -63,7 +64,7 @@ def usage():
try:
opts, args = getopt.getopt( sys.argv[1:], 'n' )
except getopt.GetoptError as e:
print str(e)
print(str(e))
usage()
if len( args ) < 1:
usage()
@@ -75,15 +76,15 @@ def usage():
for o, a in opts:
if o == "-n":
if debug:
print "Specified -n, will create a new history"
print("Specified -n, will create a new history")
new_history = True
else:
usage()

# state information
var_dir = os.path.join( os.path.expanduser('~'), ".check_galaxy", server )
if not os.access( var_dir, os.F_OK ):
os.makedirs( var_dir, 0700 )
os.makedirs( var_dir, 0o700 )

# default timeout for twill browser is never
socket.setdefaulttimeout(300)
@@ -256,7 +257,7 @@ def wait(self):
def check_state(self):
if self.hda_state != "ok":
self.get("/datasets/%s/stderr" % self.hda_id)
print tc.browser.get_html()
print(tc.browser.get_html())
raise Exception("HDA %s NOT OK: %s" % (self.hda_id, self.hda_state))

def diff(self):
@@ -281,7 +282,7 @@ def delete_datasets(self):
self.get('/datasets/%s/delete' % hda['id'])
hdas = [hda['id'] for hda in self.undeleted_hdas]
if hdas:
print "Remaining datasets ids:", " ".join(hdas)
print("Remaining datasets ids:", " ".join(hdas))
raise Exception("History still contains datasets after attempting to delete them")

def check_if_logged_in(self):
@@ -344,7 +345,7 @@ def handle_data(self, data):

def dprint(str):
if debug:
print str
print(str)

# do stuff here
if __name__ == "__main__":
@@ -360,7 +361,7 @@ def dprint(str):
dprint("not logged in... logging in")
b.login(username, password)

for tool, params in tools.iteritems():
for tool, params in tools.items():

check_file = ""

@@ -388,5 +389,5 @@ def dprint(str):
b.diff()
b.delete_datasets()

print "OK"
print("OK")
sys.exit(0)
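Besides the print() conversions, check_galaxy.py needs two more Python 3 fixes visible above: the octal literal 0700 becomes 0o700 (the old form is a syntax error on Python 3), and dict.iteritems() becomes dict.items(), which works on both interpreters. A small sketch with made-up names:

from __future__ import print_function

import os

state_dir = os.path.join(os.path.expanduser("~"), ".example_check_state")  # illustrative path
if not os.access(state_dir, os.F_OK):
    os.makedirs(state_dir, 0o700)  # 0700 without the "o" is rejected by Python 3

tools = {"upload1": {"file_path": "input.txt"}}  # illustrative tool/params mapping
for tool, params in tools.items():  # iteritems() was removed in Python 3
    print(tool, params)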
16 changes: 9 additions & 7 deletions cron/build_chrom_db.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python

"""
Connects to a UCSC table browser and scrapes chrominfo for every build
specified by an input file (such as one output by parse_builds.py).
@@ -12,18 +11,21 @@
Usage:
python build_chrom_db.py dbpath/ [builds_file]
"""
from __future__ import print_function

import fileinput
import os
import sys
import urllib

from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import urlopen

import parse_builds


def getchrominfo(url, db):
tableURL = "http://genome-test.cse.ucsc.edu/cgi-bin/hgTables?"
URL = tableURL + urllib.urlencode({
URL = tableURL + urlencode({
"clade": "",
"org": "",
"db": db,
@@ -34,7 +36,7 @@ def getchrominfo(url, db):
"hgta_regionType": "",
"position": "",
"hgta_doTopSubmit": "get info"})
page = urllib.urlopen(URL)
page = urlopen(URL)
for line in page:
line = line.rstrip( "\r\n" )
if line.startswith("#"):
@@ -68,12 +70,12 @@ def getchrominfo(url, db):
for build in builds:
if build == "?":
continue # no lengths for unspecified chrom
print "Retrieving " + build
print("Retrieving " + build)
outfile_name = dbpath + build + ".len"
try:
with open(outfile_name, "w") as outfile:
for chrominfo in getchrominfo("http://genome-test.cse.ucsc.edu/cgi-bin/hgTables?", build):
print >> outfile, "\t".join(chrominfo)
print("\t".join(chrominfo), file=outfile)
except Exception as e:
print "Failed to retrieve %s: %s" % (build, e)
print("Failed to retrieve %s: %s" % (build, e))
os.remove(outfile_name)
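build_chrom_db.py shows two more conversions: Python 2's urllib.urlencode and urllib.urlopen are split across urllib.parse and urllib.request on Python 3, which six.moves.urllib papers over, and the statement form print >> outfile, text becomes print(text, file=outfile). A sketch with placeholder values (the output file name is just an example):

from __future__ import print_function

from six.moves.urllib.parse import urlencode

# urlencode lives in urllib on Python 2 and urllib.parse on Python 3;
# six.moves.urllib.request.urlopen covers the matching urlopen rename.
query = urlencode({"db": "hg19", "hgta_table": "chromInfo"})
print("http://genome-test.cse.ucsc.edu/cgi-bin/hgTables?" + query)

# The ">>" print redirection is Python 2 only; the file keyword works on both.
with open("example.len", "w") as outfile:
    print("chr1\t248956422", file=outfile)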
21 changes: 11 additions & 10 deletions cron/parse_builds.py
@@ -1,34 +1,35 @@
#!/usr/bin/env python

"""
Connects to the URL specified and outputs builds available at that
DSN in tabular format. UCSC Main gateway is used as default.
build description
"""
from __future__ import print_function

import sys
import urllib
import xml.etree.ElementTree as ElementTree

from six.moves.urllib.request import urlopen


def getbuilds(url):
try:
page = urllib.urlopen(url)
page = urlopen(url)
except:
print "#Unable to open " + url
print "?\tunspecified (?)"
print("#Unable to open " + url)
print("?\tunspecified (?)")
sys.exit(1)

text = page.read()
try:
tree = ElementTree.fromstring(text)
except:
print "#Invalid xml passed back from " + url
print "?\tunspecified (?)"
print("#Invalid xml passed back from " + url)
print("?\tunspecified (?)")
sys.exit(1)

print "#Harvested from " + url
print "?\tunspecified (?)"
print("#Harvested from " + url)
print("?\tunspecified (?)")
for dsn in tree:
build = dsn.find("SOURCE").attrib['id']
description = dsn.find("DESCRIPTION").text.replace(" - Genome at UCSC", "").replace(" Genome at UCSC", "")
@@ -49,4 +50,4 @@ def getbuilds(url):
else:
URL = "http://genome.cse.ucsc.edu/cgi-bin/das/dsn"
for build in getbuilds(URL):
print build[0] + "\t" + build[1] + " (" + build[0] + ")"
print(build[0] + "\t" + build[1] + " (" + build[0] + ")")
16 changes: 9 additions & 7 deletions cron/parse_builds_3_sites.py
@@ -2,10 +2,12 @@
"""
Connects to sites and determines which builds are available at each.
"""
from __future__ import print_function

import urllib
import xml.etree.ElementTree as ElementTree

from six.moves.urllib.request import urlopen

sites = ['http://genome.ucsc.edu/cgi-bin/',
'http://archaea.ucsc.edu/cgi-bin/',
'http://genome-test.cse.ucsc.edu/cgi-bin/']
@@ -18,27 +20,27 @@ def main():
trackurl = sites[i] + "hgTracks?"
builds = []
try:
page = urllib.urlopen(site)
page = urlopen(site)
except:
print "#Unable to connect to " + site
print("#Unable to connect to " + site)
continue
text = page.read()
try:
tree = ElementTree.fromstring(text)
except:
print "#Invalid xml passed back from " + site
print("#Invalid xml passed back from " + site)
continue
print "#Harvested from", site
print("#Harvested from", site)

for dsn in tree:
build = dsn.find("SOURCE").attrib['id']
builds.append(build)
build_dict = {}
for build in builds:
build_dict[build] = 0
builds = build_dict.keys()
builds = list(build_dict.keys())
yield [names[i], trackurl, builds]

if __name__ == "__main__":
for site in main():
print site[0] + "\t" + site[1] + "\t" + ",".join(site[2])
print(site[0] + "\t" + site[1] + "\t" + ",".join(site[2]))
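The remaining change above deals with dict.keys(): on Python 2 it returns a list, but on Python 3 it returns a view, so the script now wraps it in list() before yielding. Illustrated with made-up build names:

from __future__ import print_function

build_dict = {"hg19": 0, "mm10": 0, "dm6": 0}  # illustrative build names
builds = list(build_dict.keys())  # a plain list on both Python 2 and Python 3
print(",".join(sorted(builds)))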