Commit

Remove usage of default runner user. (pytorch#1469)
jysohn23 authored Dec 10, 2019
1 parent 7c5ece3 commit eda5b6b
Showing 1 changed file with 12 additions and 12 deletions.
24 changes: 12 additions & 12 deletions torch_xla/distributed/xla_dist.py
@@ -436,7 +436,6 @@ class DistributedExecutor(object):
       'XRT_SHARD_ORDINAL',
   ]
   DEFAULT_CONTAINER_NAME = 'pytorchtpudistrunner'
-  DEFAULT_USER_NAME = 'pytorchtpudistrunner'
 
   def __init__(self,
                cluster,
@@ -508,8 +507,7 @@ def _build_scp_cmd(self, local_path, remote_path, client_worker):
         '--internal-ip',
         '--zone={}'.format(client_worker._zone),
         local_path,
-        '{}@{}:{}'.format(self.DEFAULT_USER_NAME, client_worker._hostname,
-                          remote_path),
+        '{}:{}'.format(client_worker._hostname, remote_path),
     ]
 
   def _build_ssh_cmd(self, remote_cmd, client_worker):
@@ -522,7 +520,7 @@ def _build_ssh_cmd(self, remote_cmd, client_worker):
         'ssh',
         '--internal-ip',
         '--zone={}'.format(client_worker._zone),
-        '{}@{}'.format(self.DEFAULT_USER_NAME, client_worker._hostname),
+        '{}'.format(client_worker._hostname),
         '--command',
         '\'{}\''.format(remote_cmd),
     ]
@@ -656,14 +654,15 @@ def _gcloud_scp(local_path, remote_path, client_worker):
   def _cleanup(self, script_map):
 
     def _cleanup_worker(local_script, remote_script, client_worker):
-      rm_script = ['rm', remote_script]
-      self._build_and_run_ssh(rm_script, client_worker)
-      subprocess.call(['rm', local_script])
+      rm_tmp_dir = ['rm', '-rf', os.path.dirname(remote_script)]
+      self._build_and_run_ssh(rm_tmp_dir, client_worker)
+      subprocess.call(['rm', '-rf', os.path.dirname(local_script)])
       if self.docker_image:
         rm_container = ['docker', 'rm', '-f', self.docker_container]
         self._build_and_run_ssh(rm_container, client_worker)
-      rm_proc = ['pkill', '-u', self.DEFAULT_USER_NAME]
-      self._build_and_run_ssh(rm_proc, client_worker, log=False)
+      rm_pgroup = ('kill -9 -$(ps xao pid,pgid,cmd | grep {} | grep -v grep'
+                   ' | awk "{{print \$2}}")').format(remote_script)
+      self._build_and_run_ssh(rm_pgroup, client_worker, log=False)
 
     threads = []
     for client_worker in script_map:
@@ -727,9 +726,10 @@ def run(self, cmd):
 
 if __name__ == '__main__':
   parser = argparse.ArgumentParser(
-      description='PyTorch on TPU distrubuted training',
-      epilog=('Usage example: xla_dist.py --tpu=[TPU_NAME]'
-              ' --conda-env torch-xla-nightly -- python train'))
+      description='PyTorch on TPU distrubuted training launcher.',
+      epilog=('Usage example: python -m'
+              ' torch_xla.distributed.xla_dist --tpu=[TPU_NAME]'
+              ' --conda-env torch-xla-nightly -- python train.py'))
 
   cluster_group = parser.add_argument_group('Cluster Setup')
   cluster_group.add_argument(
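For reference, a minimal sketch (not the module itself) of how the SSH and SCP argument lists look after this change: the bare hostname replaces the user@hostname form, so gcloud authenticates as the invoking account rather than the removed pytorchtpudistrunner user. The 'gcloud -q compute ...' prefix is an assumption based on the flags shown in the hunks; hostname, zone, and path values are placeholders.

def build_ssh_args(remote_cmd, hostname, zone):
  # Assumed command prefix; only the flags below appear in the diff hunks.
  return [
      'gcloud', '-q', 'compute', 'ssh',
      '--internal-ip',
      '--zone={}'.format(zone),
      '{}'.format(hostname),  # was '{user}@{hostname}' before this commit
      '--command',
      '\'{}\''.format(remote_cmd),
  ]


def build_scp_args(local_path, remote_path, hostname, zone):
  # Assumed command prefix; only the flags below appear in the diff hunks.
  return [
      'gcloud', '-q', 'compute', 'scp',
      '--internal-ip',
      '--zone={}'.format(zone),
      local_path,
      '{}:{}'.format(hostname, remote_path),  # was '{user}@{host}:{path}'
  ]


if __name__ == '__main__':
  print(build_ssh_args('echo ready', 'tpu-worker-0', 'us-central1-a'))
  print(build_scp_args('/tmp/train.sh', '/tmp/train.sh', 'tpu-worker-0',
                       'us-central1-a'))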

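With no dedicated runner user left to pkill, cleanup instead kills the process group that owns the remote training script. A minimal sketch (not the module itself) of how that replacement command string is assembled, using a hypothetical script path:

# The remote script path is hypothetical; the shell pipeline mirrors the one
# introduced in the cleanup hunk above.
remote_script = '/tmp/12345/dist_training_ptxla_0.sh'
rm_pgroup = ('kill -9 -$(ps xao pid,pgid,cmd | grep {} | grep -v grep'
             ' | awk "{{print \$2}}")').format(remote_script)
# '\$' survives Python formatting, so the shell's double quotes pass a literal
# $2 to awk, which selects the PGID column of `ps xao pid,pgid,cmd`.
print(rm_pgroup)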