# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def default(self, line):
    """Treat any unrecognized prompt input as a shell command for the workers.

    Kills the previously running remote command (if any), waits briefly for
    the kill to take effect, then starts *line* on all reserved cores.
    Updates the module-level ``workers`` handle; reads ``cores``.
    """
    global interrupted, workers, cores
    interrupted = False
    print('interrupting previous command')
    workers.kill()
    # Give the remote processes a moment to die before reusing the cores.
    execo.sleep(1)
    # BUGFIX: the original sent the command twice (the whole announce/start
    # sequence was duplicated), launching *line* a second time on every node
    # and leaking the first Remote handle. Send it exactly once.
    print('sending command: ' + line)
    workers = execo.Remote(
        line,
        cores).start()
app = App()
if jobid:
    try:
        # Block until the OAR reservation is actually running, then look up
        # which nodes it gave us.
        print('Waiting for job to start')
        execo_g5k.wait_oar_job_start(jobid, site)
        print('Retrieving nodes')
        nodes = execo_g5k.get_oar_job_nodes(jobid, site)
        # Run the setup command on every node and wait for its completion
        # marker before tearing the setup processes down.
        print('Preparing workers with cmd: ' + setup_cmd)
        workers = execo.Remote(
            setup_cmd,
            nodes).start()
        workers.expect('Worker Setup Completed')
        workers.kill()
        # Possibly open more than one connection per machine.
        cores = nodes * args.nb_cores
        print(cores)
        print('Example cmd: %s' % (workers_cmd))
        # Hand control to the interactive prompt; commands typed there are
        # dispatched to the workers by App.
        app.prompt = '%s (%d node(s), %d core(s)/node)> ' % (site, args.volunteers, args.nb_cores)
        app.cmdloop()
    finally:
        # Always release the OAR reservation, even on error or Ctrl-C.
        execo_g5k.oardel([(jobid, site)])
def exec_command_on_nodes(self, nodes, cmd, label, conn_params=None):
    """Run *cmd* on one node (id or hostname) or a list of nodes.

    Logs *label* before running. Terminates the whole program with exit
    status 31 if the remote command does not finish successfully.
    """
    target_nodes = nodes if isinstance(nodes, list) else [nodes]
    params = DEFAULT_CONN_PARAMS if conn_params is None else conn_params
    logger.info(label)
    action = EX.Remote(cmd, target_nodes, params)
    action.run()
    if not action.finished_ok:
        sys.exit(31)
def _initialize_conf(self):
    """Merge locally-specified configuration files with default files
    from the distribution"""
    # Instantiate the MapReduce config from its template on every host,
    # then let the parent class finish the shared initialization.
    template_path = os.path.join(self.conf_dir, MR_CONF_FILE + ".template ")
    conf_path = os.path.join(self.conf_dir, MR_CONF_FILE)
    copy_action = Remote("cp " + template_path + conf_path, self.hosts)
    copy_action.run()
    super(HadoopV2Cluster, self)._initialize_conf()