hypernets.hyperctl package

Submodules

hypernets.hyperctl.api module

hypernets.hyperctl.api.get_job(job_name, api_server_portal)[source]
hypernets.hyperctl.api.get_job_data_dir()[source]
hypernets.hyperctl.api.get_job_params()[source]
hypernets.hyperctl.api.inject(params, job_data_dir=None)[source]
hypernets.hyperctl.api.kill_job(api_server_portal, job_name)[source]
hypernets.hyperctl.api.list_jobs(api_server_portal)[source]
hypernets.hyperctl.api.reset_dev_params()[source]

hypernets.hyperctl.appliation module

class hypernets.hyperctl.appliation.BatchApplication(batch: hypernets.hyperctl.batch.Batch, server_host='localhost', server_port=8060, scheduler_exit_on_finish=True, scheduler_interval=5000, scheduler_callbacks=None, scheduler_signal_file=None, independent_tmp=True, backend_conf=None, **kwargs)[source]

Bases: object

static load(batch_spec_dict: Dict, batch_data_dir)[source]
server_host
server_port
start()[source]
stop()[source]
summary_batch()[source]
to_config()[source]

hypernets.hyperctl.batch module

class hypernets.hyperctl.batch.BackendConf(type='local', conf: Dict = None)[source]

Bases: object

to_config()[source]
class hypernets.hyperctl.batch.Batch(*, name, job_command, data_dir: str)[source]

Bases: object

FILE_CONFIG = 'config.json'
FILE_PID = 'server.pid'
STATUS_FINISHED = 'FINISHED'
STATUS_NOT_START = 'NOT_START'
STATUS_RUNNING = 'RUNNING'
add_job(name, **kwargs)[source]
config_file_path()[source]
data_dir_path
elapsed
get_job_by_name(job_name) → Optional[hypernets.hyperctl.batch._ShellJob][source]
get_persisted_job_status(job_name)[source]
is_finished()[source]
job_state_data_file_path(job_name)[source]
job_status_file_path(job_name, status)[source]
jobs
pid()[source]
pid_file_path()[source]
status()[source]
status_files()[source]
summary()[source]
class hypernets.hyperctl.batch.ServerConf(host='localhost', port=8060, exit_on_finish=False)[source]

Bases: object

portal
to_config()[source]

hypernets.hyperctl.callbacks module

class hypernets.hyperctl.callbacks.BatchCallback[source]

Bases: object

on_finish(batch, elapsed: float)[source]

Batch finished

on_job_break(batch, job, exception)[source]

Job failed before running

on_job_failed(batch, job, executor, elapsed: float)[source]

Job ran failed

on_job_start(batch, job, executor)[source]
on_job_succeed(batch, job, executor, elapsed: float)[source]
on_start(batch)[source]
class hypernets.hyperctl.callbacks.ConsoleCallback[source]

Bases: hypernets.hyperctl.callbacks.BatchCallback

on_finish(batch, elapsed: float)[source]

Batch finished

on_job_break(batch, job, exception)[source]

Job failed before running

on_job_failed(batch, job, executor, elapsed: float)[source]

Job ran failed

on_job_start(batch, job, executor)[source]
on_job_succeed(batch, job, executor, elapsed: float)[source]
on_start(batch)[source]
class hypernets.hyperctl.callbacks.VisDOMCallback(n_tail_jobs=100, elapsed_cut_bins=10, datetime_qcut_bins=10)[source]

Bases: hypernets.hyperctl.callbacks.BatchCallback

on_job_break(batch, job, exception)[source]

Job failed before running

on_job_failed(batch, job, executor, elapsed: float)[source]

Job ran failed

on_job_start(batch, job, executor)[source]
on_job_succeed(batch: hypernets.hyperctl.batch.Batch, job, executor, elapsed: float)[source]
on_start(batch: hypernets.hyperctl.batch.Batch)[source]

hypernets.hyperctl.cli module

hypernets.hyperctl.cli.get_default_batches_data_dir()[source]
hypernets.hyperctl.cli.main()[source]

Examples

cd hypernets/tests/hyperctl/
hyperctl run --config ./local_batch.json
hyperctl batch list
hyperctl job list --batch-name=local-batch-example
hyperctl job describe --job-name=job1 --batch-name=local-batch-example
hyperctl job kill --job-name=job1 --batch-name=local-batch-example
hyperctl job kill --job-name=job2 --batch-name=local-batch-example
hyperctl batch list

Returns:
hypernets.hyperctl.cli.run_batch_config(config_dict, batches_data_dir)[source]
hypernets.hyperctl.cli.run_batch_config_file(config, batches_data_dir)[source]
hypernets.hyperctl.cli.run_generate_job_specs(template, output)[source]
hypernets.hyperctl.cli.run_kill_job(batch_name, job_name, batches_data_dir)[source]
hypernets.hyperctl.cli.run_show_batches(batches_data_dir)[source]
hypernets.hyperctl.cli.run_show_jobs(batch_name, batches_data_dir)[source]
hypernets.hyperctl.cli.show_job(batch_name, job_name, batches_data_dir)[source]

hypernets.hyperctl.consts module

hypernets.hyperctl.consts.default_batches_data_dir(batches_data_dir)[source]

hypernets.hyperctl.executor module

class hypernets.hyperctl.executor.ExecutorManager(api_server_portal)[source]

Bases: object

alloc_executor(job)[source]
allocated_executors()[source]
get_executor(job)[source]
kill_executor(executor)[source]
prepare()[source]
release_executor(executor)[source]
waiting_executors()[source]
class hypernets.hyperctl.executor.LocalExecutorManager(api_server_portal, environments=None)[source]

Bases: hypernets.hyperctl.executor.ExecutorManager

alloc_executor(job)[source]
allocated_executors()[source]
kill_executor(executor: hypernets.hyperctl.executor.LocalShellExecutor)[source]
release_executor(executor)[source]
class hypernets.hyperctl.executor.LocalShellExecutor(*args, **kwargs)[source]

Bases: hypernets.hyperctl.executor.ShellExecutor

close()[source]
kill()[source]
prepare_assets()[source]
run(*, independent_tmp=True)[source]
status()[source]
exception hypernets.hyperctl.executor.NoResourceException[source]

Bases: Exception

class hypernets.hyperctl.executor.RemoteSSHExecutorManager(api_server_portal, machines: List[hypernets.hyperctl.executor.SSHRemoteMachine])[source]

Bases: hypernets.hyperctl.executor.ExecutorManager

alloc_executor(job)[source]
allocated_executors()[source]
kill_executor(executor)[source]
prepare()[source]
release_executor(executor)[source]
class hypernets.hyperctl.executor.RemoteShellExecutor(job: hypernets.hyperctl.batch._ShellJob, api_server_portal, machine: hypernets.hyperctl.executor.SSHRemoteMachine)[source]

Bases: hypernets.hyperctl.executor.ShellExecutor

close()[source]
connections
finished()[source]
kill()[source]
prepare_assets(sftp_client)[source]
run(*, independent_tmp=True)[source]
status()[source]
class hypernets.hyperctl.executor.SSHRemoteMachine(connection, environments=None)[source]

Bases: object

alloc(cpu, ram, gpu)[source]
hostname
release(released_usage)[source]
test_connection()[source]
static total_resources()[source]
usage
class hypernets.hyperctl.executor.ShellExecutor(job: hypernets.hyperctl.batch._ShellJob, api_server_portal, environments=None)[source]

Bases: object

close()[source]
post()[source]
prepare()[source]
run(*, independent_tmp=True)[source]
status()[source]
hypernets.hyperctl.executor.create_executor_manager(backend_conf, server_host, server_port)[source]

hypernets.hyperctl.scheduler module

class hypernets.hyperctl.scheduler.JobScheduler(*, batch, exit_on_finish, interval, executor_manager: hypernets.hyperctl.executor.ExecutorManager, callbacks=None, signal_file=None, independent_tmp=True)[source]

Bases: object

a FIFO scheduler

attempt_scheduling()[source]
static change_job_status(batch: hypernets.hyperctl.batch.Batch, job: hypernets.hyperctl.batch._ShellJob, next_status)[source]
interval
kill_job(job_name)[source]
n_allocated
n_skipped
start()[source]
stop()[source]

hypernets.hyperctl.server module

class hypernets.hyperctl.server.BaseHandler(application: tornado.web.Application, request: tornado.httputil.HTTPServerRequest, **kwargs)[source]

Bases: tornado.web.RequestHandler

data_received(chunk: bytes) → Optional[Awaitable[None]][source]

Implement this method to handle streamed request data.

Requires the .stream_request_body decorator.

May be a coroutine for flow control.

get_request_as_dict()[source]
response(result: dict = None, code=0)[source]
response_json(response_dict)[source]
send_error_content(msg)[source]
class hypernets.hyperctl.server.HyperctlWebApplication(host='localhost', port=8060, **kwargs)[source]

Bases: tornado.web.Application

portal
class hypernets.hyperctl.server.IndexHandler(application: tornado.web.Application, request: tornado.httputil.HTTPServerRequest, **kwargs)[source]

Bases: hypernets.hyperctl.server.BaseHandler

get(*args, **kwargs)[source]
class hypernets.hyperctl.server.JobHandler(application: tornado.web.Application, request: tornado.httputil.HTTPServerRequest, **kwargs)[source]

Bases: hypernets.hyperctl.server.BaseHandler

get(job_name, **kwargs)[source]
initialize(batch: hypernets.hyperctl.batch.Batch)[source]
class hypernets.hyperctl.server.JobListHandler(application: tornado.web.Application, request: tornado.httputil.HTTPServerRequest, **kwargs)[source]

Bases: hypernets.hyperctl.server.BaseHandler

get(*args, **kwargs)[source]
initialize(batch: hypernets.hyperctl.batch.Batch)[source]
class hypernets.hyperctl.server.JobOperationHandler(application: tornado.web.Application, request: tornado.httputil.HTTPServerRequest, **kwargs)[source]

Bases: hypernets.hyperctl.server.BaseHandler

OPT_KILL = 'kill'
initialize(batch: hypernets.hyperctl.batch.Batch, job_scheduler: hypernets.hyperctl.scheduler.JobScheduler)[source]
post(job_name, operation, **kwargs)[source]
class hypernets.hyperctl.server.RestCode[source]

Bases: object

Exception = -1
Success = 0
class hypernets.hyperctl.server.RestResult(code, body)[source]

Bases: object

to_dict()[source]
to_json()[source]
hypernets.hyperctl.server.create_batch_manage_webapp(server_host, server_port, batch, job_scheduler) → hypernets.hyperctl.server.HyperctlWebApplication[source]
hypernets.hyperctl.server.create_hyperctl_handlers(batch, job_scheduler)[source]
hypernets.hyperctl.server.to_job_detail(job, batch)[source]

hypernets.hyperctl.utils module

hypernets.hyperctl.utils.copy_item(src, dest, key)[source]
hypernets.hyperctl.utils.get_request(url)[source]
hypernets.hyperctl.utils.http_portal(host, port)[source]
hypernets.hyperctl.utils.load_json(file_path)[source]
hypernets.hyperctl.utils.load_yaml(file_path)[source]
hypernets.hyperctl.utils.post_request(url, request_data: Optional[str])[source]

Module contents