| Added |
.components.schemas["v0.0.45_controller"] = {}; |
| Added |
.components.schemas["v0.0.45_controller"].properties = {}; |
| Added |
.components.schemas["v0.0.45_controller"].properties.address = {}; |
| Added |
.components.schemas["v0.0.45_controller"].properties.address.description = "Controller address (IP or hostname)"; |
| Added |
.components.schemas["v0.0.45_controller"].properties.address.type = "string"; |
| Added |
.components.schemas["v0.0.45_controller"].properties.machine = {}; |
| Added |
.components.schemas["v0.0.45_controller"].properties.machine.description = "Name of machine hosting controller"; |
| Added |
.components.schemas["v0.0.45_controller"].properties.machine.type = "string"; |
| Added |
.components.schemas["v0.0.45_controller"].required = []; |
| Added |
.components.schemas["v0.0.45_controller"].type = "object"; |
| Added |
.components.schemas["v0.0.45_controller_ping"].properties.status = {}; |
| Added |
.components.schemas["v0.0.45_controller_ping"].properties.status.description = "Ping status code"; |
| Added |
.components.schemas["v0.0.45_controller_ping"].properties.status.type = "string"; |
| Added |
.components.schemas["v0.0.45_controllers_array"] = {}; |
| Added |
.components.schemas["v0.0.45_controllers_array"].items = {}; |
| Added |
.components.schemas["v0.0.45_controllers_array"].items.$ref = "#/components/schemas/v0.0.45_controller"; |
| Added |
.components.schemas["v0.0.45_controllers_array"].type = "array"; |
| Added |
.components.schemas["v0.0.45_job"].properties.exclusive = {}; |
| Added |
.components.schemas["v0.0.45_job"].properties.exclusive.description = "Exclusive resource allocation mode requested for the job"; |
| Added |
.components.schemas["v0.0.45_job"].properties.exclusive.type = "string"; |
| Added |
.components.schemas["v0.0.45_job"].properties.original_sluid = {}; |
| Added |
.components.schemas["v0.0.45_job"].properties.original_sluid.description = "Original SLUID (Slurm Lexicographically-sortable Unique ID)"; |
| Added |
.components.schemas["v0.0.45_job"].properties.original_sluid.type = "string"; |
| Added |
.components.schemas["v0.0.45_job"].properties.oversubscribe = {}; |
| Added |
.components.schemas["v0.0.45_job"].properties.oversubscribe.description = "Oversubscribe mode requested for the job"; |
| Added |
.components.schemas["v0.0.45_job"].properties.oversubscribe.type = "string"; |
| Added |
.components.schemas["v0.0.45_job"].properties.sluid = {}; |
| Added |
.components.schemas["v0.0.45_job"].properties.sluid.description = "SLUID (Slurm Lexicographically-sortable Unique ID)"; |
| Added |
.components.schemas["v0.0.45_job"].properties.sluid.type = "string"; |
| Added |
.components.schemas["v0.0.45_job_array_response_msg_ptr_list"] = {}; |
| Added |
.components.schemas["v0.0.45_job_array_response_msg_ptr_list"].items = {}; |
| Added |
.components.schemas["v0.0.45_job_array_response_msg_ptr_list"].items.$ref = "#/components/schemas/v0.0.45_job_array_response_array"; |
| Added |
.components.schemas["v0.0.45_job_array_response_msg_ptr_list"].items.description = "Job update results"; |
| Added |
.components.schemas["v0.0.45_job_array_response_msg_ptr_list"].type = "array"; |
| Added |
.components.schemas["v0.0.45_job_defaults"] = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.description = "Default Type"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.items = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.items.enum = []; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.items.enum[0] = "DefCpuPerGPU"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.items.enum[1] = "DefMemPerGPU"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.type.type = "array"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.value = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.value.description = "Default value for the given type (e.g. CPUs per GPU or memory per GPU)."; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.value.format = "int64"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].properties.value.type = "integer"; |
| Added |
.components.schemas["v0.0.45_job_defaults"].required = []; |
| Added |
.components.schemas["v0.0.45_job_defaults"].type = "object"; |
| Added |
.components.schemas["v0.0.45_job_defaults_list"] = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults_list"].items = {}; |
| Added |
.components.schemas["v0.0.45_job_defaults_list"].items.$ref = "#/components/schemas/v0.0.45_job_defaults"; |
| Added |
.components.schemas["v0.0.45_job_defaults_list"].type = "array"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.container_type = {}; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.container_type.description = "Job container type"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.container_type.type = "string"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.cores_per_socket = {}; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.cores_per_socket.description = "Cores per socket required"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.cores_per_socket.format = "int32"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.cores_per_socket.type = "integer"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_delay = {}; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_delay.description = "Auto-reduce memory delay in minutes"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_delay.format = "int32"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_delay.type = "integer"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_margin = {}; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_margin.description = "Auto-reduce memory margin percent"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_margin.format = "int32"; |
| Added |
.components.schemas["v0.0.45_job_desc_msg"].properties.memory_update_margin.type = "integer"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.container_type = {}; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.container_type.description = "Job container type"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.container_type.type = "string"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_delay = {}; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_delay.description = "Auto-reduce memory delay in minutes"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_delay.format = "int32"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_delay.type = "integer"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_margin = {}; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_margin.description = "Auto-reduce memory margin percent"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_margin.format = "int32"; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.memory_update_margin.type = "integer"; |
| Replaced |
.components.schemas["v0.0.45_job_info"].properties.shared.deprecated = true; |
| Added |
.components.schemas["v0.0.45_job_info"].properties.shared.description = "Ignored. Was 'How the job can share resources with other jobs, if at all'. Replaced by 'oversubscribe' and 'exclusive'."; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[0] = "CPU"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[1] = "SOCKET"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[2] = "CORE"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[3] = "BOARD"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[4] = "MEMORY"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[5] = "ONE_TASK_PER_CORE"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[6] = "PACK_NODES"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[7] = "CORE_DEFAULT_DIST_BLOCK"; |
| Removed |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[8] = "LLN"; |
| Replaced |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[0] = "NONE"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[1] = "CR_CPU_MEMORY"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[2] = "CR_CPU"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[3] = "CR_CORE_MEMORY"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[4] = "CR_CORE"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[5] = "CR_SOCKET_MEMORY"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[6] = "CR_SOCKET"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[7] = "CR_MEMORY"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[8] = "CR_ONE_TASK_PER_CORE"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[9] = "CR_CORE_DEFAULT_DIST_BLOCK"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[10] = "CR_NO_DIST_TOPO_BLOCK"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[11] = "CR_LLN"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[12] = "CR_PACK_NODES"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[13] = "LL_SHARED_GRES"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[14] = "MULTIPLE_SHARING_GRES_PJ"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[15] = "ENFORCE_BINDING_GRES"; |
| Added |
.components.schemas["v0.0.45_job_res"].properties.select_type.items.enum[16] = "ONE_TASK_PER_SHARING_GRES"; |
| Added |
.components.schemas["v0.0.45_job_submit_req"].properties.script = {}; |
| Added |
.components.schemas["v0.0.45_job_submit_req"].properties.script.description = "Job batch script contents; Same as the script field in jobs[0] or job."; |
| Added |
.components.schemas["v0.0.45_job_submit_req"].properties.script.type = "string"; |
| Removed |
.components.schemas["v0.0.45_node"].properties.next_state_after_reboot.items.enum[30] = "DYNAMIC_NORM"; |
| Replaced |
.components.schemas["v0.0.45_node"].properties.next_state_after_reboot.items.enum[30] = "POWER_DOWN_ASAP"; |
| Added |
.components.schemas["v0.0.45_node"].properties.next_state_after_reboot.items.enum[31] = "POWER_DOWN_FORCE"; |
| Added |
.components.schemas["v0.0.45_node"].properties.next_state_after_reboot.items.enum[32] = "DYNAMIC_NORM"; |
| Added |
.components.schemas["v0.0.45_node"].properties.next_state_after_reboot.items.enum[33] = "BLOCKED"; |
| Removed |
.components.schemas["v0.0.45_node"].properties.state.items.enum[30] = "DYNAMIC_NORM"; |
| Replaced |
.components.schemas["v0.0.45_node"].properties.state.items.enum[30] = "POWER_DOWN_ASAP"; |
| Added |
.components.schemas["v0.0.45_node"].properties.state.items.enum[31] = "POWER_DOWN_FORCE"; |
| Added |
.components.schemas["v0.0.45_node"].properties.state.items.enum[32] = "DYNAMIC_NORM"; |
| Added |
.components.schemas["v0.0.45_node"].properties.state.items.enum[33] = "BLOCKED"; |
| Added |
.components.schemas["v0.0.45_node"].properties.suspend_time = {}; |
| Added |
.components.schemas["v0.0.45_node"].properties.suspend_time.$ref = "#/components/schemas/v0.0.45_uint32_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_node"].properties.suspend_time.description = "Time in idle state before being considered for power save. (32 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"] = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.count = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.count.description = "Count"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.count.format = "int64"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.count.type = "integer"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.index = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.index.description = "Index"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.index.type = "string"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.name = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.name.description = "GRES name"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.name.type = "string"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.type = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.type.description = "GRES type (optional)"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].properties.type.type = "string"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].required = []; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].required[0] = "name"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout"].type = "object"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout_list"] = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout_list"].items = {}; |
| Added |
.components.schemas["v0.0.45_node_gres_layout_list"].items.$ref = "#/components/schemas/v0.0.45_node_gres_layout"; |
| Added |
.components.schemas["v0.0.45_node_gres_layout_list"].type = "array"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"] = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.channel = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.channel.$ref = "#/components/schemas/v0.0.45_uint32_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.channel.description = "IMEX channel (32 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.core_bitmap = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.core_bitmap.description = "Abstract core bitmap"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.core_bitmap.type = "string"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.cores_per_socket = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.cores_per_socket.description = "Cores per socket"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.cores_per_socket.format = "int32"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.cores_per_socket.type = "integer"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.gres = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.gres.$ref = "#/components/schemas/v0.0.45_node_gres_layout_list"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.gres.description = "Allocated GRES"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.mem_alloc = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.mem_alloc.description = "Allocated memory"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.mem_alloc.format = "int64"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.mem_alloc.type = "integer"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.node = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.node.description = "Node name"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.node.type = "string"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.sockets_per_node = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.sockets_per_node.description = "Sockets per node"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.sockets_per_node.format = "int32"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].properties.sockets_per_node.type = "integer"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].required = []; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].required[0] = "node"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout"].type = "object"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout_list"] = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout_list"].items = {}; |
| Added |
.components.schemas["v0.0.45_node_resource_layout_list"].items.$ref = "#/components/schemas/v0.0.45_node_resource_layout"; |
| Added |
.components.schemas["v0.0.45_node_resource_layout_list"].type = "array"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.errors = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.errors.$ref = "#/components/schemas/v0.0.45_openapi_errors"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.errors.description = "Query errors"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.meta.$ref = "#/components/schemas/v0.0.45_openapi_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.meta.description = "Slurm meta values"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.slurm_conf = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.slurm_conf.$ref = "#/components/schemas/v0.0.45_slurm_conf"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.slurm_conf.description = "Configuration from slurm.conf"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.slurm_conf_meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.slurm_conf_meta.$ref = "#/components/schemas/v0.0.45_slurm_conf_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.slurm_conf_meta.description = "Meta data related to slurm.conf"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.warnings = {}; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.warnings.$ref = "#/components/schemas/v0.0.45_openapi_warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].properties.warnings.description = "Query warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_conf_resp"].type = "object"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.errors = {}; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.errors.$ref = "#/components/schemas/v0.0.45_openapi_errors"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.errors.description = "Query errors"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.meta.$ref = "#/components/schemas/v0.0.45_openapi_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.meta.description = "Slurm meta values"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.status = {}; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.status.$ref = "#/components/schemas/v0.0.45_job_array_response_array"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.status.description = "result of job requeue request (Job update results)"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.warnings = {}; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.warnings.$ref = "#/components/schemas/v0.0.45_openapi_warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].properties.warnings.description = "Query warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].required[0] = "status"; |
| Added |
.components.schemas["v0.0.45_openapi_job_requeue_resp"].type = "object"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.description = "Requeue flags"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.items = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.items.enum[0] = "Incomplete"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.items.enum[1] = "Hold"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.items.enum[2] = "SpecialExit"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.flags.type = "array"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.jobs = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.jobs.$ref = "#/components/schemas/v0.0.45_selected_step_list"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].properties.jobs.description = "List of jobs to requeue"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_query"].type = "object"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.errors = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.errors.$ref = "#/components/schemas/v0.0.45_openapi_errors"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.errors.description = "Query errors"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.meta.$ref = "#/components/schemas/v0.0.45_openapi_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.meta.description = "Slurm meta values"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.status = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.status.$ref = "#/components/schemas/v0.0.45_job_array_response_msg_ptr_list"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.status.description = "result of batch job requeue request"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.warnings = {}; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.warnings.$ref = "#/components/schemas/v0.0.45_openapi_warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].properties.warnings.description = "Query warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].required[0] = "status"; |
| Added |
.components.schemas["v0.0.45_openapi_jobs_requeue_resp"].type = "object"; |
| Replaced |
.components.schemas["v0.0.45_openapi_kill_job_resp"].properties.status.description = "result of signal request (List of jobs signal responses)"; |
| Replaced |
.components.schemas["v0.0.45_openapi_kill_jobs_resp"].properties.status.description = "result of signal request (List of jobs signal responses)"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.errors = {}; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.errors.$ref = "#/components/schemas/v0.0.45_openapi_errors"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.errors.description = "Query errors"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.meta.$ref = "#/components/schemas/v0.0.45_openapi_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.meta.description = "Slurm meta values"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.partitions = {}; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.partitions.$ref = "#/components/schemas/v0.0.45_update_partition_msg_list"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.partitions.description = "list of partition descriptions"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.warnings = {}; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.warnings.$ref = "#/components/schemas/v0.0.45_openapi_warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].properties.warnings.description = "Query warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].required[0] = "partitions"; |
| Added |
.components.schemas["v0.0.45_openapi_partitions_mod_req"].type = "object"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.errors = {}; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.errors.$ref = "#/components/schemas/v0.0.45_openapi_errors"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.errors.description = "Query errors"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.meta.$ref = "#/components/schemas/v0.0.45_openapi_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.meta.description = "Slurm meta values"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.nodes = {}; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.nodes.$ref = "#/components/schemas/v0.0.45_node_resource_layout_list"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.nodes.description = "Node resource layouts"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.warnings = {}; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.warnings.$ref = "#/components/schemas/v0.0.45_openapi_warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].properties.warnings.description = "Query warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].required[0] = "nodes"; |
| Added |
.components.schemas["v0.0.45_openapi_resource_layout_resp"].type = "object"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"] = {}; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties = {}; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.errors = {}; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.errors.$ref = "#/components/schemas/v0.0.45_openapi_errors"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.errors.description = "Query errors"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.meta = {}; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.meta.$ref = "#/components/schemas/v0.0.45_openapi_meta"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.meta.description = "Slurm meta values"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.slurmdbd_conf = {}; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.slurmdbd_conf.$ref = "#/components/schemas/v0.0.45_slurmdbd_conf"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.slurmdbd_conf.description = "slurmdbd.conf"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.warnings = {}; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.warnings.$ref = "#/components/schemas/v0.0.45_openapi_warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].properties.warnings.description = "Query warnings"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].required = []; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].required[0] = "slurmdbd_conf"; |
| Added |
.components.schemas["v0.0.45_openapi_slurmdbd_conf_resp"].type = "object"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.cluster.description = "Cluster name (read-only)"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.format = "int32"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum = []; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[0] = "CPU_BIND_TO_THREADS"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[1] = "CPU_BIND_TO_CORES"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[2] = "CPU_BIND_TO_SOCKETS"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[3] = "CPU_BIND_TO_LDOMS"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[4] = "CPU_BIND_NONE"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[5] = "CPU_BIND_OFF"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.enum[6] = "VERBOSE"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.task_binding.type = "array"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.cpus.properties.total.description = "TotalCPUs - Number of CPUs available in this partition (read-only)"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.defaults.properties.memory_per_cpu.deprecated = true; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.description = "Partition flag options"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[0] = "DEFAULT"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[1] = "HIDDEN"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[2] = "NO_ROOT"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[3] = "ROOT_ONLY"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[4] = "REQ_RESV"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[5] = "LLN"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[6] = "EXCLUSIVE_USER"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[7] = "PDOI"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[8] = "DEFAULT_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[9] = "HIDDEN_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[10] = "NO_ROOT_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[11] = "ROOT_ONLY_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[12] = "REQ_RESV_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[13] = "LLN_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[14] = "EXC_USER_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[15] = "PDOI_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[16] = "EXCLUSIVE_TOPO"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.enum[17] = "EXC_TOPO_CLEAR"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.flags.type = "array"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.maximums.properties.memory_per_cpu.deprecated = true; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.maximums.properties.shares.deprecated = true; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.maximums.properties.time.description = "MaxTime - Maximum run time limit for jobs in minutes (32 bit integer number with flags)"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.nodes.properties.total.description = "TotalNodes - Number of nodes available in this partition (read-only)"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.partition.properties.exclusive = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.partition.properties.exclusive.description = "Exclusive= string (same as scontrol show partition)"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.partition.properties.exclusive.type = "string"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.partition.properties.oversubscribe = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.partition.properties.oversubscribe.description = "OverSubscribe display: FORCE:n, NO, or YES:n (same as show partition and sinfo)"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.partition.properties.oversubscribe.type = "string"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.description = "PreemptMode - Mechanism used to preempt jobs for this partition when PreemptType=preempt/partition_prio is configured (the \"CLUSTER_GLOBAL\" flag will be ignored when updating a partition)."; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items = {}; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum = []; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum[0] = "CLUSTER_GLOBAL"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum[1] = "DISABLED"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum[2] = "SUSPEND"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum[3] = "REQUEUE"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum[4] = "CANCEL"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.enum[5] = "PRIORITY"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.preempt_mode.type = "array"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.select_type.description = "Scheduler consumable resource selection type (read-only)"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[0] = "CPU"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[1] = "SOCKET"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[2] = "CORE"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[3] = "BOARD"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[4] = "MEMORY"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[5] = "ONE_TASK_PER_CORE"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[6] = "PACK_NODES"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[7] = "CORE_DEFAULT_DIST_BLOCK"; |
| Removed |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[8] = "LLN"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[0] = "NONE"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[1] = "CR_CPU_MEMORY"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[2] = "CR_CPU"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[3] = "CR_CORE_MEMORY"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[4] = "CR_CORE"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[5] = "CR_SOCKET_MEMORY"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[6] = "CR_SOCKET"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[7] = "CR_MEMORY"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[8] = "CR_ONE_TASK_PER_CORE"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[9] = "CR_CORE_DEFAULT_DIST_BLOCK"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[10] = "CR_NO_DIST_TOPO_BLOCK"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[11] = "CR_LLN"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[12] = "CR_PACK_NODES"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[13] = "LL_SHARED_GRES"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[14] = "MULTIPLE_SHARING_GRES_PJ"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[15] = "ENFORCE_BINDING_GRES"; |
| Added |
.components.schemas["v0.0.45_partition_info"].properties.select_type.items.enum[16] = "ONE_TASK_PER_SHARING_GRES"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.suspend_time.description = "SuspendTime - Nodes which remain idle or down for this number of seconds will be placed into power save mode (read-only; GLOBAL if both set and infinite are false). (32 bit integer number with flags)"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.timeouts.properties.resume.description = "ResumeTimeout - Resumed nodes which fail to respond in this time frame will be marked DOWN (read-only; GLOBAL if both set and infinite are false). (16 bit integer number with flags)"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.timeouts.properties.suspend.description = "SuspendTimeout - Maximum time permitted (in seconds) between when a node suspend request is issued and when the node is shutdown (read-only; GLOBAL if both set and infinite are false). (16 bit integer number with flags)"; |
| Replaced |
.components.schemas["v0.0.45_partition_info"].properties.tres.properties.configured.description = "TRES - Number of each applicable TRES type available in this partition (read-only)"; |
| Added |
.components.schemas["v0.0.45_port_range"] = {}; |
| Added |
.components.schemas["v0.0.45_port_range"].properties = {}; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.maximum = {}; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.maximum.description = "Maximum port number"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.maximum.format = "int32"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.maximum.type = "integer"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.minimum = {}; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.minimum.description = "Minimum port number"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.minimum.format = "int32"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.minimum.type = "integer"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.set = {}; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.set.description = "True if minimum and maximum are set (or ignored on false)"; |
| Added |
.components.schemas["v0.0.45_port_range"].properties.set.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_port_range"].required = []; |
| Added |
.components.schemas["v0.0.45_port_range"].type = "object"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"] = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageBackupHost = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageBackupHost.description = "Accounting storage backup host"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageBackupHost.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.description = "Controls what level of association-based enforcement to impose on job submissions"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[0] = "associations"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[1] = "limits"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[2] = "wckeys"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[3] = "qos"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[4] = "safe"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[5] = "nojobs"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.enum[6] = "nosteps"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageEnforce.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageExternalHost = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageExternalHost.description = "Accounting storage ext host"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageExternalHost.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageHost = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageHost.description = "Accounting storage host"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageHost.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageParameters.description = "Accounting storage params"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoragePort = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoragePort.description = "Node accounting storage port"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoragePort.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoragePort.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageTRES = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageTRES.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageTRES.description = "List of registered TRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageType.description = "Accounting storage type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStorageType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.description = "AccountingStoreFlags specific CONF_FLAG_* flags"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.enum[0] = "job_comment"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.enum[1] = "job_extra"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.enum[2] = "job_script"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.enum[3] = "job_env"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.enum[4] = "no_stdio"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AccountingStoreFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherEnergyType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherEnergyType.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherEnergyType.description = "Energy accounting type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherFilesystemType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherFilesystemType.description = "Filesystem accounting type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherFilesystemType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherInterconnectType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherInterconnectType.description = "Interconnect accounting type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherInterconnectType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherNodeFreq = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherNodeFreq.description = "Secs between node acct request"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherNodeFreq.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherNodeFreq.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherProfileType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherProfileType.description = "Profile accounting type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AcctGatherProfileType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthAltParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthAltParameters.description = "Alternate authentication parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthAltParameters.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthAltTypes = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthAltTypes.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthAltTypes.description = "Alternate authentication types"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthInfo = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthInfo.description = "Authentication info"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthInfo.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthType.description = "Authentication type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.AuthType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BatchStartTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BatchStartTimeout.description = "Maximum seconds for batch job to start"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BatchStartTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BatchStartTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BcastExclude = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BcastExclude.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BcastExclude.description = "Bcast exclude library paths"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BcastParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BcastParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BcastParameters.description = "Bcast options"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BurstBufferType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BurstBufferType.description = "Burst buffer plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.BurstBufferType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertgenParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertgenParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertgenParameters.description = "Certgen parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertgenType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertgenType.description = "Certgen type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertgenType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertmgrParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertmgrParameters.description = "Certmgr parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertmgrParameters.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertmgrType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertmgrType.description = "Certmgr type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CertmgrType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CliFilterParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CliFilterParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CliFilterParameters.description = "Parameters for cli_filter plugins"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CliFilterPlugins = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CliFilterPlugins.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CliFilterPlugins.description = "List of cli_filter plugins to use"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ClusterName = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ClusterName.description = "General name of the entire cluster"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ClusterName.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParameters.description = "Communication parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.description = "CommunicationParameters specific CONF_FLAG_* flags"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.items.enum[0] = "IPv4"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.items.enum[1] = "IPv6"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.items.enum[2] = "disable_http"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CommunicationParametersFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CompleteWait = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CompleteWait.description = "Seconds to wait for job completion before scheduling another job"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CompleteWait.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CompleteWait.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.description = "General configuration flags (DisableRootJobs, AllowSpecResourcesUsage, UsePAM, TrackWCKey)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items.enum[0] = "DisableRootJobs"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items.enum[1] = "AllowSpecResourcesUsage"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items.enum[2] = "UsePam"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items.enum[3] = "TrackWCKey"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ConfFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqDef = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqDef.description = "Default CPU frequency or governor for job steps if not set via --cpu-freq"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqDef.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.description = "CPU frequency governors allowed via --cpu-freq"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[0] = "Unset"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[1] = "Conservative"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[2] = "OnDemand"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[3] = "Performance"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[4] = "PowerSave"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[5] = "SchedUtil"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[6] = "UserSpace"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[7] = "Low"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[8] = "Medium"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[9] = "Highm1"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.enum[10] = "High"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CpuFreqGovernors.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CredType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CredType.description = "Credential signature plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.CredType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DataParserParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DataParserParameters.description = "Data parser parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DataParserParameters.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.description = "Subsystems which provide more detailed event logging"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[0] = "Accrue"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[1] = "Agent"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[2] = "AuditRPCs"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[3] = "AuditTLS"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[4] = "Backfill"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[5] = "BackfillMap"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[6] = "BurstBuffer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[7] = "Cgroup"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[8] = "ConMgr"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[9] = "CpuFrequency"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[10] = "CPU_Bind"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[11] = "Data"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[12] = "DBD_Agent"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[13] = "DB_Archive"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[14] = "DB_Assoc"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[15] = "DB_Event"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[16] = "DB_Job"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[17] = "DB_QOS"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[18] = "DB_Query"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[19] = "DB_Reservation"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[20] = "DB_Resource"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[21] = "DB_Step"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[22] = "DB_TRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[23] = "DB_Usage"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[24] = "DB_WCKey"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[25] = "Dependency"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[26] = "Energy"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[27] = "Federation"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[28] = "Gang"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[29] = "GLOB_SILENCE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[30] = "Gres"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[31] = "Hetjob"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[32] = "Interconnect"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[33] = "JobAccountGather"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[34] = "JobComp"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[35] = "Namespace"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[36] = "NodeFeatures"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[37] = "License"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[38] = "Metrics"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[39] = "MPI"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[40] = "Network"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[41] = "NetworkRaw"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[42] = "NO_CONF_HASH"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[43] = "Power"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[44] = "Priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[45] = "Profile"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[46] = "Protocol"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[47] = "Reservation"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[48] = "Route"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[49] = "Sack"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[50] = "Script"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[51] = "SelectType"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[52] = "Steps"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[53] = "Switch"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[54] = "thread"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[55] = "TLS"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[56] = "TraceJobs"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.enum[57] = "Triggers"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DebugFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DefMemPerCPU = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DefMemPerCPU.description = "Default MB memory per allocated CPU"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DefMemPerCPU.format = "int64"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DefMemPerCPU.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DependencyParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DependencyParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.DependencyParameters.description = "Options controlling job dependency behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EioTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EioTimeout.description = "Seconds srun waits for slurmstepd to close the TCP/IP connection"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EioTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EioTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.description = "Controls whether partition limits are enforced at job submission time"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.items.enum[0] = "NO"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.items.enum[1] = "ANY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.items.enum[2] = "ALL"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EnforcePartLimits.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Epilog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Epilog.$ref = "#/components/schemas/v0.0.45_string_array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Epilog.description = "pathname of job epilog run by slurmd (Epilog programs run by slurmd)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogMsgTime = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogMsgTime.description = "Usecs for slurmctld to process an epilog complete message"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogMsgTime.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogMsgTime.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogSlurmctld = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogSlurmctld.$ref = "#/components/schemas/v0.0.45_string_array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogSlurmctld.description = "pathname of job epilog run by slurmctld (Epilog programs run by slurmctld)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogTimeout.$ref = "#/components/schemas/v0.0.45_uint16_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.EpilogTimeout.description = "Epilog timeout in seconds (16 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FairShareDampeningFactor = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FairShareDampeningFactor.description = "Dampening factor for the effect of exceeding fair share of resources"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FairShareDampeningFactor.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FairShareDampeningFactor.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FederationParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FederationParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FederationParameters.description = "Federation parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FirstJobId = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FirstJobId.description = "First slurm generated job_id to assign"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FirstJobId.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.FirstJobId.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GetNameInfoCacheTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GetNameInfoCacheTimeout.description = "For getnameinfo() cache"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GetNameInfoCacheTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GetNameInfoCacheTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GpuFreqDef = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GpuFreqDef.description = "Default GPU frequency for job steps if not set via --gpu-freq"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GpuFreqDef.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GresTypes = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GresTypes.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GresTypes.description = "Comma-delimited list of generic resources to be managed"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateForce = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateForce.description = "Update group/partition info even if no change detected"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateForce.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateTime = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateTime.description = "Interval in seconds between updates of user/group partition membership"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateTime.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.GroupUpdateTime.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HashPlugin = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HashPlugin.description = "Hash plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HashPlugin.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckInterval = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckInterval.description = "Secs between health checks"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckInterval.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckInterval.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.description = "Node states on which to execute the HealthCheckProgram"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[0] = "START_ONLY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[1] = "REBOOT_ONLY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[2] = "ANY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[3] = "IDLE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[4] = "ALLOC"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[5] = "MIXED"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[6] = "NONDRAINED_IDLE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.enum[7] = "CYCLE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckNodeState.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckProgram = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckProgram.description = "Pathname of health check program"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckProgram.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckTimeout.description = "Timeout for health check program"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HealthCheckTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HostUnreachRetryCount = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HostUnreachRetryCount.description = "Times to retry connecting if rc=EHOSTUNREACH"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HostUnreachRetryCount.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HostUnreachRetryCount.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HttpParserType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HttpParserType.description = "Http_parser plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.HttpParserType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InactiveLimit = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InactiveLimit.description = "Seconds of inactivity before a inactive resource allocation is released"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InactiveLimit.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InactiveLimit.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InteractiveStepOptions = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InteractiveStepOptions.description = "Options for srun when using LaunchParameters=use_interactive_step with salloc"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.InteractiveStepOptions.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherFrequency = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherFrequency.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherFrequency.description = "Poll frequency for job accounting gather plugins"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherParams = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherParams.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherParams.description = "Job accounting gather parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherType.description = "Job accounting gather type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctGatherType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctOomKill = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctOomKill.description = "Enforce mem limit at runtime y|n"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobAcctOomKill.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompHost = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompHost.description = "Job completion logging host"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompHost.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompLoc = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompLoc.description = "Job completion logging location"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompLoc.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompParams = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompParams.description = "Job completion parameters for plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompParams.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompPort = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompPort.description = "Job completion storage port"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompPort.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompPort.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompType.description = "Job completion storage type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompUser = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompUser.description = "Job completion storage user"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobCompUser.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobDefaults = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobDefaults.$ref = "#/components/schemas/v0.0.45_job_defaults_list"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobDefaults.description = "List of job_defaults_t elements"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobFileAppend = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobFileAppend.description = "If set, append to stdout/err file"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobFileAppend.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobRequeue = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobRequeue.description = "If set, jobs get requeued on node failure"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobRequeue.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobSubmitPlugins = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobSubmitPlugins.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.JobSubmitPlugins.description = "List of job_submit plugins to use"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveInterval = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveInterval.description = "Interval between keepalive probes"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveInterval.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveInterval.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveProbes = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveProbes.description = "Number of keepalive probe attempts"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveProbes.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveProbes.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveTime = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveTime.description = "Keep alive time for srun I/O sockets"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveTime.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KeepaliveTime.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillOnBadExit = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillOnBadExit.description = "If set, the job will be terminated immediately when one of the processes is aborted or crashed"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillOnBadExit.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillWait = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillWait.description = "Seconds between SIGTERM and SIGKILL when a job reaches its time limit"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillWait.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.KillWait.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LaunchParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LaunchParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LaunchParameters.description = "Step launcher plugin options"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LicenseParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LicenseParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LicenseParameters.description = "Options for licenses/HRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Licenses = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Licenses.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Licenses.description = "Licenses available on this cluster"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LogTimeFormat = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LogTimeFormat.description = "Format of the timestamp in slurmctld and slurmd log files"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.LogTimeFormat.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MCSParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MCSParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MCSParameters.description = "Mcs plugin parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MCSPlugin = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MCSPlugin.description = "Mcs plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MCSPlugin.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MailDomain = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MailDomain.description = "Default domain to append to usernames"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MailDomain.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MailProg = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MailProg.description = "Pathname of mail program"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MailProg.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxArraySize = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxArraySize.description = "Maximum job array size"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxArraySize.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxArraySize.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxBatchRequeue = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxBatchRequeue.description = "Max times a batch job may be auto-requeued before being held"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxBatchRequeue.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxBatchRequeue.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxDBDMsgs = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxDBDMsgs.description = "Maximum number of messages queued while DBD is not connected"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxDBDMsgs.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxDBDMsgs.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobCount = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobCount.description = "Maximum number of active jobs"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobCount.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobCount.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobId = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobId.description = "Maximum job id before wrapping back to FirstJobId"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobId.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxJobId.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxMemPerCPU = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxMemPerCPU.$ref = "#/components/schemas/v0.0.45_uint64_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxMemPerCPU.description = "Maximum MB memory per allocated CPU (64 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxNodeCount = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxNodeCount.description = "Max number of static + dynamic nodes"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxNodeCount.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxNodeCount.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxStepCount = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxStepCount.description = "Maximum number of steps per job"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxStepCount.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxStepCount.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxTasksPerNode = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxTasksPerNode.description = "Maximum tasks per node"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxTasksPerNode.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MaxTasksPerNode.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MessageTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MessageTimeout.description = "Seconds permitted for a round-trip communication to complete"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MessageTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MessageTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsAuthUsers = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsAuthUsers.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsAuthUsers.description = "Users allowed to query metrics"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsParameters.description = "Parameters related to metrics plugins"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsType.description = "Metrics plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MetricsType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MinJobAge = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MinJobAge.description = "COMPLETED jobs over this age (secs) purged from in memory records"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MinJobAge.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MinJobAge.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MpiDefault = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MpiDefault.description = "Default type of MPI to be used"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MpiDefault.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MpiParams = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MpiParams.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.MpiParams.description = "MPI parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.NamespaceType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.NamespaceType.description = "Job container plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.NamespaceType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.NodeFeaturesPlugins = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.NodeFeaturesPlugins.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.NodeFeaturesPlugins.description = "List of node_features plugins to use"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.OverTimeLimit = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.OverTimeLimit.description = "Minutes by which a job can exceed its time limit before being canceled"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.OverTimeLimit.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.OverTimeLimit.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PlugStackConfig = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PlugStackConfig.description = "Pathname to plugin stack config file"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PlugStackConfig.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PluginDir = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PluginDir.description = "Pathname to plugins"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PluginDir.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrEpParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrEpParameters.description = "PrEp parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrEpParameters.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrEpPlugins = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrEpPlugins.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrEpPlugins.description = "PrEp plugins"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptExemptTime = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptExemptTime.$ref = "#/components/schemas/v0.0.45_uint32_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptExemptTime.description = "Minimum run time for all jobs before they can be considered for preemption (32 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.description = "Mechanism used to preempt jobs or enable gang scheduling"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[0] = "DISABLED"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[1] = "SUSPEND"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[2] = "REQUEUE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[3] = "CANCEL"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[4] = "GANG"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[5] = "WITHIN"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.enum[6] = "PRIORITY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptMode.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptParameters.description = "Options controlling preemption behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptType.description = "Job preemption selection plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PreemptType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityCalcPeriod = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityCalcPeriod.description = "Period in minutes between half-life decay recalculations (Time formatted as HH:MM:SS or D-HH:MM:SS)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityCalcPeriod.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityDecayHalfLife = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityDecayHalfLife.description = "How long prior resource use is considered in determining job priority (Time formatted as HH:MM:SS or D-HH:MM:SS)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityDecayHalfLife.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFavorSmall = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFavorSmall.description = "If set, give small jobs preferential scheduling priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFavorSmall.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.description = "Flags controlling priority calculation behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[0] = "ACCRUE_ALWAYS"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[1] = "SMALL_RELATIVE_TO_TIME"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[2] = "CALCULATE_RUNNING"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[3] = "DEPTH_OBLIVIOUS"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[4] = "FAIR_TREE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[5] = "INCR_ONLY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[6] = "MAX_TRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[7] = "MAX_TRES_GRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[8] = "NO_NORMAL_ASSOC"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[9] = "NO_NORMAL_PART"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[10] = "NO_NORMAL_QOS"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.enum[11] = "NO_NORMAL_TRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityMaxAge = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityMaxAge.description = "Job age at which the maximum age priority factor is assigned (Time formatted as HH:MM:SS or D-HH:MM:SS)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityMaxAge.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityParameters.description = "Priority plugin parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityParameters.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrioritySiteFactorParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrioritySiteFactorParameters.description = "Parameters for the site-specific priority factor plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrioritySiteFactorParameters.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrioritySiteFactorPlugin = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrioritySiteFactorPlugin.description = "Plugin for computing site-specific priority factors"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrioritySiteFactorPlugin.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityType.description = "Priority type plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityUsageResetPeriod = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityUsageResetPeriod.description = "Interval at which association usage is reset to zero"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityUsageResetPeriod.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAge = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAge.description = "Degree to which queue wait time contributes to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAge.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAge.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAssoc = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAssoc.description = "Degree to which association contributes to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAssoc.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightAssoc.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightFairshare = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightFairshare.description = "Degree to which fair-share contributes to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightFairshare.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightFairshare.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightJobSize = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightJobSize.description = "Degree to which job size contributes to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightJobSize.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightJobSize.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightPartition = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightPartition.description = "Degree to which partition contributes to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightPartition.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightPartition.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightQOS = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightQOS.description = "Degree to which QOS contributes to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightQOS.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightQOS.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightTRES = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightTRES.description = "TRES types and weights controlling their contribution to job priority"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PriorityWeightTRES.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.description = "Controls what information is hidden from regular users"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[0] = "accounts"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[1] = "events"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[2] = "jobs"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[3] = "nodes"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[4] = "partitions"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[5] = "reservations"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[6] = "usage"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.enum[7] = "users"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrivateData.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ProctrackType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ProctrackType.description = "Process tracking plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ProctrackType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Prolog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Prolog.$ref = "#/components/schemas/v0.0.45_string_array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.Prolog.description = "pathname of job prolog run by slurmd (Prolog programs run by slurmd)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.description = "Flags controlling Prolog and Epilog behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[0] = "Alloc"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[1] = "NoHold"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[2] = "Contain"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[3] = "Serial"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[4] = "X11"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[5] = "DeferBatch"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[6] = "ForceRequeueOnFail"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.enum[7] = "RunInJob"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologSlurmctld = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologSlurmctld.$ref = "#/components/schemas/v0.0.45_string_array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologSlurmctld.description = "pathname of job prolog run by slurmctld (Prolog programs run by slurmctld)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologTimeout.$ref = "#/components/schemas/v0.0.45_uint16_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PrologTimeout.description = "Prolog timeout in seconds (16 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagatePrioProcess = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagatePrioProcess.description = "Controls scheduling priority (nice value) of user spawned tasks"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagatePrioProcess.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagateResourceLimits = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagateResourceLimits.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagateResourceLimits.description = "Propagate (all/specific) resource limits"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagateResourceLimitsExcept = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagateResourceLimitsExcept.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.PropagateResourceLimitsExcept.description = "Propagate all rlimits except these"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RebootProgram = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RebootProgram.description = "Program to reboot the node"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RebootProgram.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.description = "Flags controlling actions taken on scontrol reconfig"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.items.enum[0] = "KeepPartInfo"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.items.enum[1] = "KeepPartState"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.items.enum[2] = "KeepPowerSaveSettings"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReconfigFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RequeueExit = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RequeueExit.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RequeueExit.description = "Exit codes that trigger automatic batch job requeue"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RequeueExitHold = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RequeueExitHold.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.RequeueExitHold.description = "Exit codes that trigger automatic requeue with job hold"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeFailProgram = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeFailProgram.description = "Program executed when nodes fail to resume by ResumeTimeout"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeFailProgram.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeProgram = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeProgram.description = "Program to bring nodes out of power save mode when assigned work"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeProgram.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeRate = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeRate.description = "Rate at which nodes are resumed from power save mode per minute"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeRate.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeRate.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeTimeout.description = "Max seconds between a resume request and node availability"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResumeTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvEpilog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvEpilog.description = "Path of reservation epilog run by slurmctld"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvEpilog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvOverRun = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvOverRun.description = "Minutes a running job can continue after its reservation ends"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvOverRun.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvOverRun.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvProlog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvProlog.description = "Path of reservation prolog run by slurmctld"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ResvProlog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReturnToService = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReturnToService.description = "Controls when a DOWN node is returned to service"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ReturnToService.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerParameters.description = "Options controlling scheduler behavior (varies by SchedulerType)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerTimeSlice = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerTimeSlice.description = "Time slice in seconds for gang scheduling"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerTimeSlice.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerTimeSlice.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerType.description = "Type of scheduler to use"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SchedulerType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ScronParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ScronParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.ScronParameters.description = "Options controlling scrontab behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectType.description = "Type of node selector to use"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.description = "Scheduler consumable resource selection type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[0] = "NONE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[1] = "CR_CPU_MEMORY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[2] = "CR_CPU"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[3] = "CR_CORE_MEMORY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[4] = "CR_CORE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[5] = "CR_SOCKET_MEMORY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[6] = "CR_SOCKET"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[7] = "CR_MEMORY"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[8] = "CR_ONE_TASK_PER_CORE"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[9] = "CR_CORE_DEFAULT_DIST_BLOCK"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[10] = "CR_NO_DIST_TOPO_BLOCK"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[11] = "CR_LLN"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[12] = "CR_PACK_NODES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[13] = "LL_SHARED_GRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[14] = "MULTIPLE_SHARING_GRES_PJ"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[15] = "ENFORCE_BINDING_GRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.enum[16] = "ONE_TASK_PER_SHARING_GRES"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SelectTypeParameters.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogFile = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogFile.description = "Where slurm Scheduler log gets written"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogLevel = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogLevel.description = "Configured level of slurm Scheduler log"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogLevel.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmSchedLogLevel.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUser = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUser.description = "User that slurmctld runs as"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUser.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUserId = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUserId.description = "Uid of slurm_user_name"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUserId.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmUserId.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldAddr = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldAddr.description = "Address used for communications to the currently active slurmctld daemon"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldAddr.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldDebug = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldDebug.description = "Slurmctld logging level"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldDebug.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldHttpAuthParams = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldHttpAuthParams.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldLogFile = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldLogFile.description = "Where slurmctld error log gets written"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldLogFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldParameters.description = "Options controlling slurmctld behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPidFile = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPidFile.description = "Where to put slurmctld pidfile"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPidFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPort = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPort.description = "Default communications port to slurmctld"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPort.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPort.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPortCount = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPortCount.description = "Number of slurmctld comm ports"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPortCount.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPortCount.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPrimaryOffProg = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPrimaryOffProg.description = "Run when becomes slurmctld backup"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPrimaryOffProg.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPrimaryOnProg = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPrimaryOnProg.description = "Run when becomes slurmctld primary"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldPrimaryOnProg.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldSyslogDebug = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldSyslogDebug.description = "Slurmctld output to local logfile and syslog"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldSyslogDebug.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldTimeout.description = "Seconds the backup controller waits for the primary before taking over"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmctldTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdDebug = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdDebug.description = "Slurmd logging level"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdDebug.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdHttpAuthParams = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdHttpAuthParams.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdLogFile = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdLogFile.description = "Where slurmd error log gets written"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdLogFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParameters.description = "Options controlling slurmd behavior"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.description = "SlurmdParameters specific CONF_FLAG_* flags"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum[0] = "config_overrides"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum[1] = "l3cache_as_socket"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum[2] = "numa_node_as_socket"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum[3] = "allow_ecores"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum[4] = "shutdown_on_reboot"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.enum[5] = "contain_spank"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdParametersFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPidFile = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPidFile.description = "Where to put slurmd pidfile"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPidFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPort = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPort.description = "Default communications port to slurmd"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPort.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdPort.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdSpoolDir = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdSpoolDir.description = "Where slurmd put temporary state info"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdSpoolDir.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdSyslogDebug = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdSyslogDebug.description = "Slurmd output to local logfile and syslog"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdSyslogDebug.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdTimeout.description = "Seconds slurmctld waits for slurmd before marking node DOWN"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUser = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUser.description = "User that slurmd runs as"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUser.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUserId = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUserId.description = "Uid of slurmd_user_name"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUserId.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SlurmdUserId.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunEpilog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunEpilog.description = "Srun epilog program"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunEpilog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunPortRange = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunPortRange.$ref = "#/components/schemas/v0.0.45_port_range"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunPortRange.description = "Port range for srun communications (Port range)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunProlog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunProlog.description = "Srun prolog program"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SrunProlog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.StateSaveLocation = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.StateSaveLocation.description = "Pathname of slurmctld state save directory"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.StateSaveLocation.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcNodes = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcNodes.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcNodes.description = "Nodes excluded from power save mode"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcParts = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcParts.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcParts.description = "Partitions whose nodes are excluded from power save mode"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcStates = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcStates.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendExcStates.description = "States that should not be powered down"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendProgram = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendProgram.description = "Program to place idle nodes into power save mode"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendProgram.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendRate = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendRate.description = "Rate at which nodes are placed into power save mode per minute"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendRate.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendRate.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTime = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTime.$ref = "#/components/schemas/v0.0.45_uint32_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTime.description = "Seconds a node must be idle before being placed in power save mode (32 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTimeout.description = "Max seconds between a suspend request and node shutdown"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SuspendTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SwitchParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SwitchParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SwitchParameters.description = "Options for the switch plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SwitchType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SwitchType.description = "Switch or interconnect type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.SwitchType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TCPTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TCPTimeout.description = "Seconds permitted for a TCP connection to be established"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TCPTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TCPTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TLSParameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TLSParameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TLSParameters.description = "Options for the TLS plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TLSType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TLSType.description = "Tls plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TLSType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskEpilog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskEpilog.description = "Pathname of task launch epilog"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskEpilog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPlugin = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPlugin.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPlugin.description = "Task launch plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.description = "Default task binding and distribution options"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[0] = "None"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[1] = "Threads"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[2] = "Cores"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[3] = "Sockets"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[4] = "autobind=threads"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[5] = "autobind=cores"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[6] = "autobind=sockets"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[7] = "Verbose"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[8] = "SlurmdOffSpec"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[9] = "OOMKillStep"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.enum[10] = "SlurmdSpecOverride"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskPluginParam.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskProlog = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskProlog.description = "Pathname of task launch prolog"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TaskProlog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TmpFS = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TmpFS.description = "Pathname of temporary file system"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TmpFS.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TopologyParam = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TopologyParam.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TopologyParam.description = "Network topology parameters"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TopologyPlugin = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TopologyPlugin.description = "Network topology plugin"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TopologyPlugin.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TreeWidth = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TreeWidth.description = "Fanout of the slurmd communication tree"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TreeWidth.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.TreeWidth.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepProgram = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepProgram.description = "Program run by the slurmstepd when processes in a job step are unkillable"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepProgram.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepTimeout = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepTimeout.description = "Time in seconds, after processes in a job step have been signaled, before they are considered \"unkillable\"."; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepTimeout.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UnkillableStepTimeout.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UrlParserType = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UrlParserType.description = "Url_parser plugin type"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.UrlParserType.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.VSizeFactor = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.VSizeFactor.description = "Job virtual memory limit as a percentage of its real memory limit"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.VSizeFactor.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.VSizeFactor.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.WaitTime = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.WaitTime.description = "Default seconds srun waits after first task exits before terminating all"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.WaitTime.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.WaitTime.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.X11Parameters = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.X11Parameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.X11Parameters.description = "Options for X11 forwarding"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.controllers = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.controllers.$ref = "#/components/schemas/v0.0.45_controllers_array"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].properties.controllers.description = "slurmctld controllers (List of slurmctld controllers)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].required = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf"].type = "object"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"] = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.BOOT_TIME = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.BOOT_TIME.description = "Time slurmctld last booted (UNIX timestamp or time string recognized by Slurm (e.g., '[MM/DD[/YY]-]HH:MM[:SS]'))"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.BOOT_TIME.format = "int64"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.BOOT_TIME.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.ClusterId = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.ClusterId.$ref = "#/components/schemas/v0.0.45_uint16_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.ClusterId.description = "Unique ID for this cluster from the DBD (16 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.HashValue = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.HashValue.description = "Hash value of the slurm.conf file"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.HashValue.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.HashValue.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.LastUpdate = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.LastUpdate.description = "Last update time of the build parameters (UNIX timestamp or time string recognized by Slurm (e.g., '[MM/DD[/YY]-]HH:MM[:SS]'))"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.LastUpdate.format = "int64"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.LastUpdate.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.NEXT_JOB_ID = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.NEXT_JOB_ID.$ref = "#/components/schemas/v0.0.45_uint32_no_val_struct"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.NEXT_JOB_ID.description = "Next slurm generated job_id to assign (32 bit integer number with flags)"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.SLURM_CONF = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.SLURM_CONF.description = "Pathname of slurm config file"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.SLURM_CONF.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.SLURM_VERSION = {}; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.SLURM_VERSION.description = "Version of slurmctld"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].properties.SLURM_VERSION.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].required = []; |
| Added |
.components.schemas["v0.0.45_slurm_conf_meta"].type = "object"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"] = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.archive = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.archive.description = "whether to archive purged records"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.archive.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.days = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.days.description = "days"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.days.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.days.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.hours = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.hours.description = "hours"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.hours.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.hours.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.months = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.months.description = "months"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.months.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.months.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.set = {}; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.set.description = "whether purge has been set"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].properties.set.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].required = []; |
| Added |
.components.schemas["v0.0.45_slurmdb_purge_units"].type = "object"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"] = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.ArchiveDir = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.ArchiveDir.description = "location to locally store data if not using a script"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.ArchiveDir.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.ArchiveScript = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.ArchiveScript.description = "script to archive old data"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.ArchiveScript.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.CommitDelay = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.CommitDelay.description = "on busy systems, delay commits from slurmctld this many seconds"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.CommitDelay.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.CommitDelay.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdAddr = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdAddr.description = "network address of Slurm DBD"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdAddr.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdBackupHost = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdBackupHost.description = "hostname of Slurm DBD backup"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdBackupHost.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdHost = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdHost.description = "hostname of Slurm DBD"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdHost.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdPort = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdPort.description = "port number for RPCs to DBD"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdPort.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DbdPort.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DebugLevel = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DebugLevel.description = "debug level, default=3"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DebugLevel.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DebugLevelSyslog = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DebugLevelSyslog.description = "output to both log file and syslog"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DebugLevelSyslog.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DefaultQOS = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DefaultQOS.description = "default qos setting when adding clusters"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.DefaultQOS.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.description = "various flags, see DBD_CONF_FLAG_*"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items.enum[0] = "AllowNoDefAcct"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items.enum[1] = "AllResourcesAbsolute"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items.enum[2] = "DisableCoordDBD"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items.enum[3] = "DisableArchiveCommands"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Flags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.LogFile = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.LogFile.description = "fully qualified pathname of the slurmdbd log file (default unset; logs go to syslog)"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.LogFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxPurgeLimit = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxPurgeLimit.description = "max number of records that are purged in a single query so that locks can be periodically released"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxPurgeLimit.format = "int32"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxPurgeLimit.type = "integer"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxQueryTimeRange = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxQueryTimeRange.description = "max time range for user queries (Time formatted as HH:MM:SS or D-HH:MM:SS)"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.MaxQueryTimeRange.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Parameters = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Parameters.$ref = "#/components/schemas/v0.0.45_csv_string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.Parameters.description = "parameters to change behavior with the slurmdbd directly"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags.description = "flags to be sent back on any persist connection init"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags.items = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags.items.enum = []; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags.items.enum[0] = "PreserveCaseUser"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags.items.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PersistConnFlags.type = "array"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PidFile = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PidFile.description = "where to store current PID"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PidFile.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeEventAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeEventAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeEventAfter.description = "purge events older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobAfter.description = "purge jobs older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobEnvAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobEnvAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobEnvAfter.description = "purge job environments older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobScriptAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobScriptAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeJobScriptAfter.description = "purge job scripts older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeResvAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeResvAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeResvAfter.description = "purge reservations older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeStepAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeStepAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeStepAfter.description = "purge steps older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeSuspendAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeSuspendAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeSuspendAfter.description = "purge suspend data older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeTXNAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeTXNAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeTXNAfter.description = "purge transaction data older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeUsageAfter = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeUsageAfter.$ref = "#/components/schemas/v0.0.45_slurmdb_purge_units"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.PurgeUsageAfter.description = "purge usage data older than this in hours, days, or months"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StorageLoc = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StorageLoc.description = "name of the database where accounting records are written (default slurm_acct_db)"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StorageLoc.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StoragePassScript = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StoragePassScript.description = "path to executable script that emits ephemeral DB authentication tokens used in place of StoragePass"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StoragePassScript.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StorageUser = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StorageUser.description = "username used to connect to the accounting database (defaults to the user running slurmdbd)"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.StorageUser.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.TrackSlurmctldDown = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.TrackSlurmctldDown.description = "whether or not to track when a slurmctld goes down"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.TrackSlurmctldDown.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.TrackWCKey = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.TrackWCKey.description = "whether or not to track WCKey"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].properties.TrackWCKey.type = "boolean"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].required = []; |
| Added |
.components.schemas["v0.0.45_slurmdbd_conf"].type = "object"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_ping"].properties.status = {}; |
| Added |
.components.schemas["v0.0.45_slurmdbd_ping"].properties.status.description = "Ping status code"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_ping"].properties.status.type = "string"; |
| Added |
.components.schemas["v0.0.45_slurmdbd_ping"].required[4] = "status"; |
| Added |
.components.schemas["v0.0.45_step"].properties.submit_line = {}; |
| Added |
.components.schemas["v0.0.45_step"].properties.submit_line.description = "Full command used to submit the step"; |
| Added |
.components.schemas["v0.0.45_step"].properties.submit_line.type = "string"; |
| Added |
.components.schemas["v0.0.45_update_node_msg"].properties.reason_uid.deprecated = true; |
| Removed |
.components.schemas["v0.0.45_update_node_msg"].properties.state.items.enum[30] = "DYNAMIC_NORM"; |
| Replaced |
.components.schemas["v0.0.45_update_node_msg"].properties.state.items.enum[30] = "POWER_DOWN_ASAP"; |
| Added |
.components.schemas["v0.0.45_update_node_msg"].properties.state.items.enum[31] = "POWER_DOWN_FORCE"; |
| Added |
.components.schemas["v0.0.45_update_node_msg"].properties.state.items.enum[32] = "DYNAMIC_NORM"; |
| Added |
.components.schemas["v0.0.45_update_node_msg"].properties.state.items.enum[33] = "BLOCKED"; |
| Added |
.components.schemas["v0.0.45_update_partition_msg_list"] = {}; |
| Added |
.components.schemas["v0.0.45_update_partition_msg_list"].items = {}; |
| Added |
.components.schemas["v0.0.45_update_partition_msg_list"].items.$ref = "#/components/schemas/v0.0.45_partition_info"; |
| Added |
.components.schemas["v0.0.45_update_partition_msg_list"].type = "array"; |
| Added |
.paths["/slurm/v0.0.45/conf"] = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.operationId = "slurm_v0045_get_conf"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters = []; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0] = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].allowEmptyValue = false; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].description = "Query config updated more recently than this time (UNIX timestamp)"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].explode = false; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].name = "update_time"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].required = false; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0].style = "form"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.parameters[0]["in"] = "query"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["200"] = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["200"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_conf_resp"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["200"].description = "slurm configuration"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["default"] = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["default"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_conf_resp"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.responses["default"].description = "slurm configuration"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.summary = "Dump slurm configuration"; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.tags = []; |
| Added |
.paths["/slurm/v0.0.45/conf"].get.tags[0] = "slurm"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.operationId = "slurm_v0045_get_job_requeue"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters = []; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].allowEmptyValue = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].description = "Job ID"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].explode = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].name = "job_id"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].required = true; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0].style = "simple"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[0]["in"] = "path"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].allowEmptyValue = true; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].description = "Operate only on jobs (or tasks of a job array) which have not completed"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].explode = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].name = "Incomplete"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].required = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1].style = "form"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[1]["in"] = "query"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].allowEmptyValue = true; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].description = "Hold job after requeue, will require manual release to run again"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].explode = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].name = "Hold"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].required = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2].style = "form"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[2]["in"] = "query"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].allowEmptyValue = true; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].description = "Set SPECIAL_EXIT state after requeue; must also specify the Hold flag"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].explode = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].name = "SpecialExit"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].required = false; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3].style = "form"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.parameters[3]["in"] = "query"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["200"] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["200"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_job_requeue_resp"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["200"].description = "job requeue result"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["default"] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["default"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_job_requeue_resp"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.responses["default"].description = "job requeue result"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.summary = "request job requeue"; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.tags = []; |
| Added |
.paths["/slurm/v0.0.45/job/{job_id}/requeue"].get.tags[0] = "slurm"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"] = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.operationId = "slurm_v0045_post_jobs_requeue"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.requestBody = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.requestBody.content = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.requestBody.content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.requestBody.content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.requestBody.content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_jobs_requeue_query"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.requestBody.description = "batch job requeue request parameters"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["200"] = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["200"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_jobs_requeue_resp"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["200"].description = "batch job requeue request result"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["default"] = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["default"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_jobs_requeue_resp"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.responses["default"].description = "batch job requeue request result"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.summary = "batch requeue job(s)"; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.tags = []; |
| Added |
.paths["/slurm/v0.0.45/jobs/requeue"].post.tags[0] = "slurm"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].operationId = "slurm_v0045_delete_partition"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters = []; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0] = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].allowEmptyValue = false; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].description = "Partition name"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].explode = false; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].name = "partition_name"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].required = true; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0].style = "simple"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].parameters[0]["in"] = "path"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["200"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["200"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_resp"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["200"].description = "partition deletion result"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["default"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["default"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_resp"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].responses["default"].description = "partition deletion result"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].summary = "delete partition"; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].tags = []; |
| Added |
.paths["/slurm/v0.0.45/partition/{partition_name}"]["delete"].tags[0] = "slurm"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.operationId = "slurm_v0045_post_partitions"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.requestBody = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.requestBody.content = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.requestBody.content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.requestBody.content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.requestBody.content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_partitions_mod_req"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.requestBody.description = "partition description"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["200"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["200"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_resp"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["200"].description = "partition update or create request results"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["default"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["default"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_resp"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.responses["default"].description = "partition update or create request results"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.summary = "create or update partitions"; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.tags = []; |
| Added |
.paths["/slurm/v0.0.45/partitions/"].post.tags[0] = "slurm"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"] = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.operationId = "slurm_v0045_get_resources"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters = []; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0] = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].allowEmptyValue = false; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].allowReserved = false; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].description = "Job ID"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].explode = false; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].name = "job_id"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].required = true; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].schema.type = "string"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0].style = "simple"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.parameters[0]["in"] = "path"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["200"] = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["200"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_resource_layout_resp"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["200"].description = "resource layout information"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["default"] = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["default"].content = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_resource_layout_resp"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.responses["default"].description = "resource layout information"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.summary = "get resource layout info"; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.tags = []; |
| Added |
.paths["/slurm/v0.0.45/resources/{job_id}"].get.tags[0] = "slurm"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"] = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.operationId = "slurmdb_v0045_get_conf"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["200"] = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["200"].content = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["200"].content["application/json"] = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["200"].content["application/json"].schema = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["200"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_slurmdbd_conf_resp"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["200"].description = "slurmdbd configuration"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["default"] = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["default"].content = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["default"].content["application/json"] = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["default"].content["application/json"].schema = {}; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["default"].content["application/json"].schema.$ref = "#/components/schemas/v0.0.45_openapi_slurmdbd_conf_resp"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.responses["default"].description = "slurmdbd configuration"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.summary = "Dump slurmdbd configuration"; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.tags = []; |
| Added |
.paths["/slurmdb/v0.0.45/conf"].get.tags[0] = "slurmdb"; |