介面
createJob
參數
參數 | 類型 | 是否必需 | 說明 |
jobDescription | JobDescription | 是 | Job對象中有各個任務的描述資訊,和任務的DAG依賴。 |
jobDescription 的具體屬性資訊參考DAG作業和APP作業。
傳回值
建立成功後返回一個 CreateJobResponse 執行個體,可以通過 response.getJobId() 擷取建立的作業ID。建立失敗後,拋出異常:ClientException。
例子
Java 源碼
import com.aliyuncs.batchcompute.main.v20151111.*;
import com.aliyuncs.batchcompute.model.v20151111.*;
import com.aliyuncs.batchcompute.pojo.v20151111.*;
import com.aliyuncs.exceptions.ClientException;
/**
 * Example: submit a BatchCompute job of either DAG or App type, running on
 * either an auto-created cluster or an existing fixed cluster.
 *
 * <p>Toggle {@code IS_DAG_JOB} and {@code IS_AUTO_CLUSTER} before running to
 * choose the job/cluster type. Replace the access-key, region, cluster-id and
 * OSS path placeholders with real values. On success the created job id is
 * printed; on failure {@code createJob} throws {@link ClientException}.
 */
public class CreateAppJob {
    // Placeholder credentials and region -- replace before running.
    static String ACCESS_KEY_ID = "xxx";
    static String ACCESS_KEY_SECRET = "xxx";
    static String REGION_ID = "cn-xxx";
    // Existing cluster id; only used when IS_AUTO_CLUSTER is false.
    static String ClusterId = "cls-xxx";
    // true -> submit a DAG-type job; false -> submit an App-type job.
    static boolean IS_DAG_JOB = true;
    // true -> let the service create a cluster; false -> use ClusterId.
    static boolean IS_AUTO_CLUSTER = true;

    public static void main(String[] args) {
        BatchCompute client = new BatchComputeClient(REGION_ID, ACCESS_KEY_ID, ACCESS_KEY_SECRET);
        try {
            JobDescription jobDescription = getJobDesc();
            CreateJobResponse response = client.createJob(jobDescription);
            // A successful response carries the id of the newly created job.
            String jobId = response.getJobId();
            System.out.println("jobId:" + jobId);
            System.out.println("RequestId: " + response.getRequestId());
            System.out.println("StatusCode: " + response.getStatusCode());
        } catch (ClientException e) {
            // Creation failures (bad credentials, invalid description, ...)
            // surface as ClientException.
            e.printStackTrace();
        }
    }

    /**
     * Builds the top-level job description, switching between the DAG and
     * App job types according to {@code IS_DAG_JOB}.
     */
    private static JobDescription getJobDesc() {
        JobDescription desc = new JobDescription();
        desc.setName("javaSdkJob");
        desc.setDescription("javaSdkJob");
        desc.setPriority(1);
        // Fail the whole job as soon as any instance fails.
        desc.setJobFailOnInstanceFail(true);
        desc.setAutoRelease(false);
        if (IS_DAG_JOB) {
            desc.setType("DAG");
            desc.setDag(getDagDesc());
        } else {
            desc.setType("App");
            desc.setApp(getAppJobDescription());
        }
        return desc;
    }

    /**
     * Builds an App-type job description. The referenced App ("JavaSdkApp")
     * must already exist, and the input/output names must match the App's
     * own definition.
     */
    private static AppJobDescription getAppJobDescription() {
        AppJobDescription appJobDescription = new AppJobDescription();
        appJobDescription.setAppName("JavaSdkApp");
        appJobDescription.addInputs("inputFile", "oss://test/input/cromwell_app.txt");
        appJobDescription.addOutputs("outputFile", "oss://test/output/ret/");
        // Redirect stdout/stderr of the app run to OSS.
        AppJobDescription.Logging logging = new AppJobDescription.Logging();
        logging.setStderrPath("oss://test/output/error/");
        logging.setStdoutPath("oss://test/output/log/");
        appJobDescription.setLogging(logging);
        // Per-run overrides of the App's resource configuration.
        appJobDescription.addConfig("ResourceType", "OnDemand");
        appJobDescription.addConfig("InstanceType", "ecs.sn2ne.large");
        appJobDescription.addConfig("InstanceCount", 1);
        appJobDescription.addConfig("MinDiskSize", 40);
        appJobDescription.addConfig("DiskType", "cloud_efficiency");
        appJobDescription.addConfig("MaxRetryCount", 1);
        appJobDescription.addConfig("Timeout", 1000);
        appJobDescription.addConfig("ReserveOnFail", true);
        appJobDescription.addConfig("ClassicNetwork", false);
        appJobDescription.addConfig("MinDataDiskSize", 40);
        appJobDescription.addConfig("DataDiskType", "cloud_efficiency");
        appJobDescription.addConfig("DataDiskMountPoint", "/home/mount/");
        return appJobDescription;
    }

    /**
     * Builds a single-task DAG description. The task runs on an auto cluster
     * or on the fixed cluster {@code ClusterId}, depending on
     * {@code IS_AUTO_CLUSTER}.
     */
    private static DAG getDagDesc() {
        DAG dag = new DAG();
        TaskDescription task = new TaskDescription();
        task.setTaskName("javaSdkTask");
        task.setInstanceCount(1);
        if (IS_AUTO_CLUSTER) {
            task.setAutoCluster(getAutoCluster());
        } else {
            task.setClusterId(ClusterId);
        }
        task.setMaxRetryCount(2);
        task.setTimeout(10000);
        Parameters parameters = new Parameters();
        Command cmd = new Command();
        // FIX: the original sample contained mojibake after the script name;
        // the command is just the script invocation.
        cmd.setCommandLine("python runtask.py");
        cmd.setPackagePath("oss://yuanhyyshenzhen/test/installpackage/runtask.tar.gz");
        parameters.setCommand(cmd);
        parameters.setStderrRedirectPath("oss://test/output/error/");
        parameters.setStdoutRedirectPath("oss://test/output/log/");
        InputMappingConfig input = new InputMappingConfig();
        input.setLocale("GBK");
        input.setLock(true);
        parameters.setInputMappingConfig(input);
        task.setParameters(parameters);
        task.addInputMapping("oss://test/input/", "/home/admin/disk1/");
        task.addOutputMapping("/home/admin/disk2/", "oss://test/output/ret/");
        Mounts mounts = new Mounts();
        MountEntry mountEntry = new MountEntry();
        mountEntry.setDestination("/home/mount");
        mountEntry.setSource("oss://test/mount/");
        mountEntry.setWriteSupport(false);
        mounts.setCacheSupport(false);
        mounts.setLock(false);
        mounts.addEntries(mountEntry);
        // FIX: the original built this Mounts object but never attached it,
        // so the mount configuration was silently dropped.
        task.setMounts(mounts);
        dag.addTask(task);
        return dag;
    }

    /** Describes the cluster to be created automatically for the task. */
    private static AutoCluster getAutoCluster() {
        AutoCluster autoCluster = new AutoCluster();
        autoCluster.setImageId("img-ubuntu");
        autoCluster.setInstanceType("ecs.s3.large");
        // Keep the instances around after a failure, for debugging.
        autoCluster.setReserveOnFail(true);
        autoCluster.setResourceType("OnDemand");
        autoCluster.setConfigs(getConfigDesc());
        return autoCluster;
    }

    /** Disk and VPC network configuration for the auto cluster. */
    private static Configs getConfigDesc() {
        Configs configs = new Configs();
        Disks disks = new Disks();
        SystemDisk systemDisk = new SystemDisk();
        systemDisk.setSize(40);
        systemDisk.setType("cloud_efficiency");
        disks.setSystemDisk(systemDisk);
        DataDisk dataDisk = new DataDisk();
        dataDisk.setMountPoint("/home/dataDisk/");
        dataDisk.setSize(40);
        dataDisk.setType("cloud_efficiency");
        disks.setDataDisk(dataDisk);
        configs.setDisks(disks);
        Networks networks = new Networks();
        VPC vpc = new VPC();
        vpc.setCidrBlock("10.0.0.0/12");
        networks.setVpc(vpc);
        configs.setNetworks(networks);
        return configs;
    }
}
執行結果:
```json
{
jobId: job-000000005BE3E897000007FA00114EE9
RequestId: null
StatusCode: 201
}
```
注意
本執行個體代碼支援提交 APP 和 DAG 類型作業,支援 AutoCluster 和固定叢集類型的作業,提交作業之前根據業務需要修改開關(IS_DAG_JOB 和 IS_AUTO_CLUSTER)即可。
若是提交 APP 類型作業,需要在提交作業之前建立 APP,然後根據 APP 的建立參數做對應修改作業參數,最後進行作業提交。
提交固定叢集作業之前需要先建立叢集,修改 ClusterId 為新建立的叢集,然後提交作業。
提交作業前,請確保 OSS 地址填寫正確並且已經上傳輸入或者執行檔案到對應的 OSS 路徑。