/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flink.yarn;
import static org.apache.flink.client.deployment.application.ApplicationConfiguration.APPLICATION_MAIN_CLASS;
import static org.apache.flink.configuration.ConfigConstants.DEFAULT_FLINK_USR_LIB_DIR;
import static org.apache.flink.configuration.ConfigConstants.ENV_FLINK_LIB_DIR;
import static org.apache.flink.configuration.ConfigConstants.ENV_FLINK_OPT_DIR;
import static org.apache.flink.runtime.entrypoint.component.FileJobGraphRetriever.JOB_GRAPH_FILE_PATH;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.yarn.YarnConfigKeys.ENV_FLINK_CLASSPATH;
import static org.apache.flink.yarn.YarnConfigKeys.LOCAL_RESOURCE_DESCRIPTOR_SEPARATOR;
import org.dinky.utils.ClassPathUtils;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.cache.DistributedCache;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.client.deployment.ClusterDeploymentException;
import org.apache.flink.client.deployment.ClusterDescriptor;
import org.apache.flink.client.deployment.ClusterRetrieveException;
import org.apache.flink.client.deployment.ClusterSpecification;
import org.apache.flink.client.deployment.application.ApplicationConfiguration;
import org.apache.flink.client.program.ClusterClientProvider;
import org.apache.flink.client.program.PackagedProgramUtils;
import org.apache.flink.client.program.rest.RestClusterClient;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ConfigurationUtils;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.PipelineOptions;
import org.apache.flink.configuration.ResourceManagerOptions;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.core.plugin.PluginConfig;
import org.apache.flink.core.plugin.PluginUtils;
import org.apache.flink.runtime.clusterframework.BootstrapTools;
import org.apache.flink.runtime.entrypoint.ClusterEntrypoint;
import org.apache.flink.runtime.entrypoint.ClusterEntrypointUtils;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.apache.flink.runtime.jobmanager.JobManagerProcessSpec;
import org.apache.flink.runtime.jobmanager.JobManagerProcessUtils;
import org.apache.flink.runtime.security.token.DefaultDelegationTokenManager;
import org.apache.flink.runtime.security.token.DelegationTokenContainer;
import org.apache.flink.runtime.security.token.DelegationTokenManager;
import org.apache.flink.runtime.security.token.hadoop.HadoopDelegationTokenConverter;
import org.apache.flink.runtime.security.token.hadoop.KerberosLoginProvider;
import org.apache.flink.runtime.util.HadoopUtils;
import org.apache.flink.util.CollectionUtil;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.ShutdownHookUtil;
import org.apache.flink.util.StringUtils;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.yarn.configuration.YarnConfigOptionsInternal;
import org.apache.flink.yarn.configuration.YarnDeploymentTarget;
import org.apache.flink.yarn.configuration.YarnLogConfigUtil;
import org.apache.flink.yarn.entrypoint.YarnApplicationClusterEntryPoint;
import org.apache.flink.yarn.entrypoint.YarnJobClusterEntrypoint;
import org.apache.flink.yarn.entrypoint.YarnSessionClusterEntrypoint;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URLDecoder;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** The descriptor with deployment information for deploying a Flink cluster on Yarn. */
public class YarnClusterDescriptor implements ClusterDescriptor<ApplicationId> {
private static final Logger LOG = LoggerFactory.getLogger(YarnClusterDescriptor.class);
public static final String pathSeparator = ":";
@VisibleForTesting
static final String IGNORE_UNRECOGNIZED_VM_OPTIONS = "-XX:+IgnoreUnrecognizedVMOptions";
private final YarnConfiguration yarnConfiguration;
private final YarnClient yarnClient;
private final YarnClusterInformationRetriever yarnClusterInformationRetriever;
/** True if the descriptor must not shut down the YarnClient. */
private final boolean sharedYarnClient;
/** Lazily initialized list of files to ship. */
private final List<File> shipFile
没有合适的资源?快使用搜索试试~ 我知道了~
Dinky 是一个基于 Apache Flink 的实时数据开发平台,实现了敏捷的数据开发、部署和运维
共2000个文件
java:1095个
tsx:313个
md:300个
1.该资源内容由用户上传,如若侵权请联系客服进行举报
2.虚拟产品一经售出概不退款(资源遇到问题,请及时私信上传者)
版权申诉
0 下载量 84 浏览量
2024-03-24
09:19:25
上传
评论
收藏 7.01MB ZIP 举报
温馨提示
Dinky 是一个基于 Apache Flink 的实时数据开发平台,实现了敏捷的数据开发、部署和运维。
资源推荐
资源详情
资源评论
收起资源包目录
Dinky 是一个基于 Apache Flink 的实时数据开发平台,实现了敏捷的数据开发、部署和运维 (2000个子文件)
mvnw.cmd 7KB
custom.css 9KB
index.module.css 3KB
styles.module.css 1KB
index.css 901B
org.dinky.metadata.driver.Driver 42B
org.dinky.metadata.driver.Driver 42B
org.dinky.metadata.driver.Driver 41B
org.dinky.metadata.driver.Driver 41B
org.dinky.metadata.driver.Driver 39B
org.dinky.metadata.driver.Driver 38B
org.dinky.metadata.driver.Driver 38B
org.dinky.metadata.driver.Driver 37B
org.dinky.metadata.driver.Driver 37B
org.dinky.metadata.driver.Driver 36B
.editorconfig 245B
org.apache.flink.table.factories.Factory 66B
YarnClusterDescriptor.java 82KB
YarnClusterDescriptor.java 81KB
YarnClusterDescriptor.java 81KB
YarnClusterDescriptor.java 78KB
YarnClusterDescriptor.java 75KB
DinkyMysqlCatalog.java 47KB
DinkyMysqlCatalog.java 47KB
DinkyMysqlCatalog.java 47KB
DinkyMysqlCatalog.java 47KB
DinkyMysqlCatalog.java 47KB
TaskServiceImpl.java 41KB
ExtractionUtils.java 38KB
AbstractJdbcDriver.java 35KB
Utils.java 31KB
Utils.java 30KB
Utils.java 30KB
Utils.java 28KB
Utils.java 26KB
RelMdColumnOrigins.java 22KB
RelMdColumnOrigins.java 22KB
RelMdColumnOrigins.java 22KB
RelMdColumnOrigins.java 22KB
RelMdColumnOrigins.java 22KB
Status.java 22KB
UserServiceImpl.java 22KB
CatalogueServiceImpl.java 20KB
UDFUtil.java 20KB
JobManager.java 20KB
AbstractSinkBuilder.java 19KB
ResourceServiceImpl.java 19KB
Explainer.java 18KB
SQLLineageBuilder.java 17KB
SchedulerServiceImpl.java 17KB
CheckPointOverView.java 17KB
Submitter.java 16KB
DataSourceController.java 16KB
YarnGateway.java 16KB
LineageUtils.java 16KB
PythonOptions.java 16KB
SystemConfiguration.java 15KB
CustomTableEnvironmentImpl.java 15KB
FlinkAPI.java 15KB
DataBaseServiceImpl.java 14KB
CustomTableEnvironmentImpl.java 14KB
CustomTableResultImpl.java 14KB
MapParseUtils.java 14KB
KubernetesClusterDescriptor.java 14KB
PrestoDriver.java 14KB
JobRefreshHandler.java 13KB
MenuServiceImpl.java 13KB
JdbcDynamicTableFactory.java 13KB
OssTemplate.java 13KB
ConfigFile.java 12KB
GitController.java 12KB
HiveDriver.java 12KB
ClusterInstanceServiceImpl.java 12KB
JobInstanceServiceImpl.java 12KB
JdbcLookupFunction.java 12KB
PaimonUtil.java 12KB
ClickHouseDriver.java 12KB
JobInstanceController.java 12KB
AbstractSqlSinkBuilder.java 12KB
SqlSelect.java 11KB
SqlSelect.java 11KB
SqlSelect.java 11KB
JobAlertHandler.java 11KB
KafkaSinkJsonBuilder.java 11KB
SqlSelect.java 11KB
SqlSelect.java 11KB
CDCSource.java 11KB
Executor.java 11KB
TaskService.java 11KB
MavenUtil.java 11KB
TokenService.java 11KB
MysqlCDCBuilder.java 11KB
VariableManager.java 11KB
PackagedProgramUtils.java 10KB
PackagedProgramUtils.java 10KB
PackagedProgramUtils.java 10KB
PackagedProgramUtils.java 10KB
PackagedProgramUtils.java 10KB
KubernetesApplicationGateway.java 10KB
ConsoleContextHolder.java 10KB
共 2000 条
- 1
- 2
- 3
- 4
- 5
- 6
- 20
资源评论
Java程序员-张凯
- 粉丝: 1w+
- 资源: 6723
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功