/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.nativeio;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.HardLink;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.PerformanceAdvisory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import sun.misc.Unsafe;
import com.google.common.annotations.VisibleForTesting;
/**
* JNI wrappers for various native IO-related calls not available in Java. These
* functions should generally be used alongside a fallback to another more
* portable mechanism.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class NativeIO {
public static class POSIX {
// Flags for open() call from bits/fcntl.h.
// NOTE: values are octal literals, mirroring the Linux fcntl.h definitions;
// they are passed straight through to the native open() wrapper.
public static final int O_RDONLY = 00;
public static final int O_WRONLY = 01;
public static final int O_RDWR = 02;
public static final int O_CREAT = 0100;
public static final int O_EXCL = 0200;
public static final int O_NOCTTY = 0400;
public static final int O_TRUNC = 01000;
public static final int O_APPEND = 02000;
public static final int O_NONBLOCK = 04000;
public static final int O_SYNC = 010000;
public static final int O_ASYNC = 020000;
// Aliases kept for parity with the C headers.
public static final int O_FSYNC = O_SYNC;
public static final int O_NDELAY = O_NONBLOCK;
// Flags for posix_fadvise() from bits/fcntl.h
/* No further special treatment. */
public static final int POSIX_FADV_NORMAL = 0;
/* Expect random page references. */
public static final int POSIX_FADV_RANDOM = 1;
/* Expect sequential page references. */
public static final int POSIX_FADV_SEQUENTIAL = 2;
/* Will need these pages. */
public static final int POSIX_FADV_WILLNEED = 3;
/* Don't need these pages. */
public static final int POSIX_FADV_DONTNEED = 4;
/* Data will be accessed once. */
public static final int POSIX_FADV_NOREUSE = 5;
// Flags for sync_file_range(2); combinable bit flags.
/*
 * Wait upon writeout of all pages in the range before performing the write.
 */
public static final int SYNC_FILE_RANGE_WAIT_BEFORE = 1;
/*
 * Initiate writeout of all those dirty pages in the range which are not
 * presently under writeback.
 */
public static final int SYNC_FILE_RANGE_WRITE = 2;
/*
 * Wait upon writeout of all pages in the range after performing the write.
 */
public static final int SYNC_FILE_RANGE_WAIT_AFTER = 4;
private static final Log LOG = LogFactory.getLog(NativeIO.class);
// Set to true by the static initializer once initNative() succeeds.
private static boolean nativeLoaded = false;
// Flipped to false at runtime if the native fadvise/sync_file_range calls
// turn out to be unsupported (presumably by code outside this view — confirm).
private static boolean fadvisePossible = true;
private static boolean syncFileRangePossible = true;
static final String WORKAROUND_NON_THREADSAFE_CALLS_KEY = "hadoop.workaround.non.threadsafe.getpwuid";
static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = true;
// Cache timeout for uid/gid -> name lookups, in milliseconds; -1 until
// initialized by the static block below.
private static long cacheTimeout = -1;
// Pluggable hook for OS cache manipulation; replaceable for testing via
// setCacheManipulator().
private static CacheManipulator cacheManipulator = new CacheManipulator();
/**
 * Returns the {@link CacheManipulator} currently used for OS cache
 * operations (mlock, fadvise, page-size queries).
 *
 * @return the active cache manipulator, never null
 */
public static CacheManipulator getCacheManipulator() {
return cacheManipulator;
}
/**
 * Replaces the {@link CacheManipulator} used for OS cache operations.
 * Intended primarily for tests (e.g. installing a
 * {@code NoMlockCacheManipulator}); not synchronized — callers are expected
 * to set this before concurrent use.
 *
 * @param cacheManipulator the manipulator to install
 */
public static void setCacheManipulator(CacheManipulator cacheManipulator) {
POSIX.cacheManipulator = cacheManipulator;
}
/**
 * Used to manipulate the operating system cache.
 *
 * The default implementation delegates every call to the real native
 * methods; subclasses (see {@code NoMlockCacheManipulator}) override
 * individual operations for testing.
 */
@VisibleForTesting
public static class CacheManipulator {
/**
 * Locks the given buffer's pages into memory via the native mlock wrapper.
 *
 * @param identifier a name for the buffer, used for logging/diagnostics
 * @param buffer     the buffer to lock
 * @param len        number of bytes to lock
 * @throws IOException if the native mlock call fails
 */
public void mlock(String identifier, ByteBuffer buffer, long len) throws IOException {
POSIX.mlock(buffer, len);
}
/** @return the RLIMIT_MEMLOCK-style limit reported by the native layer. */
public long getMemlockLimit() {
return NativeIO.getMemlockLimit();
}
/** @return the operating system's memory page size in bytes. */
public long getOperatingSystemPageSize() {
return NativeIO.getOperatingSystemPageSize();
}
/**
 * Issues posix_fadvise on the given range if the platform supports it.
 *
 * @throws NativeIOException if the native call fails
 */
public void posixFadviseIfPossible(String identifier, FileDescriptor fd, long offset, long len, int flags)
throws NativeIOException {
NativeIO.POSIX.posixFadviseIfPossible(identifier, fd, offset, len, flags);
}
/** @return true when the native library is loaded and mlock can be attempted. */
public boolean verifyCanMlock() {
return NativeIO.isAvailable();
}
}
/**
* A CacheManipulator used for testing which does not actually call mlock. This
* allows many tests to be run even when the operating system does not allow
* mlock, or only allows limited mlocking.
*/
@VisibleForTesting
public static class NoMlockCacheManipulator extends CacheManipulator {
public void mlock(String identifier, ByteBuffer buffer, long len) throws IOException {
LOG.info("mlocking " + identifier);
}
public long getMemlockLimit() {
return 1125899906842624L;
}
public long getOperatingSystemPageSize() {
return 4096;
}
public boolean verifyCanMlock() {
return true;
}
}
static {
// Only attempt native initialization when libhadoop was actually loaded.
if (NativeCodeLoader.isNativeCodeLoaded()) {
try {
Configuration conf = new Configuration();
// NOTE(review): workaroundNonThreadSafePasswdCalls is declared outside
// this visible chunk (presumably a static field of the enclosing
// NativeIO class) — confirm against the full file.
workaroundNonThreadSafePasswdCalls = conf.getBoolean(WORKAROUND_NON_THREADSAFE_CALLS_KEY,
WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT);
// The flag above must be set before initNative() so the native layer
// can read it during initialization.
initNative();
nativeLoaded = true;
// Config value is in seconds; cacheTimeout is kept in milliseconds.
cacheTimeout = conf.getLong(CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) * 1000;
LOG.debug("Initialized cache for IDs to User/Group mapping with a " + " cache timeout of "
+ cacheTimeout / 1000 + " seconds.");
} catch (Throwable t) {
// This can happen if the user has an older version of libhadoop.so
// installed - in this case we can continue without native IO
// after warning
PerformanceAdvisory.LOG.debug("Unable to initialize NativeIO libraries", t);
}
}
}
/**
 * Return true if the JNI-based native IO extensions are available.
 *
 * Both conditions must hold: libhadoop was loaded by the JVM, and this
 * class's static initializer completed initNative() without error.
 *
 * @return true when native IO calls may be used
 */
public static boolean isAvailable() {
return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
}
/**
 * Verifies the native library is loaded before a native call is attempted.
 *
 * @throws IOException if the native hadoop library is not available
 */
private static void assertCodeLoaded() throws IOException {
if (isAvailable()) {
return;
}
throw new IOException("NativeIO was not loaded");
}
/**
 * Wrapper around open(2).
 *
 * @param path  file path to open
 * @param flags combination of the O_* constants defined above
 * @param mode  permission bits used when O_CREAT creates the file
 * @return a file descriptor for the opened file
 * @throws IOException if the underlying open(2) call fails
 */
public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
/**
 * Wrapper around fstat(2).
 *
 * @param fd an open file descriptor
 * @return the stat information for the descriptor
 * @throws IOException if the underlying fstat(2) call fails
 */
private static native Stat fstat(FileDescriptor fd) throws IOException;
/**
 * Native chmod implementation. On UNIX, it is a wrapper around chmod(2).
 *
 * @param path file path whose mode is changed
 * @param mode new permission bits
 * @throws IOException if the native call fails
 */
private static native void chmodImpl(String path, int mode) throws IOException;
public static void chmod(String path, int mode) throws IOException {
if (!Shell.WINDOWS) {
chmodImpl(path, mode);
} else {
try {
chmodImpl(path, mode);
} catch (NativeIOException nioe) {
if (nioe.getErrorCode() == 3) {
throw new NativeIOException("No such file or directory", Errno.ENOENT);
} else {
LOG.warn(
Stri
没有合适的资源?快使用搜索试试~ 我知道了~
资源推荐
资源详情
资源评论
收起资源包目录
基于Hadoop实现对网站日志数据分析 包含150M .log数据 (125个子文件)
_SUCCESS 0B
_SUCCESS 0B
YARNRunner.class 30KB
NativeIO$POSIX.class 10KB
NativeIO.class 9KB
LogClean.class 3KB
LogClean$LogCleanMapper.class 3KB
NativeIO$POSIX$Stat.class 3KB
LogParser.class 2KB
NativeIO$Windows.class 2KB
LogClean$LogCleanReducer.class 2KB
NativeIO$Windows$AccessRight.class 2KB
NativeIO$POSIX$NoMlockCacheManipulator.class 2KB
NativeIO$POSIX$CacheManipulator.class 2KB
NativeIO$POSIX$IdCache.class 1KB
NativeIO$POSIX$CachedName.class 644B
NativeIO$CachedUid.class 567B
.classpath 6KB
.part-r-00000.crc 418KB
.part-r-00000.crc 418KB
._SUCCESS.crc 8B
._SUCCESS.crc 8B
tools.jar 14.56MB
hadoop-hdfs-2.7.1.jar 7.88MB
hadoop-common-2.7.1.jar 3.27MB
hadoop-yarn-api-2.7.1.jar 1.92MB
netty-all-4.0.23.Final.jar 1.7MB
hadoop-yarn-common-2.7.1.jar 1.58MB
guava-11.0.2.jar 1.57MB
commons-math3-3.1.1.jar 1.53MB
xwork-core-2.1.6.jar 1.49MB
hadoop-mapreduce-client-core-2.7.1.jar 1.47MB
htrace-core-3.1.0-incubating.jar 1.41MB
netty-3.6.2.Final.jar 1.14MB
javaee.jar 1.01MB
leveldbjni-all-1.8.jar 1021KB
snappy-java-1.0.4.1.jar 973KB
jaxb-impl-2.2.3-1.jar 869KB
freemarker-2.3.15.jar 859KB
zookeeper-3.4.6.jar 774KB
jackson-mapper-asl-1.9.13.jar 762KB
struts2-core-2.1.8.jar 738KB
hadoop-mapreduce-client-common-2.7.1.jar 735KB
jersey-server-1.9.jar 696KB
guice-3.0.jar 694KB
apacheds-kerberos-codec-2.0.0-M15.jar 675KB
hadoop-yarn-server-nodemanager-2.7.1.jar 674KB
commons-collections-3.2.1.jar 562KB
jetty-6.1.26.jar 527KB
jets3t-0.9.0.jar 527KB
protobuf-java-2.5.0.jar 521KB
hadoop-mapreduce-client-app-2.7.1.jar 504KB
log4j-1.2.17.jar 478KB
jersey-core-1.9.jar 448KB
mysqldriver.jar 388KB
hadoop-yarn-server-common-2.7.1.jar 355KB
httpclient-4.1.2.jar 344KB
commons-httpclient-3.1.jar 298KB
avro-1.7.4.jar 296KB
commons-configuration-1.6.jar 292KB
commons-lang-2.6.jar 278KB
commons-net-3.1.jar 267KB
curator-recipes-2.7.1.jar 264KB
junit-4.10.jar 247KB
commons-compress-1.4.1.jar 236KB
ognl-2.7.3.jar 234KB
jackson-core-asl-1.9.13.jar 227KB
commons-beanutils-core-1.8.0.jar 201KB
gson-2.2.4.jar 186KB
commons-beanutils-1.7.0.jar 184KB
curator-framework-2.7.1.jar 182KB
jsch-0.1.42.jar 181KB
commons-io-2.4.jar 181KB
httpcore-4.1.2.jar 177KB
jetty-util-6.1.26.jar 173KB
commons-digester-1.8.jar 140KB
hadoop-yarn-client-2.7.1.jar 139KB
jersey-client-1.9.jar 127KB
jaxb-api-2.2.2.jar 103KB
xz-1.0.jar 92KB
commons-io-1.3.2.jar 86KB
api-util-1.0.0-M20.jar 78KB
hamcrest-core-1.1.jar 75KB
hadoop-auth-2.7.1.jar 69KB
curator-client-2.7.1.jar 68KB
jettison-1.1.jar 66KB
guice-servlet-3.0.jar 63KB
activation-1.1.jar 62KB
commons-logging-1.1.3.jar 61KB
commons-codec-1.4.jar 57KB
commons-fileupload-1.2.1.jar 56KB
hadoop-mapreduce-client-shuffle-2.7.1.jar 44KB
apacheds-i18n-2.0.0-M15.jar 44KB
asm-3.1.jar 42KB
commons-cli-1.2.jar 40KB
hadoop-mapreduce-client-jobclient-2.7.1.jar 37KB
jsr305-3.0.0.jar 32KB
jackson-xc-1.8.3.jar 32KB
hadoop-yarn-server-web-proxy-2.7.1.jar 31KB
slf4j-api-1.7.10.jar 31KB
共 125 条
- 1
- 2
资源评论
Z.ZHANG
- 粉丝: 48
- 资源: 10
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功