package org.apache.hadoop.contrib.ftp;
import org.apache.ftpserver.FtpServerConfigurationException;
import org.apache.ftpserver.ftplet.*;
import org.apache.ftpserver.usermanager.AnonymousAuthentication;
import org.apache.ftpserver.usermanager.PasswordEncryptor;
import org.apache.ftpserver.usermanager.PropertiesUserManagerFactory;
import org.apache.ftpserver.usermanager.UsernamePasswordAuthentication;
import org.apache.ftpserver.usermanager.impl.*;
import org.apache.ftpserver.util.BaseProperties;
import org.apache.ftpserver.util.IoUtils;
import org.apache.log4j.Logger;
import java.io.*;
import java.net.URL;
import java.util.*;
/**
* <strong>Internal class, do not use directly.</strong>
* <p/>
* <p>Properties file based <code>UserManager</code> implementation. We use
* <code>user.properties</code> file to store user data.</p>
* <p/>
* <p>The file will use the following properties for storing users:</p>
* <table>
* <tr>
* <th>Property</th>
* <th>Documentation</th>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.homedirectory</td>
* <td>Path to the home directory for the user, based on the file system implementation used</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.userpassword</td>
* <td>The password for the user. Can be in clear text, MD5 hash or salted SHA hash based on the
* configuration on the user manager
* </td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.enableflag</td>
* <td>true if the user is enabled, false otherwise</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.writepermission</td>
* <td>true if the user is allowed to upload files and create directories, false otherwise</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.idletime</td>
* <td>The number of seconds the user is allowed to be idle before disconnected.
* 0 disables the idle timeout
* </td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.maxloginnumber</td>
* <td>The maximum number of concurrent logins by the user. 0 disables the check.</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.maxloginperip</td>
* <td>The maximum number of concurrent logins from the same IP address by the user. 0 disables the check.</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.uploadrate</td>
* <td>The maximum number of bytes per second the user is allowed to upload files. 0 disables the check.</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.downloadrate</td>
* <td>The maximum number of bytes per second the user is allowed to download files. 0 disables the check.</td>
* </tr>
* <tr>
* <td>ftpserver.user.{username}.groups</td>
* <td>Groups the user belongs to. Comma-separated list. The first group is the main group, i.e. created files will have that group.</td>
* </tr>
* </table>
* <p/>
* <p>Example:</p>
* <pre>
* ftpserver.user.admin.homedirectory=/ftproot
* ftpserver.user.admin.userpassword=admin
* ftpserver.user.admin.enableflag=true
* ftpserver.user.admin.writepermission=true
* ftpserver.user.admin.idletime=0
* ftpserver.user.admin.maxloginnumber=0
* ftpserver.user.admin.maxloginperip=0
* ftpserver.user.admin.uploadrate=0
* ftpserver.user.admin.downloadrate=0
* </pre>
*
* @author The Apache MINA Project (dev@mina.apache.org)
* @version $Rev: 718118 $, $Date: 2008-11-16 22:05:00 +0100 (Sun, 16 Nov 2008) $
*/
public class HdfsUserManager extends AbstractUserManager {
private final Logger LOG = Logger.getLogger(HdfsUserManager.class);
private final static String PREFIX = "ftpserver.user.";
private BaseProperties userDataProp;
private File userDataFile;
private URL userUrl;
private static final String ATTR_GROUPS = "groups";
/**
 * Internal constructor, do not use directly. Use {@link PropertiesUserManagerFactory} instead.
 *
 * @param passwordEncryptor encryptor used to hash/verify stored passwords
 * @param userDataFile      properties file holding the user definitions;
 *                          loaded eagerly by this constructor
 * @param adminName         name of the administrator user
 */
public HdfsUserManager(PasswordEncryptor passwordEncryptor,
File userDataFile, String adminName) {
super(adminName, passwordEncryptor);
// Eagerly load user data so the manager is usable immediately after construction.
loadFromFile(userDataFile);
}
/**
 * (Re)initializes {@code userDataProp} from the given properties file.
 *
 * <p>Resolution order: the file system first; if the file does not exist
 * there, the classpath is tried using the same path. A {@code null} file
 * leaves an empty property set (no error).</p>
 *
 * @param userDataFile file (or classpath resource path) to load, may be null
 * @throws FtpServerConfigurationException if a file was specified but could
 *         not be found in either location, or if reading it fails
 */
private void loadFromFile(File userDataFile) {
    try {
        userDataProp = new BaseProperties();
        if (userDataFile == null) {
            return;
        }
        LOG.debug("File configured, will try loading");
        if (userDataFile.exists()) {
            // Found on disk: remember the file so refresh()/save() can use it.
            this.userDataFile = userDataFile;
            LOG.debug("File found on file system");
            FileInputStream fileStream = null;
            try {
                fileStream = new FileInputStream(userDataFile);
                userDataProp.load(fileStream);
            } finally {
                IoUtils.close(fileStream);
            }
        } else {
            // Not on disk: fall back to loading the same path as a classpath resource.
            LOG
                    .debug("File not found on file system, try loading from classpath");
            InputStream resourceStream = getClass().getClassLoader()
                    .getResourceAsStream(userDataFile.getPath());
            if (resourceStream == null) {
                throw new FtpServerConfigurationException(
                        "User data file specified but could not be located, "
                                + "neither on the file system or in the classpath: "
                                + userDataFile.getPath());
            }
            try {
                userDataProp.load(resourceStream);
            } finally {
                IoUtils.close(resourceStream);
            }
        }
    } catch (IOException e) {
        throw new FtpServerConfigurationException(
                "Error loading user data file : " + userDataFile, e);
    }
}
/**
 * Reloads the contents of the user.properties file, so that manual edits to
 * the backing file are picked up by the running server.
 */
public void refresh() {
    // Guard against concurrent reads of userDataProp while it is being replaced.
    synchronized (userDataProp) {
        String path = userDataFile.getAbsolutePath();
        LOG.debug("Refreshing user manager using file: " + path);
        loadFromFile(userDataFile);
    }
}
/**
 * Retrieve the file backing this user manager.
 *
 * <p>Note: this is only set once a file has actually been found on the
 * file system by {@code loadFromFile}; it may be {@code null} if the user
 * data was loaded from the classpath or no file was configured.</p>
 *
 * @return The file, possibly null
 */
public File getFile() {
return userDataFile;
}
/**
 * Splits a comma-separated group list into a mutable list of group names.
 * Entries are not trimmed; an input without commas yields a single-element list.
 *
 * @param groupsLine comma-separated group names (must not be null)
 * @return mutable list of the group names, in order
 */
static ArrayList<String> parseGroups(String groupsLine) {
    ArrayList<String> groups = new ArrayList<String>();
    for (String group : groupsLine.split(",")) {
        groups.add(group);
    }
    return groups;
}
/**
* Save user data. Store the properties.
*/
public synchronized void save(User usr) throws FtpException {
// null value check
if (usr.getName() == null) {
throw new NullPointerException("User name is null.");
}
String thisPrefix = PREFIX + usr.getName() + '.';
// set other properties
userDataProp.setProperty(thisPrefix + ATTR_PASSWORD, getPassword(usr));
String home = usr.getHomeDirectory();
if (home == null) {
home = "/";
}
userDataProp.setProperty(thisPrefix + ATTR_HOME, home);
userDataProp.setProperty(thisPrefix + ATTR_ENABLE, usr.getEnabled());
userDataProp.setProperty(thisPrefix + ATTR_WRITE_PERM, usr
.authorize(new WriteRequest()) != null);
userDataProp.setProperty(thisPrefix + ATTR_MAX_IDLE_TIME, usr
.getMaxIdleTime());
TransferRateRequest transferRateRequest = new TransferRateRequest();
transferRateRequest = (TransferRateRequest) usr
.authorize(transferRateRequest);
if (transferRateRequest != null) {
userDataProp.setProperty(thisPrefix + ATTR_MAX_UPLOAD_RATE,
transferRateRequest.getMaxUploadRate());
userDataProp.setProperty(thisPrefix + ATTR_MAX_DOWNLOAD_RATE,
transferRateRequest.getMaxDownloadRate());
} else {
没有合适的资源?快使用搜索试试~ 我知道了~
hdfs-over-ftp-hadoop-0.20.0.rar_ftp_ftpoverhdfs_hdfs文件传入ftp_java
共32个文件
java:10个
jar:9个
xml:4个
1.该资源内容由用户上传,如若侵权请联系客服进行举报
2.虚拟产品一经售出概不退款(资源遇到问题,请及时私信上传者)
2.虚拟产品一经售出概不退款(资源遇到问题,请及时私信上传者)
版权申诉
0 下载量 104 浏览量
2022-09-14
18:24:22
上传
评论
收藏 3.45MB RAR 举报
温馨提示
在hadoop分布式文件系统上实现ftp 服务
资源详情
资源评论
资源推荐
收起资源包目录
hdfs-over-ftp-hadoop-0.20.0.rar (32个子文件)
hdfs-over-ftp-hadoop-0.20.0
hdfs-over-ftp
hdfs-over-ftp.conf 376B
log4j.xml 777B
ftp.jks 1KB
lib
log4j-1.2.14.jar 359KB
hdfs-over-ftp-1.0-SNAPSHOT.jar 20KB
jcl-over-slf4j-1.5.2.jar 16KB
ftplet-api-1.0.0.jar 22KB
hadoop-core-0.20.0.jar 2.47MB
mina-core-2.0.0-M4.jar 620KB
ftpserver-core-1.0.0.jar 265KB
slf4j-api-1.5.2.jar 17KB
slf4j-log4j12-1.5.2.jar 9KB
users.conf 928B
hdfs-over-ftp.sh 753B
pom.xml 4KB
src
test
java
org
apache
hadoop
contrib
ftp
HdfsFileSystemViewTest.java 3KB
HdfsFtpFileTest.java 7KB
HdfsUserTest.java 627B
main
bin
hdfs-over-ftp.sh 753B
conf
hdfs-over-ftp.conf 376B
log4j.xml 777B
ftp.jks 1KB
users.conf 928B
assembly
distr.xml 726B
doc
README 3KB
java
org
apache
hadoop
contrib
ftp
HdfsUser.java 5KB
HdfsOverFtpSystem.java 1KB
HdfsFileSystemFactory.java 529B
HdfsFileSystemView.java 3KB
HdfsUserManager.java 16KB
HdfsOverFtpServer.java 6KB
HdfsFtpFile.java 11KB
共 32 条
- 1
小贝德罗
- 粉丝: 68
- 资源: 1万+
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功
评论0