Reading the contents of a file on a Linux server from Java
[Abstract] Reading the contents of a file on a Linux server from Java
@[TOC]
Usage steps:
==Three steps in total:
Step 1: add the dependencies
Step 2: add the configuration file
Step 3: call the code==
Dependencies
<!-- ganymed-ssh2: SSH client used by the Java code below to read Spark output -->
<dependency>
    <groupId>ch.ethz.ganymed</groupId>
    <artifactId>ganymed-ssh2</artifactId>
    <version>build210</version>
</dependency>
<!-- JSch: another SSH/SFTP client for reading Spark output from Java -->
<dependency>
    <groupId>com.jcraft</groupId>
    <artifactId>jsch</artifactId>
    <version>0.1.54</version>
</dependency>
Configuration file
# SSH credentials for the bigdata server
usr=root
pwd=geecentos
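The login code in the next section reads these two properties into the fields usr and pwd, but the article does not show that wiring. Below is a minimal sketch of how they might be injected, assuming the class is a Spring @Service and the properties file is on the classpath; the class name SparkLogService and the file name bigdata.properties are illustrative, not from the original project.

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Service;

@Slf4j
@Service
@PropertySource("classpath:bigdata.properties") // hypothetical file name for the properties shown above
public class SparkLogService {

    /** SSH user name, injected from the properties file */
    @Value("${usr}")
    private String usr;

    /** SSH password, injected from the properties file */
    @Value("${pwd}")
    private String pwd;

    // the login(...) method from the "Code" section below belongs in this class
}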
Code
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Vector;
import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.SFTPv3Client;
import ch.ethz.ssh2.SFTPv3DirectoryEntry;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;
import com.geespace.microservices.calculate.execute.engine.bean.pojo.SparkRecord;
import com.geespace.microservices.calculate.execute.engine.dao.SparkRecordMapper;
import com.geespace.microservices.calculate.execute.engine.response.JobSubmitResponse;
import com.geespace.microservices.calculate.execute.engine.response.Msg;
import com.geespace.microservices.calculate.execute.engine.response.Response;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.client.RestTemplate;
/**
 * login. Note: opens a remote connection on the default port 22; to use a different port,
 * call the Connection constructor that also takes a port number.
 *
 * @param submissionId
 *            submissionId
 * @param ret
 *            ret
 * @author liudz
 * @date 2020/5/11
 * @return execution result (contents of the job's stdout file)
 **/
public String login(String submissionId, ResponseEntity<JobSubmitResponse> ret) {
    StringBuilder result = new StringBuilder();
    // workerHostPort has the form "host:port"; only the host part is needed for the SSH connection
    String ip = ret.getBody().getWorkerHostPort().substring(0, ret.getBody().getWorkerHostPort().indexOf(":"));
    Connection conn = null;
    Session ss = null;
    String directory = "/home/spark/work/" + submissionId;
    try {
        conn = new Connection(ip);
        conn.connect();
        boolean b = conn.authenticateWithPassword(usr, pwd);
        if (!b) {
            throw new IOException("Authentication failed.");
        }
        // List the job's work directory over SFTP and look for the stdout file
        SFTPv3Client sft = new SFTPv3Client(conn);
        Vector<?> v = sft.ls(directory);
        for (int i = 0; i < v.size(); i++) {
            SFTPv3DirectoryEntry s = (SFTPv3DirectoryEntry) v.get(i);
            if ("stdout".equals(s.filename)) {
                // Run "cat" on the remote file and collect its output line by line
                ss = conn.openSession();
                ss.execCommand("cat ".concat(directory + "/" + s.filename));
                InputStream is = new StreamGobbler(ss.getStdout());
                BufferedReader bs = new BufferedReader(new InputStreamReader(is));
                String line;
                while ((line = bs.readLine()) != null) {
                    result.append(line).append("\n");
                }
                bs.close();
            }
        }
        sft.close();
        if (ss != null) {
            ss.close();
        }
        conn.close();
    } catch (IOException e) {
        log.error("User {} (password {}) failed to log in to server {}: {}", usr, pwd, ip, e.getMessage());
    }
    System.out.print(result);
    return result.toString();
}
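The pom above also declares jsch, which the method above does not use. For completeness, here is a minimal sketch of reading the same kind of stdout file over SFTP with JSch; the host, credentials, and file path are placeholders, and the submission id in the path is hypothetical.

import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class JschReadExample {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; replace with real values
        String host = "192.168.1.10";
        String user = "root";
        String password = "geecentos";
        String remoteFile = "/home/spark/work/driver-20200511-0001/stdout"; // hypothetical submission id

        JSch jsch = new JSch();
        Session session = jsch.getSession(user, host, 22);
        session.setPassword(password);
        session.setConfig("StrictHostKeyChecking", "no"); // skip host-key check for this sketch only
        session.connect();

        // Open an SFTP channel and stream the remote file
        ChannelSftp sftp = (ChannelSftp) session.openChannel("sftp");
        sftp.connect();

        StringBuilder result = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(sftp.get(remoteFile), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                result.append(line).append("\n");
            }
        }

        sftp.disconnect();
        session.disconnect();
        System.out.print(result);
    }
}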
JobSubmitResponse
package com.geespace.microservices.calculate.execute.engine.response;

import lombok.Data;

/**
 * @author: liudz
 * @date: 2020-04-27
 */
@Data
public class JobSubmitResponse {
    /**
     * action name
     */
    private String action;
    /**
     * message
     */
    private String message;
    /**
     * Spark version of the server
     */
    private String serverSparkVersion;
    /**
     * submission id
     */
    private String submissionId;
    /**
     * success flag
     */
    private String success;
    /**
     * driver state
     */
    private String driverState;
    /**
     * workerHostPort
     */
    private String workerHostPort;
    /**
     * workerId
     */
    private String workerId;
}
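Finally, an illustrative call of the login method from the class that contains it; the submission id and host:port below are placeholders, not real job data. login() strips the port from workerHostPort and reads the job's stdout file over SSH.

// Illustrative caller; values are placeholders
JobSubmitResponse body = new JobSubmitResponse();
body.setSubmissionId("driver-20200511-0001");   // hypothetical Spark submission id
body.setWorkerHostPort("192.168.1.10:45678");   // "host:port"; login() keeps only the host part
ResponseEntity<JobSubmitResponse> ret = ResponseEntity.ok(body);

// Reads /home/spark/work/driver-20200511-0001/stdout on the worker and returns its contents
String stdout = login(body.getSubmissionId(), ret);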