
tfsdemo server-side code commit

chengwb3 committed 8 years ago
parent commit a930435608

+ 104 - 101
display-server/src/main/java/com/ai/server/bean/UploadDownloadWithTfsBean.java

@@ -1,173 +1,176 @@
 package com.ai.server.bean;
 
 import java.io.FileInputStream;
-import java.io.IOException;
 import java.io.InputStream;
+import java.util.Date;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
 
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.commons.fileupload.FileItem;
 import org.apache.commons.fileupload.disk.DiskFileItemFactory;
 import org.apache.commons.fileupload.servlet.ServletFileUpload;
+import org.apache.commons.lang3.StringUtils;
 
 import com.ai.ipu.basic.file.FileUtil;
-import com.ai.ipu.basic.file.PropertiesHelper;
+import com.ai.ipu.basic.log.ILogger;
+import com.ai.ipu.basic.log.IpuLoggerFactory;
 import com.ai.ipu.fs.IBaseFs;
-import com.ai.ipu.fs.impl.UniqueCacheFs;
-import com.ai.ipu.fs.util.FsFactory;
 import com.ai.ipu.server.servlet.ServletManager;
-import com.ai.ipu.server.util.MobileUtility;
+import com.ai.ipu.server.util.MobileServerException;
 import com.ai.server.core.bean.IpuAppBean;
+import com.ai.server.dao.FileDao;
+import com.ai.server.util.Constant;
+import com.ai.tfs.DbFs;
 import com.ailk.common.data.IData;
 import com.ailk.common.data.IDataset;
 import com.ailk.common.data.impl.DataMap;
-import com.ailk.common.data.impl.DatasetList;
 
 public class UploadDownloadWithTfsBean extends IpuAppBean {
 
-    private final String path = this.getClass().getClassLoader().getResource("tfsdb.properties").getPath();
+    transient protected static final ILogger log = IpuLoggerFactory.createLogger(UploadDownloadWithTfsBean.class);
+
+    private static final String IPU_FILE = "ipu_file";
 
     /**
-     * Upload the stream file to the TFS server and update the tfsdb.properties file
+     * Upload the stream file to the TFS server and record it in the database
      * 
      * @param param
     * @return
     * @throws Exception
     */
    public IData uploadPic(IData param) throws Exception {
-        String fileName = param.getString("fileName");
-        // 1. Receive the stream file from the client
-        DiskFileItemFactory factory = new DiskFileItemFactory(); // obtain the disk file item factory
-        ServletFileUpload upload = new ServletFileUpload(factory);
-        HttpServletRequest request = ServletManager.getRequest();
-        if (!ServletFileUpload.isMultipartContent(request)) {
-            MobileUtility.error("没有检测到文件,请重新提交!");
-        }
-        List<FileItem> fileList = upload.parseRequest(request);
-        Iterator<FileItem> it = fileList.iterator();
+        IData result = new DataMap();
         InputStream is = null;
-        while (it.hasNext()) {
-            FileItem item = it.next();
-            if (!item.isFormField()) {
-                is = item.getInputStream();
-                break;
-            }
-        }
-
-        // 2. Store the stream file on the TFS server and return the storage info
-        String fileHashCode = null;
         FileInputStream fis = null;
         try {
-            UniqueCacheFs ucfs = new UniqueCacheFs();
+            String fileName = param.getString("fileName");
+            // 1. Receive the stream file from the client
+            DiskFileItemFactory factory = new DiskFileItemFactory(); // obtain the disk file item factory
+            ServletFileUpload upload = new ServletFileUpload(factory);
+            HttpServletRequest request = ServletManager.getRequest();
+            if (!ServletFileUpload.isMultipartContent(request)) {
+                throw new MobileServerException("没有检测到文件,请重新提交!");
+            }
+            List<FileItem> fileList = upload.parseRequest(request);
+            Iterator<FileItem> it = fileList.iterator();
+            while (it.hasNext()) {
+                FileItem item = it.next();
+                if (!item.isFormField()) {
+                    is = item.getInputStream();
+                    break;
+                }
+            }
+
+            // 2. Store the stream file on the TFS server and return the storage info
+            String fileHashCode = null;
+            IBaseFs fs = new DbFs();
             fis = (FileInputStream) is;
-            fileHashCode = ucfs.saveFile(fis);
+            fileHashCode = fs.saveFile(fis);
+            log.debug("fileName=[" + fileName + "] 上传tfs成功,返回值fileHashCode=[" + fileHashCode + "]");
+
+            // 3. Save the business data: insert if absent, skip if it already exists
+            if (fileHashCode != null) {
+                FileDao dao = new FileDao(Constant.CONNNAME);
+                IData fileParam = new DataMap();
+                fileParam.put("file_name", fileName);
+                fileParam.put("file_hash_code", fileHashCode);
+                // fileParam.put("file_path", null);
+                fileParam.put("create_time", new Date());
+                IDataset list = dao.queryFileListByFileHashCode(fileParam);
+                if (list == null || list.isEmpty()) {
+                    dao.insertFile(fileParam);
+                    log.debug("fileHashCode=[" + fileHashCode + "] 未经存在,插入业务数据库表tab_file。");
+                } else {
+                    log.debug("fileHashCode=[" + fileHashCode + "] 已经存在,跳过插入业务数据库表tab_file步骤。");
+                }
+            }
+
+            result.put("flag", "success");
        }
        catch (Exception e) {
-            // TODO Auto-generated catch block
-            e.printStackTrace();
+            log.error(e.getMessage());
+            result.put("flag", "fail");
        }
        finally {
            if (fis != null) {
-                fis.close();
+                try {
+                    fis.close();
+                }
+                catch (Exception e) {
+                    log.error(e.getMessage());
+                }
            }
            if (is != null) {
-                is.close();
+                try {
+                    is.close();
+                }
+                catch (Exception e) {
+                    log.error(e.getMessage());
+                }
            }
        }
-
-        // 3. Record the info in the database, or in the tfsdb.properties file
-        PropertiesHelper pro = new PropertiesHelper(path);
-        String value = fileName + "," + fileHashCode + "," + "123";
-        Set<Object> keySet = pro.keySet();
-        String key = getNextKey(keySet);
-        pro.put(key, value);
-
-        IData result = new DataMap();
-        result.put("flag", "success");
        return result;
    }
 
-    private String getNextKey(Set<Object> keySet) {
-        int maxNum = 0;
-        for (Object key : keySet) {
-            maxNum = Integer.valueOf((String) key) > maxNum ? Integer.valueOf((String) key) : maxNum;
-        }
-        return (maxNum + 1) + "";
-    }
-
    public InputStream downloadPic(IData param) throws Exception {
        String id = param.getString("id");
-        // 1. Traverse the tfsdb.properties file and find the fileHashCode value
-        PropertiesHelper pro = new PropertiesHelper(path);
-        String value = pro.getProperty(id);
-        String[] arr = value.split(",");
-        if (arr.length != 3) {
-            MobileUtility.error("读取tfsdb.properties格式有误。");
+        if (StringUtils.isEmpty(id)) {
+            throw new MobileServerException("参数id缺失。");
+        }
+
+        // 1. Find the fileHashCode value
+        FileDao dao = new FileDao(Constant.CONNNAME);
+        IData fileObj = dao.queryFileListById(param);
+        String fileName = fileObj.getString("file_name");
+        if (null == fileObj || null == fileObj.getString("file_hash_code") || "".equals(fileObj.getString("file_hash_code"))) {
+            throw new MobileServerException("数据库未找到记录。");
        }
-        String fileName = arr[0];
-        String fileHashCode = arr[1];
-        String md5 = arr[2];
+        String fileHashCode = fileObj.getString("file_hash_code");
 
        // 2. Fetch the file from TFS by fileHashCode and turn it into a local file
-        IBaseFs fs = FsFactory.getUniqueCacheFs();
-        String localFileName = "temp/"+fileName;
-        boolean result = fs.takeFile(fileHashCode, localFileName);
-        
-        if(result){
-            return new FileInputStream(FileUtil.connectFilePath("c:/", localFileName));
+        IBaseFs fs = new DbFs();
+        String path = FileUtil.connectFilePath(System.getProperty("catalina.base"), IPU_FILE, fileName);// relative path, converted to an input stream and sent to the client app
+        boolean result = fs.takeFile(fileHashCode, path);
+
+        if (result) {
+            return new FileInputStream(path);
        }
        return null;
    }
 
    public IData deletePic(IData param) throws Exception {
        String id = param.getString("id");
+        if (StringUtils.isEmpty(id)) {
+            throw new MobileServerException("参数id缺失。");
+        }
 
        // 1. Delete the resource on the TFS server
-        PropertiesHelper pro = new PropertiesHelper(path);
-        String value = pro.getProperty(id);
-        String[] arr = value.split(",");
-        if (arr.length != 3) {
-            MobileUtility.error("读取tfsdb.properties格式有误。");
+        FileDao dao = new FileDao(Constant.CONNNAME);
+        IData fileObj = dao.queryFileListById(param);
+        if (null == fileObj || null == fileObj.getString("file_hash_code") || "".equals(fileObj.getString("file_hash_code"))) {
+            throw new MobileServerException("数据库未找到记录。");
+        }
+        String fileHashCode = fileObj.getString("file_hash_code");
+        IBaseFs fs = new DbFs();
+        boolean deleteResult = fs.deleteFile(fileHashCode);
+        if (!deleteResult) {
+            throw new MobileServerException("tfs删除资源失败。");
        }
-        String fileName = arr[0];
-        String fileHashCode = arr[1];
-        String md5 = arr[2];
-        UniqueCacheFs ucfs = new UniqueCacheFs();
-        ucfs.deleteFile(fileHashCode);
 
-        // 2. Remove the corresponding record from tfsdb.properties
-        pro.remove(id);
+        // 2. Delete the database record
+        dao.deleteFileById(param);
 
        IData result = new DataMap();
        result.put("flag", "success");
        return result;
    }
 
-    public IData getPictureList(IData param) throws IOException {
+    public IData getPictureList(IData param) throws Exception {
        IData result = new DataMap();
-        IDataset list = new DatasetList();
-        IData obj = new DataMap();
-        PropertiesHelper pro = new PropertiesHelper(path);
-        Map<String, ?> map = pro.getProMap();
-        Set<String> keySet = map.keySet();
-        String value;
-        for (String key : keySet) {
-            obj = new DataMap();
-            obj.put("id", key);
-            value = (String) map.get(key);
-            String[] arr = value.split(",");
-            if (arr.length != 3) {
-                MobileUtility.error("tfsdb.properties配置有误。");
-            }
-            obj.put("fileName", arr[0]);
-            obj.put("fileHashCode", arr[1]);
-            obj.put("fileMD5", arr[2]);
-            list.add(obj);
-        }
+        FileDao dao = new FileDao(Constant.CONNNAME);
+        IDataset list = dao.queryFileList(null);
        result.put("infoList", list);
        return result;
    }
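The reworked bean drops the tfsdb.properties bookkeeping and routes everything through FileDao on the new database connection, with DbFs replacing UniqueCacheFs for the TFS round-trip. FileDao itself is not part of this commit; judging only from the calls above, its contract would look roughly like the sketch below. The method names and the tab_file keys (file_name, file_hash_code, create_time) come from the diff; the interface name, the void return types and everything else are assumptions.

package com.ai.server.dao;

import com.ailk.common.data.IData;
import com.ailk.common.data.IDataset;

// Hypothetical sketch of the DAO contract implied by UploadDownloadWithTfsBean.
public interface IFileDao {

    // rows of tab_file matching the file_hash_code key of fileParam
    IDataset queryFileListByFileHashCode(IData fileParam) throws Exception;

    // single tab_file row addressed by the id key of param
    IData queryFileListById(IData param) throws Exception;

    // full listing used by getPictureList (param may be null)
    IDataset queryFileList(IData param) throws Exception;

    // insert a new tab_file row; the bean ignores the return value, so void is assumed
    void insertFile(IData fileParam) throws Exception;

    // delete the tab_file row addressed by the id key; void assumed for the same reason
    void deleteFileById(IData param) throws Exception;
}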

+ 2 - 0
display-server/src/main/java/com/ai/server/util/Constant.java

@@ -1,6 +1,8 @@
 package com.ai.server.util;
 
 public class Constant {
+    public static final String CONNNAME = "display";
+    
     public static final String SUCCESS = "success";
     
     public static class Cache{

+ 11 - 0
display-server/src/main/resources/database.xml

@@ -16,4 +16,15 @@
 		maxIdle="5"
 		maxWait="1000"
 		/>
+	<display
+		type="dbcp"
+		driver="com.mysql.jdbc.Driver"
+		url="jdbc:mysql://123.57.35.51:3307/display?useUnicode=true"
+		user="ipu"
+		passwd="ipumysql"
+		initialSize="5"
+		maxActive="20"
+		maxIdle="5"
+		maxWait="1000"
+		/>
 </database>
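The display pool added here is what the new Constant.CONNNAME points at: every DAO in the reworked bean is constructed against this connection. A minimal sketch of that wiring, assuming only the constant and the constructor call shown in this commit (the wrapper class and method are illustrative):

import com.ai.server.dao.FileDao;
import com.ai.server.util.Constant;
import com.ailk.common.data.IDataset;

public class DisplayPoolSketch {
    // Illustrative wiring only: the connection name handed to the DAO must match the <display> element above.
    public static IDataset listFiles() throws Exception {
        FileDao dao = new FileDao(Constant.CONNNAME); // Constant.CONNNAME == "display"
        return dao.queryFileList(null);               // runs against the MySQL pool defined in database.xml
    }
}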

+ 7 - 8
display-server/src/main/resources/memcache.xml

@@ -3,26 +3,25 @@
 <memcache>
     <default-datacenter>center1</default-datacenter>
     <datacenter name="center1" >
-        <!-- codecode query cache -->
-        <cluster name="codecode_cache">
+        <!-- single sign-on cache -->
+        <cluster name="SSN_CACHE">
            <heartbeat-second>2</heartbeat-second>
            <pool-size>16</pool-size>
-            <address master="10.1.31.40:10001"/>
+            <address master="123.57.35.51:10001"/>
        </cluster>
        
-        <!-- single sign-on cache -->
-        <cluster name="SSN_CACHE">
+        <!-- codecode query cache -->
+        <cluster name="codecode_cache">
            <heartbeat-second>2</heartbeat-second>
            <pool-size>16</pool-size>
-            <address master="10.1.31.40:10002"/>
+            <address master="123.57.35.51:10002"/>
        </cluster>
        
        <!-- TFS cache -->
        <cluster name="tfs_cache">
            <heartbeat-second>2</heartbeat-second>
            <pool-size>16</pool-size>
-            <address master="114.215.100.48:11001"/>
-            <address master="114.215.100.48:11002"/>
+            <address master="123.57.35.51:10003"/>
        </cluster>
    </datacenter>
 </memcache>

+ 3 - 3
display-server/src/main/resources/tfs.xml

@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <configs>
      <!-- Maximum number of requests the whole process will wait on at once, depending on how many threads hit TFS concurrently -->
-      <config name="maxWaitThread" value="100"/>
+      <config name="maxWaitThread" value="1000"/>
      <!-- Maximum wait time for a single request (ms); beyond this the request is abandoned -->
      <config name="timeout" value="20000"/>
      <!-- Tfs master nameserver ip address -->
@@ -9,11 +9,11 @@
      <!-- When reading files TFS caches the data-server IP of each block; this sets the maximum number of cached entries -->
      <config name="maxCacheItemCount" value="10000"/>
      <!-- Maximum validity time (ms) of the cache above -->
-      <config name="maxCacheTime" value="5000"/>
+      <config name="maxCacheTime" value="50000"/>
      <!-- Maximum number of retries after a TFS save failure -->
      <config name="maxTryNum" value="3"/>
      <!-- Root directory where TFS local files are stored -->
-      <config name="uploadFileHome" value="c:/"/>
+      <config name="uploadFileHome" value="/home/ipuapp/web/webapps/display"/>
      <!-- Whether to enable the local file cache; defaults to true (enabled), set to false to disable -->
      <config name="localFileCached" value="false"/>
      

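The uploadFileHome root moves from the old Windows path c:/ (which lined up with the hard-coded c:/ download path removed from downloadPic) to a server-side deployment directory, matching the bean now building its local path from catalina.base. A minimal sketch of that fetch path under those assumptions, using only calls that appear in this commit (the wrapper class, file name and hash value are placeholders):

import java.io.FileInputStream;
import java.io.InputStream;

import com.ai.ipu.basic.file.FileUtil;
import com.ai.ipu.fs.IBaseFs;
import com.ai.tfs.DbFs;

public class TfsFetchSketch {
    public static InputStream fetch(String fileHashCode, String fileName) throws Exception {
        IBaseFs fs = new DbFs();
        // same path construction as the reworked downloadPic: <catalina.base>/ipu_file/<fileName>
        String localPath = FileUtil.connectFilePath(System.getProperty("catalina.base"), "ipu_file", fileName);
        boolean ok = fs.takeFile(fileHashCode, localPath); // pulls the file from TFS to localPath
        return ok ? new FileInputStream(localPath) : null; // streamed back to the client
    }
}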
+ 1 - 1
display-server/src/main/webapp/biz/js/more/scene/TfsDemo.js

@@ -1,7 +1,7 @@
 require(["mobile","common","hammer","handlebars","wadeMobile","util"],function(Mobile,Common,Hammer,Handlebars,WadeMobile){	
 	
 	var maxNum = "3";//maximum number of files allowed to be uploaded
-	var localPicRelativePath = "picture/my2.png";//local path of the picture being viewed
+	var localPicRelativePath = "picture/tfsFile.png";//local path of the picture being viewed
 	
 	initData();
 	

+ 1 - 1
display-server/src/main/webapp/template/webapp/more/scene/TfsDemo.html

@@ -38,7 +38,7 @@
 {{#each infoList}}
 <li>
 	<div class="content">
-		<div class="main">{{fileName}}</div>
+		<div class="main">{{file_name}}</div>
 		<div class="side">
 			<div class="link" id="download" data-id="{{id}}">查看</div>
 		</div>
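The Handlebars key changes because getPictureList now hands the template raw tab_file rows from dao.queryFileList(null) instead of the synthesized fileName/fileHashCode/fileMD5 entries, so each infoList element is keyed by column name. A rough illustration of the row shape the template now consumes (the keys come from the diff; the wrapper class and values are placeholders):

import com.ailk.common.data.IData;
import com.ailk.common.data.impl.DataMap;

public class InfoListRowSketch {
    // Hypothetical example of one infoList element; keys mirror the tab_file column names.
    public static IData exampleRow() {
        IData row = new DataMap();
        row.put("id", "1");                        // consumed by data-id="{{id}}"
        row.put("file_name", "tfsFile.png");       // rendered by {{file_name}}
        row.put("file_hash_code", "T1FOZxxxxxxx"); // placeholder hash returned by the TFS save
        return row;
    }
}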