Browse Source

display-server tfs的demo代码提交

Administrator 7 years ago
parent
commit
b8d8da15c5

+ 51 - 0
display-server/sql/init/display_tfs_demo_sql_data.sql

1
/*
Navicat MySQL Data Transfer

Source Server         : 本地
Source Server Version : 50540
Source Host           : 127.0.0.1:3306
Source Database       : display

Target Server Type    : MYSQL
Target Server Version : 50540
File Encoding         : 65001

Date: 2017-12-21 15:49:51
*/

-- Disable FK checks so tables can be dropped/recreated in any order;
-- re-enabled at the end of the script.
SET FOREIGN_KEY_CHECKS=0;

-- ----------------------------
-- Table structure for common_tfs_map
-- Maps a file's MD5 digest to the hash code TFS assigned when the file
-- was stored, so duplicate uploads can be detected and deduplicated.
-- ----------------------------
DROP TABLE IF EXISTS `common_tfs_map`;
CREATE TABLE `common_tfs_map` (
  `md5` varchar(128) NOT NULL COMMENT '文件的md5唯一编码',
  `file_hash_code` varchar(128) NOT NULL COMMENT 'tfs对应的hash值',
  PRIMARY KEY (`md5`),
  -- NOTE: index name fixed from the original typo `uinque_index_file_hash_code`.
  UNIQUE KEY `unique_index_file_hash_code` (`file_hash_code`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of common_tfs_map
-- ----------------------------
-- Explicit column list so the statement survives future schema changes.
INSERT INTO `common_tfs_map` (`md5`, `file_hash_code`)
VALUES ('47375cb03382346357214684e9010b41', 'T1ktJTByhT1RCvBVdK');

-- ----------------------------
-- Table structure for tab_file
-- One row per uploaded file; `file_hash_code` links back to the TFS
-- object (see common_tfs_map for the md5 -> hash mapping).
-- ----------------------------
DROP TABLE IF EXISTS `tab_file`;
CREATE TABLE `tab_file` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `file_name` varchar(255) DEFAULT NULL COMMENT '文件名',
  `file_hash_code` varchar(128) NOT NULL COMMENT 'tfs对应的hash值',
  `file_path` varchar(255) DEFAULT NULL COMMENT '所在服务器的相对路径',
  `create_time` datetime NOT NULL COMMENT '文件上传时间',
  `remark` varchar(128) DEFAULT NULL COMMENT '备用',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of tab_file
-- ----------------------------
-- Explicit column list; `id` passed as a numeric literal (not a quoted
-- string) to avoid implicit string-to-int coercion; NULL uppercased to
-- match the file's keyword convention.
INSERT INTO `tab_file` (`id`, `file_name`, `file_hash_code`, `file_path`, `create_time`, `remark`)
VALUES (12, 'product2.jpg', 'T1ktJTByhT1RCvBVdK', NULL, '2017-05-04 16:01:37', NULL);

-- Restore FK enforcement for any statements run after this script
-- (the original dump left checks disabled for the session).
SET FOREIGN_KEY_CHECKS=1;

+ 9 - 9
display-server/src/main/java/com/ai/server/bean/UploadDownloadWithTfsBean.java

13
import org.apache.commons.fileupload.servlet.ServletFileUpload;
13
import org.apache.commons.fileupload.servlet.ServletFileUpload;
14
import org.apache.commons.lang3.StringUtils;
14
import org.apache.commons.lang3.StringUtils;
15

15

16
import com.ai.ipu.basic.file.FileUtil;
17
import com.ai.ipu.basic.log.ILogger;
16
import com.ai.ipu.basic.log.ILogger;
18
import com.ai.ipu.basic.log.IpuLoggerFactory;
17
import com.ai.ipu.basic.log.IpuLoggerFactory;
19
import com.ai.ipu.fs.IBaseFs;
18
import com.ai.ipu.fs.IBaseFs;
19
import com.ai.ipu.fs.impl.UniqueCacheFs;
20
import com.ai.ipu.server.servlet.ServletManager;
20
import com.ai.ipu.server.servlet.ServletManager;
21
import com.ai.ipu.server.util.MobileServerException;
21
import com.ai.ipu.server.util.MobileServerException;
22
import com.ai.server.core.bean.IpuAppBean;
22
import com.ai.server.core.bean.IpuAppBean;
23
import com.ai.server.dao.FileDao;
23
import com.ai.server.dao.FileDao;
24
import com.ai.server.tfs.DbFs;
25
import com.ai.server.util.Constant;
24
import com.ai.server.util.Constant;
26
import com.ailk.common.data.IData;
25
import com.ailk.common.data.IData;
27
import com.ailk.common.data.IDataset;
26
import com.ailk.common.data.IDataset;
31

30

32
    transient protected static final ILogger log = IpuLoggerFactory.createLogger(UploadDownloadWithTfsBean.class);
31
    transient protected static final ILogger log = IpuLoggerFactory.createLogger(UploadDownloadWithTfsBean.class);
33

32

34
    private static final String IPU_FILE = "ipu_file";
33
    /*private static final String IPU_FILE = "ipu_file";*/
35

34

36
    /**
35
    /**
37
     * 将流文件上传至tfs服务端,且记录数据库
36
     * 将流文件上传至tfs服务端,且记录数据库
65

64

66
            // 2.将流文件存放tfs服务端,并返回存放信息
65
            // 2.将流文件存放tfs服务端,并返回存放信息
67
            String fileHashCode = null;
66
            String fileHashCode = null;
68
            IBaseFs fs = new DbFs();
67
            IBaseFs fs = new UniqueCacheFs();
69
            fis = (FileInputStream) is;
68
            fis = (FileInputStream) is;
70
            fileHashCode = fs.saveFile(fis);
69
            fileHashCode = fs.saveFile(fis);
71
            log.debug("fileName=[" + fileName + "] 上传tfs成功,返回值fileHashCode=[" + fileHashCode + "]");
70
            log.debug("fileName=[" + fileName + "] 上传tfs成功,返回值fileHashCode=[" + fileHashCode + "]");
123
        // 1.找到fileHashCode值
122
        // 1.找到fileHashCode值
124
        FileDao dao = new FileDao(Constant.CONNNAME);
123
        FileDao dao = new FileDao(Constant.CONNNAME);
125
        IData fileObj = dao.queryFileListById(param);
124
        IData fileObj = dao.queryFileListById(param);
126
        String fileName = fileObj.getString("file_name");
125
        /*String fileName = fileObj.getString("file_name");*/
127
        if (null == fileObj || null == fileObj.getString("file_hash_code") || "".equals(fileObj.getString("file_hash_code"))) {
126
        if (null == fileObj || null == fileObj.getString("file_hash_code") || "".equals(fileObj.getString("file_hash_code"))) {
128
            throw new MobileServerException("数据库未找到记录。");
127
            throw new MobileServerException("数据库未找到记录。");
129
        }
128
        }
130
        String fileHashCode = fileObj.getString("file_hash_code");
129
        String fileHashCode = fileObj.getString("file_hash_code");
131

130

132
        // 2.根据fileHashCode从tfs获取,并转化为本地文件
131
        // 2.根据fileHashCode从tfs获取,并转化为本地文件
133
        IBaseFs fs = new DbFs();
134
        String path = FileUtil.connectFilePath(System.getProperty("catalina.base"), IPU_FILE, fileName);// 相对路径,转为输入流传到客户端app
132
        IBaseFs fs = new UniqueCacheFs();
133
        /*String path = FileUtil.connectFilePath(System.getProperty("catalina.base"), IPU_FILE, fileName);// 相对路径,转为输入流传到客户端app
135
        boolean result = fs.takeFile(fileHashCode, path);
134
        boolean result = fs.takeFile(fileHashCode, path);
136

135

137
        if (result) {
136
        if (result) {
138
            return new FileInputStream(path);
137
            return new FileInputStream(path);
139
        }
138
        }
140
        return null;
139
        return null;*/
140
        return fs.takeFile(fileHashCode);
141
    }
141
    }
142

142

143
    public IData deletePic(IData param) throws Exception {
143
    public IData deletePic(IData param) throws Exception {
153
            throw new MobileServerException("数据库未找到记录。");
153
            throw new MobileServerException("数据库未找到记录。");
154
        }
154
        }
155
        String fileHashCode = fileObj.getString("file_hash_code");
155
        String fileHashCode = fileObj.getString("file_hash_code");
156
        IBaseFs fs = new DbFs();
156
        IBaseFs fs = new UniqueCacheFs();
157
        boolean deleteResult = fs.deleteFile(fileHashCode);
157
        boolean deleteResult = fs.deleteFile(fileHashCode);
158
        if (!deleteResult) {
158
        if (!deleteResult) {
159
            throw new MobileServerException("tfs删除资源失败。");
159
            throw new MobileServerException("tfs删除资源失败。");

+ 5 - 5
display-server/src/main/java/com/ai/server/dao/TfsMapDao.java

10
        super(connName);
10
        super(connName);
11
    }
11
    }
12

12

13
    public IData queryTfsMapListByFileHashCode(IData param) throws Exception {
13
    /*public IData queryTfsMapListByFileHashCode(IData param) throws Exception {
14
        String sql = "SELECT * FROM common_tfs_map WHERE file_hash_code=:file_hash_code";
14
        String sql = "SELECT * FROM common_tfs_map WHERE file_hash_code=:file_hash_code";
15
        return this.queryList(sql, param).first();
15
        return this.queryList(sql, param).first();
16
    }
16
    }*/
17
    
17
    
18
    public IDataset queryTfsMapTfsListByMd5(IData param) throws Exception {
18
    public IData queryTfsMapTfsByMd5(IData param) throws Exception {
19
        String sql = "SELECT * FROM common_tfs_map WHERE md5=:md5";
19
        String sql = "SELECT * FROM common_tfs_map WHERE md5=:md5";
20
        return this.queryList(sql, param);
20
        return this.queryList(sql, param).first();
21
    }
21
    }
22

22

23
    public void insertTfsMap(IData param) throws Exception {
23
    public void insertTfsMap(IData param) throws Exception {
24
        String sql = "INSERT INTO common_tfs_map (file_hash_code, md5) VALUES(:file_hash_code, :md5)";
24
        String sql = "INSERT INTO common_tfs_map (md5, file_hash_code) VALUES(:md5, :file_hash_code)";
25
        this.executeUpdate(sql, param);
25
        this.executeUpdate(sql, param);
26
    }
26
    }
27

27


+ 8 - 20
display-server/src/main/java/com/ai/server/tfs/DbFsUnique.java

6
import com.ai.server.dao.TfsMapDao;
6
import com.ai.server.dao.TfsMapDao;
7
import com.ai.server.util.Constant;
7
import com.ai.server.util.Constant;
8
import com.ailk.common.data.IData;
8
import com.ailk.common.data.IData;
9
import com.ailk.common.data.IDataset;
10
import com.ailk.common.data.impl.DataMap;
9
import com.ailk.common.data.impl.DataMap;
11

10

12
/**
11
/**
21
     */
20
     */
22
    @Override
21
    @Override
23
	public boolean checkExists(String md5) {
22
	public boolean checkExists(String md5) {
24
		String fileHashCode = null;
25
		try {
26
			TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
27
			IData param = new DataMap();
28
			param.put("md5", md5);
29
			IDataset fileTfsList = dao.queryTfsMapTfsListByMd5(param);
30
			if (fileTfsList != null && !fileTfsList.isEmpty()) {
31
				fileHashCode = fileTfsList.getData(0).getString("file_hash_code");
32
			}
33
		} catch (Exception e) {
34
			e.printStackTrace();
35
		}
36
		return fileHashCode != null;
23
    	String fileKey = getFileKey(md5);
24
		return fileKey != null;
37
	}
25
	}
38

26

39
    /*
27
    /*
44
        try {
32
        try {
45
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
33
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
46
            IData param = new DataMap();
34
            IData param = new DataMap();
47
            param.put("file_hash_code", fileHashCode);
48
            param.put("md5", md5);
35
            param.put("md5", md5);
49
            IData tfsMapObj = dao.queryTfsMapListByFileHashCode(param);
36
            param.put("file_hash_code", fileHashCode);
37
            IData tfsMapObj = dao.queryTfsMapTfsByMd5(param);
50
            if (null == tfsMapObj) {
38
            if (null == tfsMapObj) {
51
                log.debug("之前未发现记录,插入数据库。");
39
                log.debug("之前未发现记录,插入数据库。");
52
                dao.insertTfsMap(param);
40
                dao.insertTfsMap(param);
53
            } else {
41
            } else {
54
                if (md5.equals(tfsMapObj.getString("md5"))) {
42
                if (fileHashCode.equals(tfsMapObj.getString("file_hash_code"))) {
55
                    log.debug("发现对应记录且相同。");
43
                    log.debug("发现对应记录且相同。");
56
                } else {
44
                } else {
57
                    dao.updateTfsMap(param);
45
                    dao.updateTfsMap(param);
83
    }
71
    }
84

72

85
	@Override
73
	@Override
86
	public String getFileKey(String uniqueId) {
74
	public String getFileKey(String md5) {
87
		// TODO Auto-generated method stub
75
		// TODO Auto-generated method stub
88
		String fileHashCode = null;
76
		String fileHashCode = null;
89
        try {
77
        try {
90
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
78
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
91
            IData param = new DataMap();
79
            IData param = new DataMap();
92
            param.put("md5", uniqueId);
93
            IData tfsMapObj = dao.queryTfsMapListByFileHashCode(param);
80
            param.put("md5", md5);
81
            IData tfsMapObj = dao.queryTfsMapTfsByMd5(param);
94
            if (tfsMapObj != null) {
82
            if (tfsMapObj != null) {
95
            	fileHashCode = tfsMapObj.getString("file_hash_code");
83
            	fileHashCode = tfsMapObj.getString("file_hash_code");
96
            }
84
            }

+ 3 - 1
display-server/src/main/resources/tfs.xml

15
	<!-- tfs本地文件存放的根目录 -->
15
	<!-- tfs本地文件存放的根目录 -->
16
	<config name="uploadFileHome" value="/home/ipuapp/web/webapps/display" />
16
	<config name="uploadFileHome" value="/home/ipuapp/web/webapps/display" />
17
	<!-- 是否开启本地文件缓存。缺省为true,开启。不想开启需要设置为false -->
17
	<!-- 是否开启本地文件缓存。缺省为true,开启。不想开启需要设置为false -->
18
	<config name="localFileCached" value="false" />
18
	<config name="localFileCache" value="false" />
19
	<!--  -->
20
	<config name="fsUniqueClass" value="com.ai.server.tfs.DbFsUnique"/>
19

21

20
	<!-- 不需要排重功能时,下面配置项可以不设置 -->
22
	<!-- 不需要排重功能时,下面配置项可以不设置 -->
21
	<!-- tair排重数据库的serverlist -->
23
	<!-- tair排重数据库的serverlist -->

+ 10 - 4
display-server/src/main/webapp/res/js/mobile/expand-mobile.js

130
			},getMemoryCache:function(callback,key,defValue,err){
130
			},getMemoryCache:function(callback,key,defValue,err){
131
				storageCallback("getMemoryCache",callback);
131
				storageCallback("getMemoryCache",callback);
132
				execute("getMemoryCache",[key,defValue],err);
132
				execute("getMemoryCache",[key,defValue],err);
133
			},setOfflineCache:function(key,value,err){
134
				execute("setOfflineCache", [key,value],err);
135
			},getOfflineCache:function(callback,key,defValue,err){
133
			},setOfflineCache:function(key,value,isEncrypt,err){
134
				if(isEncrypt==undefined){
135
					isEncrypt = false;
136
				}
137
				execute("setOfflineCache", [key,value,isEncrypt],err);
138
			},getOfflineCache:function(callback,key,defValue,isEncrypt,err){
139
				if(isEncrypt==undefined){
140
					isEncrypt = false;
141
				}
136
				storageCallback("getOfflineCache",callback);
142
				storageCallback("getOfflineCache",callback);
137
				return execute("getOfflineCache", [key,defValue],err);
143
				return execute("getOfflineCache", [key,defValue,isEncrypt],err);
138
			},removeOfflineCache:function(key,err){
144
			},removeOfflineCache:function(key,err){
139
				execute("removeOfflineCache", [key],err);
145
				execute("removeOfflineCache", [key],err);
140
			},clearOfflineCache:function(err){
146
			},clearOfflineCache:function(err){