
tfsdemo: supplementary commit of missing code

chengwb3, 8 years ago
Commit 3e7a0f1e18

+ 42 - 0
display-server/src/main/java/com/ai/server/dao/FileDao.java

@@ -0,0 +1,42 @@
package com.ai.server.dao;

import com.ai.ipu.server.db.dao.impl.BaseDAO;
import com.ailk.common.data.IData;
import com.ailk.common.data.IDataset;

/**
 * DAO for the tab_file table.
 */
public class FileDao extends BaseDAO {

    public FileDao(String connName) throws Exception {
        super(connName);
    }

    public IDataset queryFileList(IData param) throws Exception {
        String sql = "SELECT * FROM tab_file ORDER BY create_time DESC";
        return this.queryList(sql, param);
    }

    public IDataset queryFileListByFileHashCode(IData param) throws Exception {
        String sql = "SELECT * FROM tab_file WHERE file_hash_code=:file_hash_code";
        return this.queryList(sql, param);
    }

    public IData queryFileListById(IData param) throws Exception {
        String sql = "SELECT * FROM tab_file WHERE id=:id";
        return this.queryList(sql, param).first(); // id is unique, so take the first (only) row
    }

    public void insertFile(IData param) throws Exception {
        String sql = "INSERT INTO tab_file (file_name, file_hash_code, file_path, create_time, remark) VALUES(:file_name, :file_hash_code, :file_path, :create_time, :remark)";
        this.executeUpdate(sql, param);
    }

    public void deleteFileByFileHashCode(IData param) throws Exception {
        String sql = "DELETE FROM tab_file WHERE file_hash_code=:file_hash_code";
        this.executeUpdate(sql, param);
    }

    public void deleteFileById(IData param) throws Exception {
        String sql = "DELETE FROM tab_file WHERE id=:id";
        this.executeUpdate(sql, param);
    }
}
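
For orientation, a minimal usage sketch of FileDao. The connection name "ipu" and all field values are illustrative assumptions; the real connection name comes from the application's configuration (Constant.CONNNAME in the code further below):

package com.ai.server.demo;

import com.ai.server.dao.FileDao;
import com.ailk.common.data.IData;
import com.ailk.common.data.IDataset;
import com.ailk.common.data.impl.DataMap;

public class FileDaoDemo {
    public static void main(String[] args) throws Exception {
        FileDao dao = new FileDao("ipu"); // hypothetical connection name

        // Insert a record; the map keys match the :named parameters in the SQL.
        IData file = new DataMap();
        file.put("file_name", "report.pdf");
        file.put("file_hash_code", "T1FOZHB4ET"); // illustrative TFS file name
        file.put("file_path", "/upload/report.pdf");
        file.put("create_time", "2017-06-01 12:00:00");
        file.put("remark", "demo");
        dao.insertFile(file);

        // Look the record up again by its hash code.
        IData query = new DataMap();
        query.put("file_hash_code", "T1FOZHB4ET");
        IDataset rows = dao.queryFileListByFileHashCode(query);
        System.out.println(rows.getData(0).getString("file_name")); // "report.pdf"
    }
}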

+ 37 - 0
display-server/src/main/java/com/ai/server/dao/TfsMapDao.java

@@ -0,0 +1,37 @@
package com.ai.server.dao;

import com.ai.ipu.server.db.dao.impl.BaseDAO;
import com.ailk.common.data.IData;
import com.ailk.common.data.IDataset;

/**
 * DAO for the common_tfs_map table, which maps a file's MD5 to its TFS file_hash_code.
 */
public class TfsMapDao extends BaseDAO {

    public TfsMapDao(String connName) throws Exception {
        super(connName);
    }

    public IData queryTfsMapListByFileHashCode(IData param) throws Exception {
        String sql = "SELECT * FROM common_tfs_map WHERE file_hash_code=:file_hash_code";
        return this.queryList(sql, param).first(); // single row expected; callers treat a missing row as null
    }

    public IDataset queryTfsMapTfsListByMd5(IData param) throws Exception {
        String sql = "SELECT * FROM common_tfs_map WHERE md5=:md5";
        return this.queryList(sql, param);
    }

    public void insertTfsMap(IData param) throws Exception {
        String sql = "INSERT INTO common_tfs_map (file_hash_code, md5) VALUES(:file_hash_code, :md5)";
        this.executeUpdate(sql, param);
    }

    public void deleteTfsMapByFileHashCode(IData param) throws Exception {
        String sql = "DELETE FROM common_tfs_map WHERE file_hash_code=:file_hash_code";
        this.executeUpdate(sql, param);
    }

    public void updateTfsMap(IData param) throws Exception {
        String sql = "UPDATE common_tfs_map SET md5=:md5 WHERE file_hash_code=:file_hash_code";
        this.executeUpdate(sql, param);
    }
}
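
The mapping round trip, sketched as a fragment inside a main method like the one above, under the same assumptions (hypothetical connection name and illustrative values):

TfsMapDao dao = new TfsMapDao("ipu"); // hypothetical connection name
IData param = new DataMap();
param.put("md5", "d41d8cd98f00b204e9800998ecf8427e");
param.put("file_hash_code", "T1FOZHB4ET");
dao.insertTfsMap(param);

// Dedup lookup: given a file's MD5, find the TFS hash code already stored for it.
IDataset hits = dao.queryTfsMapTfsListByMd5(param);
if (hits != null && !hits.isEmpty()) {
    System.out.println(hits.getData(0).getString("file_hash_code")); // "T1FOZHB4ET"
}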

+ 181 - 0
display-server/src/main/java/com/ai/tfs/DbFs.java

@@ -0,0 +1,181 @@
package com.ai.tfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.OutputStream;

import com.ai.ipu.basic.cipher.MD5;
import com.ai.ipu.basic.file.FileUtil;
import com.ai.ipu.fs.cache.IFsUnique;
import com.ai.ipu.fs.impl.BaseFs;
import com.ai.ipu.fs.util.FsConstant;
import com.ai.ipu.fs.util.FsFactory;

/**
 * File operations keyed by MD5 value; the dedup records are stored in the database.
 *
 * @author chengwb3
 */
public class DbFs extends BaseFs {

    private String fsUniqueClassPath = "com.ai.tfs.DbFsUnique";

    private IFsUnique fsUnique;

    public DbFs() throws Exception {
        fsUnique = FsFactory.getFsUnique(fsUniqueClassPath);
    }

    @Override
    public String saveFile(String fileName) throws Exception {
        String md5 = MD5.hexDigestByFilePath(getLocalFileName(fileName)); // the MD5 is computed for deduplication
        String tfsFileHashCode = fsUnique.checkExists(md5);
        if (tfsFileHashCode != null) {
            return tfsFileHashCode;
        } else {
            tfsFileHashCode = super.saveFile(fileName);
            fsUnique.recordUnique(md5, tfsFileHashCode); // used to dedup later uploads
        }
        return tfsFileHashCode;
    }

    @Override
    public String saveFile(FileInputStream fis) throws Exception {
        String md5 = MD5.hexDigest(fis); // the MD5 is computed for deduplication
        String tfsFileHashCode = fsUnique.checkExists(md5);
        if (tfsFileHashCode != null) {
            return tfsFileHashCode;
        } else {
            tfsFileHashCode = super.saveFile(fis);
            fsUnique.recordUnique(md5, tfsFileHashCode); // used to dedup later uploads
        }
        return tfsFileHashCode;
    }

    /*
     * Fetches the file identified by tfsFileHashCode and writes it to localFileName.
     * localFileName is an absolute path, e.g. /home/ipuapp/web/deploy/8081 + / + ipu_file
     */
    @Override
    public boolean takeFile(String tfsFileHashCode, String localFileName) throws Exception {
        String cacheFilePath = FileUtil.connectFilePath(FsConstant.CACHE_DIRECTORY, tfsFileHashCode);
        File cacheFile = new File(FileUtil.connectFilePath(tfsUploadFileHome, cacheFilePath));

        String md5 = DbFsUnique.getMd5ByFileHashCode(tfsFileHashCode);
        // Before serving from cache, verify the cached file's MD5 still matches.
        if (needLocalFileCache && cacheFile.exists() && MD5.hexDigestByFile(cacheFile).equals(md5)) {
            FileUtil.initDirectory(localFileName);
            FileUtil.writeFile(new FileInputStream(cacheFile), localFileName);
            return true;
        } else {
            /* Create the directories if they do not exist yet */
            FileUtil.initDirectory(FileUtil.connectFilePath(tfsUploadFileHome, cacheFilePath));
            FileUtil.initDirectory(localFileName);

            boolean bo = super.takeFile(tfsFileHashCode, cacheFilePath); // download into the cache directory first
            if (bo) {
                FileUtil.copyFile(FileUtil.connectFilePath(tfsUploadFileHome, cacheFilePath), localFileName);
                fsUnique.recordUnique(md5, tfsFileHashCode);
            }
            return bo;
        }
    }

    @Override
    public boolean takeFile(String tfsFileHashCode, OutputStream out) throws Exception {
        String cacheFilePath = FileUtil.connectFilePath(FsConstant.CACHE_DIRECTORY, tfsFileHashCode);
        File cacheFile = new File(FileUtil.connectFilePath(tfsUploadFileHome, cacheFilePath));

        String md5 = DbFsUnique.getMd5ByFileHashCode(tfsFileHashCode);
        // Before serving from cache, verify the cached file's MD5 still matches.
        if (needLocalFileCache && cacheFile.exists() && MD5.hexDigestByFile(cacheFile).equals(md5)) {
            FileUtil.writeFile(new FileInputStream(cacheFile), out); // TODO: should out be closed internally?
            return true;
        } else {
            /* Create the directory if it does not exist yet */
            FileUtil.initDirectory(FileUtil.connectFilePath(tfsUploadFileHome, cacheFilePath));

            boolean bo = super.takeFile(tfsFileHashCode, cacheFilePath); // download into the cache directory first
            if (bo) {
                FileInputStream fis = new FileInputStream(FileUtil.connectFilePath(tfsUploadFileHome, cacheFilePath));
                FileUtil.writeFile(fis, out);
                fsUnique.recordUnique(md5, tfsFileHashCode);
            }

            return bo;
        }
    }

    @Override
    public boolean deleteFile(String tfsFileHashCode) throws Exception {
        boolean bo = super.deleteFile(tfsFileHashCode);
        if (bo) {
            fsUnique.cleanUnique(tfsFileHashCode); // drop the dedup record together with the file
        }
        return bo;
    }
}
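
Taken together, the dedup flow looks roughly like this. A fragment sketch; the file paths are illustrative assumptions, and takeFile's localFileName follows the absolute-path convention noted in the comment above:

DbFs fs = new DbFs();

// First upload: computes the MD5, finds no record, stores the file, records the mapping.
String hashCode = fs.saveFile("/home/ipuapp/upload/report.pdf");

// Uploading identical content again short-circuits on the MD5 lookup and
// returns the existing hash code without storing a second copy.
String again = fs.saveFile("/home/ipuapp/upload/report_copy.pdf");

// Download: served from the local cache when the cached copy's MD5 still matches,
// otherwise fetched into the cache first and then copied out.
fs.takeFile(hashCode, "/home/ipuapp/web/deploy/8081/ipu_file");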

+ 105 - 0
display-server/src/main/java/com/ai/tfs/DbFsUnique.java

@@ -0,0 +1,105 @@
package com.ai.tfs;

import com.ai.ipu.basic.log.ILogger;
import com.ai.ipu.basic.log.IpuLoggerFactory;
import com.ai.ipu.fs.cache.IFsUnique;
import com.ai.server.dao.TfsMapDao;
import com.ai.server.util.Constant;
import com.ailk.common.data.IData;
import com.ailk.common.data.IDataset;
import com.ailk.common.data.impl.DataMap;

/**
 * Utility class that checks file uniqueness via the database.
 *
 * @author chengwb3
 */
public class DbFsUnique implements IFsUnique {

    protected static final ILogger log = IpuLoggerFactory.createLogger(DbFsUnique.class);

    /*
     * Checks whether TFS already holds a file with the given MD5.
     */
    @Override
    public String checkExists(String md5) {
        String fileHashCode = null;
        try {
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
            IData param = new DataMap();
            param.put("md5", md5);
            IDataset fileTfsList = dao.queryTfsMapTfsListByMd5(param);
            if (fileTfsList != null && !fileTfsList.isEmpty()) {
                fileHashCode = fileTfsList.getData(0).getString("file_hash_code");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return fileHashCode;
    }

    /*
     * Records the md5 -> file_hash_code mapping.
     */
    @Override
    public void recordUnique(String md5, String fileHashCode) {
        try {
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
            IData param = new DataMap();
            param.put("file_hash_code", fileHashCode);
            param.put("md5", md5);
            IData tfsMapObj = dao.queryTfsMapListByFileHashCode(param);
            if (null == tfsMapObj) {
                log.debug("No existing record found; inserting into the database.");
                dao.insertTfsMap(param);
            } else {
                if (md5.equals(tfsMapObj.getString("md5"))) {
                    log.debug("Matching record found; nothing to change.");
                } else {
                    dao.updateTfsMap(param);
                    log.debug("Record found with a different md5; updating it.");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /*
     * Removes the file's uniqueness record.
     */
    @Override
    public boolean cleanUnique(String fileHashCode) {
        try {
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
            IData param = new DataMap();
            param.put("file_hash_code", fileHashCode);
            dao.deleteTfsMapByFileHashCode(param);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    public static String getMd5ByFileHashCode(String fileHashCode) {
        String md5 = null;
        try {
            TfsMapDao dao = new TfsMapDao(Constant.CONNNAME);
            IData param = new DataMap();
            param.put("file_hash_code", fileHashCode);
            IData tfsMapObj = dao.queryTfsMapListByFileHashCode(param);
            if (tfsMapObj != null) {
                md5 = tfsMapObj.getString("md5");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return md5;
    }
}
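
DbFs wires this class in by class path through FsFactory.getFsUnique, but the interface can also be exercised directly. A fragment sketch with an illustrative MD5 and hash code:

IFsUnique unique = new DbFsUnique();
String md5 = "d41d8cd98f00b204e9800998ecf8427e"; // illustrative MD5
String existing = unique.checkExists(md5);
if (existing == null) {
    // ...upload the file, then record the mapping for future dedup checks
    unique.recordUnique(md5, "T1FOZHB4ET"); // illustrative hash code
}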