Java Code Examples for org.apache.commons.codec.digest.DigestUtils#getMd5Digest()

The following examples show how to use org.apache.commons.codec.digest.DigestUtils#getMd5Digest(), which returns a new java.security.MessageDigest instance configured for the MD5 algorithm. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
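As a quick orientation before the project examples, here is a minimal, self-contained sketch of the pattern they all share: obtain a fresh MessageDigest from getMd5Digest(), feed it data, and hex-encode the result. The class name Md5Example is illustrative, not from any of the projects below.

import java.security.MessageDigest;

import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;

public class Md5Example {
    public static void main(String[] args) {
        // getMd5Digest() returns a new MD5 MessageDigest, rethrowing the
        // checked NoSuchAlgorithmException as an unchecked IllegalArgumentException
        MessageDigest md5 = DigestUtils.getMd5Digest();

        // Feed data incrementally; updateDigest(MessageDigest, String) encodes as UTF-8
        DigestUtils.updateDigest(md5, "hello ");
        DigestUtils.updateDigest(md5, "world");

        // digest() finishes the hash and resets the MessageDigest for reuse
        System.out.println(Hex.encodeHexString(md5.digest()));
    }
}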
Example 1
Source File: ChecksumUtil.java    From arcusplatform with Apache License 2.0
public static void updateChecksum(ChangeSet changeSet) {
   if(changeSet.getCommands().isEmpty()) {
      return;
   }

   MessageDigest checksum = DigestUtils.getMd5Digest();

   for(Command command : changeSet.getCommands()) {
      if(command instanceof CQLCommand) {
         DigestUtils.updateDigest(checksum, ((CQLCommand) command).getUpdateCql());
         DigestUtils.updateDigest(checksum, ((CQLCommand) command).getRollbackCql());
      } else if(command instanceof JavaCommand) {
         DigestUtils.updateDigest(checksum, ((JavaCommand) command).getClassName());
      }
   }

   byte[] bytes = checksum.digest();

   changeSet.setChecksum(Hex.encodeHexString(bytes));
}
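Because DigestUtils.updateDigest(MessageDigest, String) encodes the string as UTF-8, the resulting checksum is stable across platforms regardless of the JVM's default charset.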
 
Example 2
Source File: MapperFactory.java    From freehealth-connector with GNU Affero General Public License v3.0
public static Mapper getMapper(String... mappingFiles) {
   // Sort the file names so the cache key does not depend on argument order
   Set<String> mappingSet = new TreeSet<>(Arrays.asList(mappingFiles));

   // Hash the sorted names to build a compact cache key
   MessageDigest complete = DigestUtils.getMd5Digest();
   for (String mapping : mappingSet) {
      complete.update(mapping.getBytes());
   }
   String key = new String(Base64.encode(complete.digest()));

   if (!cache.containsKey(key)) {
      Map<String, Object> options = new HashMap<>();
      options.put("be.ehealth.technicalconnector.mapper.configfiles", mappingFiles);

      try {
         cache.put(key, helper.getImplementation(options));
      } catch (TechnicalConnectorException e) {
         throw new IllegalArgumentException(e);
      }
   }

   return (Mapper) cache.get(key);
}
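Note that mapping.getBytes() uses the JVM's default charset, so the Base64 cache key is only stable across machines that share that default; an explicit charset such as StandardCharsets.UTF_8 would make it fully deterministic.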
 
Example 3
Source File: MapperFactory.java    From freehealth-connector with GNU Affero General Public License v3.0
public static Mapper getMapper(String... mappingFiles) {
   // Sort the file names so the cache key does not depend on argument order
   Set<String> mappingSet = new TreeSet<>(Arrays.asList(mappingFiles));

   // Hash the sorted names to build a compact cache key
   MessageDigest complete = DigestUtils.getMd5Digest();
   for (String mapping : mappingSet) {
      complete.update(mapping.getBytes());
   }
   String key = new String(Base64.encode(complete.digest()));

   if (!cache.containsKey(key)) {
      Map<String, Object> options = new HashMap<>();
      options.put("org.taktik.connector.technical.mapper.configfiles", mappingFiles);

      try {
         cache.put(key, helper.getImplementation(options));
      } catch (TechnicalConnectorException e) {
         throw new IllegalArgumentException(e);
      }
   }

   return (Mapper) cache.get(key);
}
 
Example 4
Source File: ConnectionInformationFileUtils.java    From rapidminer-studio with GNU Affero General Public License v3.0
/**
 * Copies the given files to the specified {@code root/dirName} and creates {@code .md5} hash files for each.
 */
static void copyFilesToZip(Path root, List<Path> filesToCopy, String dirName) throws IOException {
	if (filesToCopy == null || filesToCopy.isEmpty() || dirName == null) {
		return;
	}
	Path path = root.resolve(dirName);
	Files.createDirectory(path);
	MessageDigest md5 = DigestUtils.getMd5Digest();
	for (Path fileToCopy : filesToCopy) {
		Path fileName = fileToCopy.getFileName();
		Path nestedFilePath = path.resolve(fileName.toString());
		try (InputStream is = Files.newInputStream(fileToCopy);
			 DigestInputStream dis = new DigestInputStream(is, md5);
			 BufferedWriter md5Writer = Files.newBufferedWriter(path.resolve(fileName + ConnectionInformationSerializer.MD5_SUFFIX))) {
			Files.copy(dis, nestedFilePath);
			md5Writer.write(Hex.encodeHexString(md5.digest()));
		}
	}
}
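Reusing one MessageDigest across all files works here because MessageDigest.digest() resets the digest after computing the hash, so each iteration of the loop starts from a clean state.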
 
Example 5
Source File: PluginsZip.java    From gocd with Apache License 2.0
public void create() {
    checkFilesAccessibility(bundledPlugins, externalPlugins);
    reset();

    MessageDigest md5Digest = DigestUtils.getMd5Digest();
    try (ZipOutputStream zos = new ZipOutputStream(new DigestOutputStream(new BufferedOutputStream(new FileOutputStream(destZipFile)), md5Digest))) {
        for (GoPluginBundleDescriptor agentPlugins : agentPlugins()) {
            String zipEntryPrefix = "external/";

            if (agentPlugins.isBundledPlugin()) {
                zipEntryPrefix = "bundled/";
            }

            zos.putNextEntry(new ZipEntry(zipEntryPrefix + new File(agentPlugins.bundleJARFileLocation()).getName()));
            Files.copy(new File(agentPlugins.bundleJARFileLocation()).toPath(), zos);
            zos.closeEntry();
        }
    } catch (Exception e) {
        LOG.error("Could not create zip of plugins for agent to download.", e);
    }

    md5DigestOfPlugins = Hex.encodeHexString(md5Digest.digest());
}
 
Example 6
Source File: PasswordUtils.java    From fast-family-master with Apache License 2.0
/**
 * Hashes the given data with MD5, optionally salted, and returns the result as a hex string.
 *
 * @param data           the bytes to hash
 * @param salt           the salt to mix in first, or {@code null} for no salt
 * @param hashIterations the number of additional hash iterations to apply
 * @return the hex-encoded MD5 digest
 */
public static String md5Hex(byte[] data, byte[] salt, int hashIterations) {
    MessageDigest digest = DigestUtils.getMd5Digest();
    if (salt != null) {
        digest.reset();
        digest.update(salt);
    }
    byte[] digestData = digest.digest(data);
    for (int i = 0; i < hashIterations; i++) {
        digest.reset();
        digestData = digest.digest(digestData);
    }
    return Hex.encodeHexString(digestData);
}
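Since MessageDigest.digest(data) is equivalent to update(data) followed by digest(), the first round hashes salt || data when a salt is supplied; each subsequent iteration simply re-hashes the previous digest.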
 
Example 7
Source File: SuperWriter.java    From Dragonfly with Apache License 2.0
@Override
public void run() {
    try {
        MessageDigest pieceM5 = DigestUtils.getMd5Digest();
        int pieceNum;
        ByteBuffer bb, byteBuf = ByteBuffer.allocate(this.pieceSize);
        byteBuf.order(ByteOrder.BIG_ENDIAN);

        while (true) {
            ProtocolContent protocolContent = contQu.poll(waitTime, TimeUnit.SECONDS);
            if (protocolContent == null) {
                logger.warn("taskId:{} get piece timeout", taskId);
                break;
            }
            if (protocolContent.isPieceType()) {
                pieceNum = protocolContent.getPieceNum();
                bb = protocolContent.getContent();

                byteBuf.putInt(bb.limit() | this.pieceSizeBit);
                byteBuf.put(bb);
                byteBuf.put((byte)0x7f);
                bb.clear();
                reusedCache.offer(bb);
                reportPiece(pieceM5, byteBuf, pieceNum);
            } else {
                contQu.put(protocolContent);
                break;
            }
        }
        sucCount.incrementAndGet();
    } catch (Exception e) {
        logger.error("write piece error for taskId:{}", taskId, e);
    } finally {
        downLatch.countDown();
    }
}
 
Example 8
Source File: RepositoryS3.java    From github-bucket with ISC License
private boolean walk(Iterator<S3ObjectSummary> iter, ObjectId file, String path) throws IOException {
    byte[] content;
    byte[] newHash;
    LOG.debug("Start processing file: {}", path);
    try (DigestInputStream is = new DigestInputStream(repository.open(file).openStream(), DigestUtils.getMd5Digest())) {
        // Get content
        content = IOUtils.toByteArray(is);
        // Get hash
        newHash = is.getMessageDigest().digest();
    }
    if (isUploadFile(iter, path, Hex.encodeHexString(newHash))) {
        LOG.info("Uploading file: {}", path);
        ObjectMetadata bucketMetadata = new ObjectMetadata();
        bucketMetadata.setContentMD5(Base64.encodeAsString(newHash));
        bucketMetadata.setContentLength(content.length);
        // Give Tika a few hints for the content detection
        Metadata tikaMetadata = new Metadata();
        tikaMetadata.set(Metadata.RESOURCE_NAME_KEY, FilenameUtils.getName(FilenameUtils.normalize(path)));
        // Fire!
        try (InputStream bis = TikaInputStream.get(content, tikaMetadata)) {
            bucketMetadata.setContentType(TIKA_DETECTOR.detect(bis, tikaMetadata).toString());
            s3.putObject(bucket.getName(), path, bis, bucketMetadata);
            return true;
        }
    }
    LOG.info("Skipping file (same checksum): {}", path);
    return false;
}
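Here DigestInputStream updates the digest as IOUtils.toByteArray drains the stream, yielding the content and its MD5 in a single read; the hex form is used for the checksum comparison, while the Base64 form goes into the Content-MD5 metadata that S3 expects.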
 
Example 9
Source File: CacheDetectorImpl.java    From Dragonfly with Apache License 2.0
/**
 * Re-reads a partially or fully downloaded file through a FileChannel, re-hashing each piece
 * (and, when needed, the whole file) to rebuild and report its cache state.
 *
 * @param breakNum    the piece number at which the previous download broke off, or -1 if the file is complete
 * @param cacheResult the cache detection result to update
 * @param metaData    the persisted metadata of the file
 * @param task        the download task being checked
 */
private void processCacheByChannel(int breakNum, CacheResult cacheResult, FileMetaData metaData,
    Task task) {
    String taskId = task.getTaskId();
    Integer pieceSize = task.getPieceSize();
    try (FileInputStream fis = new FileInputStream(PathUtil.getDownloadPath(taskId).toFile());
        FileChannel fc = fis.getChannel()) {

        List<String> pieceMd5s = new ArrayList<>();
        MessageDigest pieceMd5 = DigestUtils.getMd5Digest();
        MessageDigest fileM5 = cacheResult.getFileM5();
        if (breakNum == -1 && StringUtils.isNotBlank(metaData.getRealMd5())) {
            fileM5 = null;
        }

        ByteBuffer bb = generateByteBuffer();
        String pieceMd5Value;
        long curFileLen = fc.size();
        int curPieceTotal =
            breakNum > 0 ? breakNum : (int)((curFileLen + pieceSize - 1) / pieceSize);
        int pieceHead, pieceLen;

        for (int pieceNum = 0; pieceNum < curPieceTotal; pieceNum++) {
            fc.position(pieceNum * (long)pieceSize);
            bb.limit(Constants.PIECE_HEAD_SIZE);
            fc.read(bb);
            bb.flip();
            pieceHead = bb.getInt();
            pieceLen = pieceHead & 0xffffff;
            bb.limit(pieceLen + Constants.PIECE_WRAP_SIZE);
            fc.read(bb);
            bb.flip();
            pieceMd5.update(bb);
            pieceMd5Value = Hex.encodeHexString(pieceMd5.digest()) + ":" + bb.limit();
            cdnReporter.reportPieceStatus(taskId, pieceNum, pieceMd5Value, PeerPieceStatus.SUCCESS,
                FromType.LOCAL.type());
            pieceMd5s.add(pieceMd5Value);
            pieceMd5.reset();

            if (fileM5 != null) {
                bb.flip();
                bb.limit(bb.limit() - 1);
                bb.position(Constants.PIECE_HEAD_SIZE);
                fileM5.update(bb);

            }
            bb.clear();
        }
        if (breakNum == -1) {
            String fileMd5Value = metaData.getRealMd5();
            if (StringUtils.isBlank(fileMd5Value)) {
                fileMd5Value = Hex.encodeHexString(fileM5.digest());
                fileMetaDataService.updateStatusAndResult(taskId, true, true, fileMd5Value, curFileLen);
            }
            cdnReporter.reportTaskStatus(taskId, CdnStatus.SUCCESS, fileMd5Value, curFileLen,
                FromType.LOCAL.type());
            fileMetaDataService.writePieceMd5(taskId, fileMd5Value, pieceMd5s);
        }
        cacheResult.setStartPieceNum(breakNum);

    } catch (Exception e) {
        throw new RuntimeException("report cache by channel error for taskId:" + taskId, e);
    }
}
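The second bb.flip() rewinds the buffer that the piece digest just consumed; the limit() and position() adjustments then strip the trailing delimiter byte and the piece head, so the whole-file MD5 covers only the raw payload.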
 
Example 10
Source File: FdfsFileService.java    From hsweb-framework with Apache License 2.0
@Override
public FileInfoEntity saveFile(InputStream fileStream, String fileName, String type, String creatorId) throws IOException {
    // MetaData createIdMeta = new MetaData("creatorId", creatorId);
    MessageDigest digest = DigestUtils.getMd5Digest();
    String suffix = fileName.contains(".") ?
            fileName.substring(fileName.lastIndexOf(".") + 1) : "";

    StorePath path;
    int fileSize;
    try (InputStream tmp = new InputStream() {

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            int r = super.read(b, off, len);
            if (r > 0) {
                // Hash only the bytes that were actually read
                digest.update(b, off, r);
            }
            return r;
        }

        @Override
        public int read() throws IOException {
            return fileStream.read();
        }

        @Override
        public void close() throws IOException {
            fileStream.close();
            super.close();
        }

        @Override
        public int available() throws IOException {
            return fileStream.available();
        }
    }) {
        path = fastFileStorageClient.uploadFile(tmp, fileSize = tmp.available(), suffix, new HashSet<>());
    }
    String md5 = Hex.encodeHexString(digest.digest());
    FileInfoEntity fileInfo = fileInfoService.createEntity();
    fileInfo.setLocation(path.getFullPath());
    fileInfo.setMd5(md5);
    fileInfo.setStatus(DataStatus.STATUS_ENABLED);
    fileInfo.setSize((long) fileSize);
    fileInfo.setName(fileName);
    fileInfo.setType(type);
    fileInfo.setCreatorId(creatorId);
    fileInfo.setCreateTimeNow();
    fileInfoService.insert(fileInfo);

    return fileInfo;
}
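The anonymous InputStream here (and in the next example) hashes bytes as a side effect of the consumer reading them, so the MD5 is available immediately after the stream is drained without re-reading the data.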
 
Example 11
Source File: LocalFileService.java    From hsweb-framework with Apache License 2.0
@Override
@SuppressWarnings("all")
public FileInfoEntity saveFile(InputStream fileStream, String fileName, String type, String creatorId) throws IOException {
    // upload root path from configuration
    String fileBasePath = getFilePath();
    // relative storage path, partitioned by date: a new directory is created each day
    String filePath = DateFormatter.toString(new Date(), "yyyyMMdd");
    // absolute storage path
    String absPath = fileBasePath.concat("/").concat(filePath);
    File path = new File(absPath);
    if (!path.exists()) {
        path.mkdirs(); // create the directory
    }
    String newName = String.valueOf(System.nanoTime()); // temporary file name (nanosecond timestamp)
    String fileAbsName = absPath.concat("/").concat(newName);
    int fileSize;
    MessageDigest digest = DigestUtils.getMd5Digest();
    try (InputStream proxyStream = new InputStream() {
        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            int l = fileStream.read(b, off, len);
            if (l > 0) {
                // hash only the bytes that were actually read
                digest.update(b, off, l);
            }
            return l;
        }

        @Override
        public void close() throws IOException {
            fileStream.close();
            super.close();
        }

        @Override
        public int available() throws IOException {
            return fileStream.available();
        }

        @Override
        public int read() throws IOException {
            return fileStream.read();
        }
    }; FileOutputStream os = new FileOutputStream(fileAbsName)) {
        int remainBytes = fileSize = proxyStream.available();
        byte[] buff = new byte[remainBytes > 1024 * 10 ? 1024 * 10 : remainBytes];
        int bytes;
        logger.info("start writing file {} to {}, size: {} bytes", fileName, fileAbsName, fileSize);
        while (remainBytes > 0) {
            bytes = proxyStream.read(buff, 0, remainBytes > buff.length ? buff.length : remainBytes);
            os.write(buff, 0, bytes);
            remainBytes -= bytes;
            logger.info("writing file {} to {}, remaining: {} bytes", fileName, fileAbsName, remainBytes);
        }
        // StreamUtils.copy(in, os);
    }

    String md5 = Hex.encodeHexString(digest.digest());

    File newFile = new File(fileAbsName);
    // look up the file by its MD5 to check whether it already exists
    FileInfoEntity fileInfo = fileInfoService.selectByMd5(md5);
    if (fileInfo != null) {
        logger.info("file {} has already been uploaded", fileAbsName);
        if (new File(getFilePath() + "/" + fileInfo.getLocation()).exists()) {
            newFile.delete(); // the file already exists, so just delete the temporary copy
        } else {
            newFile.renameTo(new File(absPath.concat("/").concat(md5)));
        }
        return fileInfo;
    } else {
        logger.info("upload of file {} complete: {} -> {}", fileName, fileAbsName, absPath.concat("/").concat(md5));
        newFile.renameTo(new File(absPath.concat("/").concat(md5)));
    }
    FileInfoEntity infoEntity = fileInfoService.createEntity();
    infoEntity.setCreateTimeNow();
    infoEntity.setCreatorId(creatorId);
    infoEntity.setLocation(filePath.concat("/").concat(md5));
    infoEntity.setName(fileName);
    infoEntity.setType(type);
    infoEntity.setSize((long) fileSize);
    infoEntity.setMd5(md5);
    infoEntity.setStatus(DataStatus.STATUS_ENABLED);
    fileInfoService.insert(infoEntity);
    return infoEntity;
}
 
Example 12
Source File: ConnectionInformationSerializer.java    From rapidminer-studio with GNU Affero General Public License v3.0
/**
 * Writes the given files as zip entries under {@code targetFolder} and creates a {@code .md5} checksum entry for each.
 *
 * @param targetFolder the target folder
 * @param filesToCopy  the files to write
 * @param zos          the ZipOutputStream to write to
 * @throws IOException if writing the zip entry goes wrong
 */
static void writeAsZipEntriesWithMD5(Path targetFolder, List<Path> filesToCopy, ZipOutputStream zos) throws IOException {
	if (filesToCopy == null || filesToCopy.isEmpty()) {
		return;
	}

	if (targetFolder == null) {
		targetFolder = Paths.get("");
	}

	if (zos == null) {
		throw new IOException("Zip output stream cannot be null");
	}

	MessageDigest md5 = DigestUtils.getMd5Digest();
	for (Path file : filesToCopy) {
		String fileName = targetFolder.resolve(file.getFileName()).toString().replace(File.separatorChar, ZIP_FILE_SEPARATOR_CHAR);
		ZipEntry entry = new ZipEntry(fileName);
		ZipEntry md5Entry = new ZipEntry(fileName + MD5_SUFFIX);

		// We read every file twice to speed up reading later
		// - We don't have to copy every file into memory (see else case in handleOtherFile)
	// - Connections should be read far more often than written

		// Write Checksum first
		try (InputStream is = Files.newInputStream(file);
			 DigestInputStream dis = new DigestInputStream(is, md5)) {
			zos.putNextEntry(md5Entry);
			IOUtils.skip(dis, Long.MAX_VALUE);
			zos.write(Hex.encodeHexString(md5.digest()).getBytes());
			zos.closeEntry();
		}

		// Copy entry
		try (InputStream is = Files.newInputStream(file)) {
			zos.putNextEntry(entry);
			IOUtils.copy(is, zos);
			zos.closeEntry();
		}
	}
}