Java Code Examples for org.apache.commons.lang.StringUtils.split()

The following are Java code examples showing how to use the split() method of the org.apache.commons.lang.StringUtils class.
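As a quick orientation before the project examples, here is a minimal, self-contained sketch of the split() overloads that appear below. The class name and input strings are made up; the commented results follow the behaviour documented for Commons Lang 2.x.

import org.apache.commons.lang.StringUtils;

public class SplitDemo {
    public static void main(String[] args) {
        // No separator argument: splits on whitespace -> ["foo", "bar", "baz"]
        String[] words = StringUtils.split("foo  bar baz");

        // Single char separator: adjacent separators are treated as one,
        // so no empty tokens are returned -> ["a", "b", "c"]
        String[] byChar = StringUtils.split("a..b.c", '.');

        // String separator: each character in it is an individual separator
        // -> ["a", "b", "c"]
        String[] byChars = StringUtils.split("a:b,c", ":,");

        // Null-safe: a null input returns null, an empty input returns an empty array
        String[] none = StringUtils.split(null);   // null
        String[] empty = StringUtils.split("");    // new String[0]
    }
}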
Example 1
Project: configx   File: ConfigSearchForm.java
/**
 * Returns the list of config IDs for the given search.
 *
 * @return the list of config IDs, or an empty list if none are specified
 */
public List<Long> getConfigIdList() {
    if (StringUtils.isEmpty(configId)) {
        return Collections.emptyList();
    } else {
        List<Long> configIdList = new ArrayList<>();
        for (String str : StringUtils.split(this.configId)) {
            try {
                configIdList.add(Long.valueOf(str));
            } catch (Exception e) {
                // Ignore values that cannot be parsed as a Long
            }
        }
        return configIdList;
    }
}
 
Example 2
Project: jfrog-idea-plugin   File: VersionImpl.java
@Override
public boolean isAtLeast(String atLeast) {
    if (StringUtils.isBlank(atLeast)) {
        return true;
    }

    String[] versionTokens = StringUtils.split(version, ".");
    String[] otherVersionTokens = StringUtils.split(atLeast, ".");
    for (int tokenIndex = 0; tokenIndex < otherVersionTokens.length; tokenIndex++) {
        String atLeastToken = otherVersionTokens[tokenIndex].trim();
        String versionToken = versionTokens.length < (tokenIndex + 1) ? "0" : versionTokens[tokenIndex].trim();
        int comparison = compareTokens(versionToken, atLeastToken);
        if (comparison != 0) {
            return comparison > 0;
        }
    }
    return true;
}
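A quick sketch of why the loop above substitutes "0" for missing tokens: split() simply produces fewer elements for the shorter version string. The version numbers here are made up.

import org.apache.commons.lang.StringUtils;

class VersionSplitSketch {
    public static void main(String[] args) {
        String[] version = StringUtils.split("2.1", ".");    // ["2", "1"]
        String[] atLeast = StringUtils.split("2.1.3", ".");  // ["2", "1", "3"]
        // At index 2 the shorter array has no token, so the loop above
        // falls back to "0" and compares it with "3" -> 2.1 is not at least 2.1.3.
        String padded = version.length < (2 + 1) ? "0" : version[2];
        System.out.println(padded); // "0"
    }
}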
 
Example 3
Project: util4j   File: DataTools.java
public byte[] subByteArrays(byte[] data,byte[] startArray)
	{
		if(data==null||data.length<=0)
		{
			System.out.println("Invalid data argument!");
			return null;
		}
		String[] dataHex=byteArrayToHexArray(data);//convert to a hex string array
		String dataHexStr=Arrays.toString(dataHex);//convert to a hex string
		dataHexStr=StringUtils.substringBetween(dataHexStr, "[", "]").replaceAll("\\s", "");//strip the brackets and whitespace
		
		String[] startHex=byteArrayToHexArray(startArray);//convert to a hex string array
		String startHexStr=Arrays.toString(startHex);//convert to a hex string
		startHexStr=StringUtils.substringBetween(startHexStr, "[", "]").replaceAll("\\s", "");//strip the brackets and whitespace
		String resultHex=StringUtils.substringAfter(dataHexStr, startHexStr);//take the part after the start sequence as a hex string
		if(resultHex==null)
		{
			//System.out.println("Note: nothing was extracted, no data!");
			return null;
		}
		String[] result=StringUtils.split(resultHex, ',');//re-split into a hex string array
//		System.out.println(Arrays.toString(result));
		return hexArrayToBtyeArray(result);
	}
 
Example 4
Project: hadoop   File: TestS3AConfiguration.java
/**
 * Test if custom endpoint is picked up.
 * <p/>
 * The test expects TEST_ENDPOINT to be defined in the Configuration
 * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
 * (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
 * Evidently, the bucket has to be hosted in the region denoted by the
 * endpoint for the test to succeed.
 * <p/>
 * More info and the list of endpoint identifiers:
 * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
 *
 * @throws Exception
 */
@Test
public void TestEndpoint() throws Exception {
  conf = new Configuration();
  String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
  if (endpoint.isEmpty()) {
    LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
        "setting was not detected");
  } else {
    conf.set(Constants.ENDPOINT, endpoint);
    fs = S3ATestUtils.createTestFileSystem(conf);
    AmazonS3Client s3 = fs.getAmazonS3Client();
    String endPointRegion = "";
    // Differentiate handling of "s3-" and "s3." based endpoint identifiers
    String[] endpointParts = StringUtils.split(endpoint, '.');
    if (endpointParts.length == 3) {
      endPointRegion = endpointParts[0].substring(3);
    } else if (endpointParts.length == 4) {
      endPointRegion = endpointParts[1];
    } else {
      fail("Unexpected endpoint");
    }
    assertEquals("Endpoint config setting and bucket location differ: ",
        endPointRegion, s3.getBucketLocation(fs.getUri().getHost()));
  }
}
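The region extraction above depends on how many tokens split() produces for the two endpoint styles. A rough illustration follows; the "s3-" host is the one mentioned in the Javadoc, while the dotted variant is only assumed for contrast.

import org.apache.commons.lang.StringUtils;

class EndpointSplitSketch {
    public static void main(String[] args) {
        // "s3-" style: three tokens, region is embedded in the first one
        String[] dashed = StringUtils.split("s3-eu-west-1.amazonaws.com", '.');
        // dashed = ["s3-eu-west-1", "amazonaws", "com"]
        System.out.println(dashed[0].substring(3)); // "eu-west-1"

        // "s3." style: four tokens, region is its own token
        String[] dotted = StringUtils.split("s3.eu-west-1.amazonaws.com", '.');
        // dotted = ["s3", "eu-west-1", "amazonaws", "com"]
        System.out.println(dotted[1]); // "eu-west-1"
    }
}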
 
Example 5
Project: kaltura-ce-sakai-extension   File: KalturaEntityProducer.java
public boolean parseEntityReference(String reference, Reference ref) {
    if (reference.startsWith(REFERENCE_ROOT)) {
        if (log.isDebugEnabled()) log.debug("kaltura EP.parseEntityReference(reference="+reference+", ref="+ref+")");
        String[] parts = StringUtils.split(reference, Entity.SEPARATOR);

        String subType = REFERENCE_SUB_TYPE;
        String context = null;
        String id = null;
        String container = "";
        if (parts.length > 1) {
            String type = parts[1];
            if ("collection".equals(type) && parts.length > 2) {
                // assume /kaltura/collection/collectionid
                id = parts[2];
                MediaCollection mc = service.getCollection(id, 0, 0);
                if (mc != null) {
                    context = mc.getLocationId();
                }
                ref.set("kaltura", subType, id, container, context);
                if (log.isDebugEnabled()) log.debug("kaltura EP.parseEntityReference: TRUE context="+context+", ref="+ref);
                return true;
            }
            // handle other types?
        }
        if (log.isDebugEnabled()) log.debug("kaltura EP.parseEntityReference: FALSE");
    }
    return false;
}
 
Example 6
Project: Hydrograph   File: OutputSparkRedshiftConverter.java
private TypePrimaryKeys getPrimaryKeyColumnFields(String primaryKeyFeilds) {
	TypePrimaryKeys primaryKeys = new TypePrimaryKeys();
	String[] primaryKeyColumnsFields = StringUtils.split(primaryKeyFeilds, Constants.LOAD_TYPE_NEW_TABLE_VALUE_SEPERATOR);
	if(primaryKeyColumnsFields !=null && primaryKeyColumnsFields.length>0){
		TypeKeyFields primaryTypeKeyFields = new TypeKeyFields();
		primaryKeys.setPrimaryKeys(primaryTypeKeyFields);
		for(String fieldValue : primaryKeyColumnsFields){
			TypeFieldName primaryTypeFieldName = new TypeFieldName();
			primaryTypeFieldName.setName(fieldValue);
			primaryTypeKeyFields.getField().add(primaryTypeFieldName);
		}
	}
			
	return primaryKeys;
}
 
Example 7
Project: sierra   File: MutationPatternScoresExporter.java
private static List<MutationElem> parsePatternString(
		DrugClass drugClass, String pattern) {
	List<MutationElem> mutElements = new ArrayList<>();
	String[] mutArr = StringUtils.split(pattern, ',');
	Gene gene = drugClass.gene();
	for (int i=0; i < mutArr.length; i++) {
		Matcher mm = mutationRegex.matcher(mutArr[i]);
		mm.find();
		int pos = Integer.parseInt(mm.group(2));
		String aa = mm.group(3);
		MutationElem mutElem = new MutationElem(gene, pos, aa);
		mutElements.add(mutElem);
	}
	return mutElements;
}
 
Example 8
Project: ymate-module-sso   File: DefaultSSOTokenAdapter.java
@Override
public ISSOToken decryptToken(String tokenSeriStr) throws Exception {
    if (StringUtils.isNotBlank(tokenSeriStr)) {
        String _userAgent = WebContext.getRequest().getHeader("User-Agent");
        String[] _tokenArr = StringUtils.split(WebUtils.decryptStr(tokenSeriStr, _userAgent), "|");
        if (_tokenArr != null && _tokenArr.length == 2) {
            return new DefaultSSOToken(_tokenArr[0], _userAgent, null, BlurObject.bind(_tokenArr[1]).toLongValue()).build();
        }
    }
    return null;
}
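Note that the "|" passed to split() here is a plain separator character: unlike String.split, StringUtils.split does not treat its separator as a regular expression, and it drops empty tokens, which is what the length check guards against. A small sketch, with a made-up token payload:

import org.apache.commons.lang.StringUtils;

class TokenSplitSketch {
    public static void main(String[] args) {
        // Commons Lang treats "|" literally -> ["admin", "1510000000000"]
        String[] ok = StringUtils.split("admin|1510000000000", "|");
        System.out.println(ok.length); // 2

        // String.split would interpret "|" as regex alternation,
        // so the separator has to be escaped there
        String[] viaRegex = "admin|1510000000000".split("\\|");

        // Empty tokens are dropped, so a missing field fails the length == 2 check
        String[] broken = StringUtils.split("|1510000000000", "|");
        System.out.println(broken.length); // 1, not 2
    }
}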
 
Example 9
Project: hadoop-oss   File: Count.java
private List<StorageType> getAndCheckStorageTypes(String types) {
  if ("".equals(types) || "all".equalsIgnoreCase(types)) {
    return StorageType.getTypesSupportingQuota();
  }

  String[] typeArray = StringUtils.split(types, ',');
  List<StorageType> stTypes = new ArrayList<>();

  for (String t : typeArray) {
    stTypes.add(StorageType.parseStorageType(t));
  }

  return stTypes;
}
 
Example 10
Project: soundwave   File: Query.java
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("/query")
@ApiOperation(
    value = "Send a general query",
    response = Map.class,
    responseContainer = "List")
public Response query(EsQuery esQuery) {

  OperationStats opStats = new OperationStats("cmdb_api", "query", new HashMap<>());
  Map<String, String> tags = new HashMap<>();

  String queryString = esQuery.getQueryString();
  String fields = esQuery.getFields();

  try {

    // Metrics tags
    tags.put("status", String.valueOf(Response.Status.OK.getStatusCode()));
    opStats.succeed(tags);

    logger.info("Success: query - {}", queryString);

    String[] includeFields = StringUtils.split(fields, ",");
    Iterator<EsQueryResult> output = cmdbInstanceStore.query(queryString, includeFields);
    // Process iterator items one by one instead of converting to list
    List<Map<String, Object>> results = generateFlatOutput(output, includeFields);
    return Response.status(Response.Status.OK)
        .type(MediaType.APPLICATION_JSON)
        .entity(results)
        .build();

  } catch (Exception e) {
    return Utils.responseException(e, logger, opStats, tags);

  }

  // End of query function
}
 
Example 11
Project: util4j   File: DataTools.java
/**
 * Extracts the data that starts with the given string.
 * @param data the source bytes
 * @param startStr the leading string to match
 * @return the hex string array following the match, or null if there is no data
 * @throws UnsupportedEncodingException
 */
public String[] SubHexArraysByStr(byte[] data,String startStr) throws UnsupportedEncodingException
{
	if(data==null||data.length<=0)
	{
		System.out.println("Invalid data!");
		return null;
	}
	String[] result=null;
	//convert the source data
	String[] hexarray=byteArrayToHexArray(data);
	String hexstr=Arrays.toString(hexarray);
	hexstr=StringUtils.substringBetween(hexstr, "[", "]").replaceAll("\\s", "");//strip the brackets and whitespace from the source string
	//convert the match argument
	byte[] startArray=startStr.getBytes("utf-8");//convert to bytes
	String[] startHex=byteArrayToHexArray(startArray);//convert to a hex string array
	String startHexStr=Arrays.toString(startHex);//convert to a hex string
	startHexStr=StringUtils.substringBetween(startHexStr, "[", "]").replaceAll("\\s", "");//strip the brackets and whitespace
	String resultHex=StringUtils.substringAfter(hexstr, startHexStr);
	if(resultHex==null)
	{
		//System.out.println("Note: nothing was extracted, no data!");
		return null;
	}
	result=StringUtils.split(resultHex, ',');//re-split into a hex string array
	return result;
}
 
Example 12
Project: configx   File: EnvService.java
/**
 * Checks whether the given name is one of the environment's aliases.
 *
 * @param env  the environment to check
 * @param name the name to test
 * @return true if the name matches an alias (case-insensitive)
 */
private boolean isAlias(Env env, String name) {
    if (StringUtils.isEmpty(env.getAlias())) {
        return false;
    }
    String[] aliases = StringUtils.split(env.getAlias(), ",");
    for (String alias : aliases) {
        if (alias != null && alias.equalsIgnoreCase(name)) {
            return true;
        }
    }
    return false;
}
 
Example 13
Project: Hydrograph   File: ParameterUtil.java
/**
 * Returns true if the input path starts with a parameter.
 * 
 * @param path the path to inspect
 * @param separator the path separator character
 * @return true if the first path segment is a parameter
 */
public static boolean startsWithParameter(String path,char separator) {
	String pathSegments[]=StringUtils.split(path, separator);
	if(isParameter(pathSegments[0])){
			return true;
	}
	return false;
}
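Unlike String.split, a leading separator does not produce an empty first element here, so pathSegments[0] is the first real segment. Note, though, that split() returns an empty array for a path made up only of separators, in which case the index access above would throw. A rough sketch; the @{...} parameter syntax is only an assumption about what isParameter checks.

import org.apache.commons.lang.StringUtils;

class PathSplitSketch {
    public static void main(String[] args) {
        // Leading '/' does not create an empty first token
        String[] segments = StringUtils.split("/@{input_path}/data.txt", '/');
        // segments = ["@{input_path}", "data.txt"]
        System.out.println(segments[0]);

        // By contrast, String.split keeps the empty leading element
        String[] raw = "/@{input_path}/data.txt".split("/");
        // raw = ["", "@{input_path}", "data.txt"]

        // A path of only separators yields an empty array, so index 0
        // would throw ArrayIndexOutOfBoundsException in startsWithParameter
        String[] none = StringUtils.split("///", '/'); // length 0
    }
}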
 
Example 14
Project: morf   File: XmlDataSetProducer.java
/**
 * Creates the index and reads the meta-data
 */
public PullProcessorIndex() {
  super();
  indexName = xmlStreamReader.getAttributeValue(XmlDataSetNode.URI, XmlDataSetNode.NAME_ATTRIBUTE);
  isUnique = Boolean.parseBoolean(xmlStreamReader.getAttributeValue(XmlDataSetNode.URI, XmlDataSetNode.UNIQUE_ATTRIBUTE));

  String columnsNamesCombined = xmlStreamReader.getAttributeValue(XmlDataSetNode.URI, XmlDataSetNode.COLUMNS_ATTRIBUTE);

  for (String columnName : StringUtils.split(columnsNamesCombined, ",")) {
    columnNames.add(columnName.trim());
  }
}
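Because only "," is passed as the separator, any whitespace around a column name stays inside its token, which is why each name is trimmed afterwards. A minimal sketch with made-up column names:

import org.apache.commons.lang.StringUtils;

class ColumnSplitSketch {
    public static void main(String[] args) {
        String combined = "id, name ,version";
        // Whitespace is not a separator here, so it survives inside the tokens
        for (String columnName : StringUtils.split(combined, ",")) {
            System.out.println("[" + columnName.trim() + "]"); // [id] [name] [version]
        }
        // Without trim() the tokens would be "id", " name " and "version"
    }
}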
 
Example 15
Project: Hydrograph   File: DatabaseKeyValidation.java
@Override
public boolean validate(Object object, String propertyName, Map<String, List<FixedWidthGridRow>> inputSchemaMap,
		boolean isJobFileImported) {
	if (object != null) {
		List<String> tmpList = new LinkedList<>();
		Map<String, Object> keyFieldsList = (LinkedHashMap<String, Object>) object;
		
		if (keyFieldsList != null && !keyFieldsList.isEmpty()&& keyFieldsList.containsKey(Constants.LOAD_TYPE_NEW_TABLE_KEY)) {
			if(inputSchemaMap != null){
				for(java.util.Map.Entry<String, List<FixedWidthGridRow>> entry : inputSchemaMap.entrySet()){
					List<FixedWidthGridRow> gridRowList = entry.getValue();
					gridRowList.forEach(gridRow -> tmpList.add(gridRow.getFieldName()));
				}
			}
			for(Entry<String, Object> grid: keyFieldsList.entrySet()){
			    String[] keyValues = StringUtils.split((String) grid.getValue(), ",");
			    for(String values : keyValues){
			    	if(!tmpList.contains(values)){
			    		errorMessage = "Target Fields Should be present in Available Fields";
						return false;
			    	}
			    }
			}
			
			return true;
		}else if(keyFieldsList != null && !keyFieldsList.isEmpty() && keyFieldsList.containsKey(Constants.LOAD_TYPE_INSERT_KEY)){
			return true;
		}else if(keyFieldsList != null && !keyFieldsList.isEmpty() && keyFieldsList.containsKey(Constants.LOAD_TYPE_REPLACE_KEY)){
			return true;
		}
	}
	return true;
}
 
Example 16
Project: Hydrograph   File: ELTJoinPortCount.java
private void setPropertyNames() {
	String propertyNameArray[] = StringUtils.split(this.propertyName, "|");
	if (StringUtils.isNotEmpty(propertyNameArray[0]))
		firstPortPropertyName = propertyNameArray[0];

	if (propertyNameArray.length == 2
			&& StringUtils.equals(Constants.UNUSED_PORT_COUNT_PROPERTY, propertyNameArray[1]))
		this.unusedPortPropertyName = propertyNameArray[1];

}
 
Example 17
Project: Hydrograph   File: OutputRedshiftConverter.java
private TypePrimaryKeys getPrimaryKeyColumnFields(String primaryKeyFeilds) {
	TypePrimaryKeys primaryKeys = new TypePrimaryKeys();
	String[] primaryKeyColumnsFields = StringUtils.split(primaryKeyFeilds, Constants.LOAD_TYPE_NEW_TABLE_VALUE_SEPERATOR);
	if(primaryKeyColumnsFields !=null && primaryKeyColumnsFields.length>0){
		TypeKeyFields primaryTypeKeyFields = new TypeKeyFields();
		primaryKeys.setPrimaryKeys(primaryTypeKeyFields);
		for(String fieldValue : primaryKeyColumnsFields){
			TypeFieldName primaryTypeFieldName = new TypeFieldName();
			primaryTypeFieldName.setName(fieldValue);
			primaryTypeKeyFields.getField().add(primaryTypeFieldName);
		}
	}
			
	return primaryKeys;
}
 
Example 18
Project: Hydrograph   File: OutputDBUpdateConverter.java
@Override
public void prepareForXML() {
	logger.debug("Generating XML for {}", properties.get(Constants.PARAM_NAME));
	super.prepareForXML();
	JdbcUpdate jdbcUpdate = (JdbcUpdate) baseComponent;
	jdbcUpdate.setRuntimeProperties(getRuntimeProperties());

	if(StringUtils.isNotBlank(String.valueOf(properties.get(PropertyNameConstants.JDBC_DB_DRIVER.value())))){
		String jdbcDriverClassUIValue = (String) properties.get(PropertyNameConstants.JDBC_DB_DRIVER.value());
		
		if(StringUtils.isNotBlank(jdbcDriverClassUIValue)){
			ElementValueStringType jdbcDriverClass = new ElementValueStringType();
			jdbcDriverClass.setValue(jdbcDriverClassUIValue);
			jdbcUpdate.setJdbcDriverClass(jdbcDriverClass);
		}
	}
	
	if(StringUtils.isNotBlank((String) properties.get(PropertyNameConstants.URL.value()))){
		ElementValueStringType url = new ElementValueStringType();
		url.setValue(String.valueOf(properties.get(PropertyNameConstants.URL.value())));
		jdbcUpdate.setUrl(url);
	}
	
	if(StringUtils.isNotBlank((String) properties.get(PropertyNameConstants.TABLE_NAME.value()))){
		ElementValueStringType tableName = new ElementValueStringType();
		tableName.setValue(String.valueOf(properties.get(PropertyNameConstants.TABLE_NAME.value())));
		jdbcUpdate.setTableName(tableName);
	}
	
	if(StringUtils.isNotBlank((String)properties.get(PropertyNameConstants.BATCH_SIZE.value()))){
		ElementValueIntegerType chunkSize = new ElementValueIntegerType();
		BigInteger db_chunkSize = getBigInteger(PropertyNameConstants.BATCH_SIZE.value());
		chunkSize.setValue(db_chunkSize);
		jdbcUpdate.setBatchSize(chunkSize);
	}
	
	if(StringUtils.isNotBlank((String) properties.get(PropertyNameConstants.USER_NAME.value()))){
		ElementValueStringType userName = new ElementValueStringType();
		userName.setValue(String.valueOf(properties.get(PropertyNameConstants.USER_NAME.value())));
		jdbcUpdate.setUserName(userName);
	}
	
	if(StringUtils.isNotBlank((String) properties.get(PropertyNameConstants.PASSWORD.value()))){
		ElementValueStringType password = new ElementValueStringType();
		password.setValue(String.valueOf(properties.get(PropertyNameConstants.PASSWORD.value())));
		jdbcUpdate.setPassword(password);
	}
	
	
	if(StringUtils.isNotBlank((String) properties.get(PropertyNameConstants.SELECT_UPDATE_KEYS.value()))){
		String str = (String) properties.get(PropertyNameConstants.SELECT_UPDATE_KEYS.value());
		TypeUpdateKeys updateKeys = new TypeUpdateKeys();
		String[] updateKeyColumnsFeilds = StringUtils.split(str, Constants.LOAD_TYPE_NEW_TABLE_VALUE_SEPERATOR);
		if(updateKeyColumnsFeilds !=null && updateKeyColumnsFeilds.length>0){
			TypeKeyFields updateTypeKeyFields = new TypeKeyFields();
			updateKeys.setUpdateByKeys(updateTypeKeyFields);
			for(String fieldValue : updateKeyColumnsFeilds){
				TypeFieldName updateTypeFieldName = new TypeFieldName();
				updateTypeFieldName.setName(fieldValue);
				updateTypeKeyFields.getField().add(updateTypeFieldName);
			}
			jdbcUpdate.setUpdate(updateKeys);
		}
	}
	
	
	
}
 
Example 19
Project: morf   File: UpgradeTestHelper.java
/**
 * Validate that the upgrade step meets the basic requirements.
 */
private void validateUpgradeStep(UpgradeStep upgradeStep) {
  Class<? extends UpgradeStep> upgradeStepClass = upgradeStep.getClass();

  // Check the upgrade step has a Sequence
  if (upgradeStepClass.getAnnotation(Sequence.class) == null) {
    fail("Upgrade step [" + upgradeStepClass.getName() + "] should have a Sequence set. How about ["
        + System.currentTimeMillis() / 1000 + "]");
  }
  // Check the upgrade step has a UUID
  UUID uuidAnnotation = upgradeStepClass.getAnnotation(UUID.class);
  String currentUuid = uuidAnnotation == null ? null : uuidAnnotation.value();

  if (StringUtils.isBlank(currentUuid) || !uuids.add(currentUuid)) {
    fail("Upgrade step [" + upgradeStepClass + "] should have a non blank, unique UUID set. How about ["
        + java.util.UUID.randomUUID().toString() + "]");
  }

  // verify we can parse the UUID
  try {
    assertNotNull(java.util.UUID.fromString(currentUuid));
  } catch (Exception e) {
    throw new RuntimeException("Could not parse UUID [" + currentUuid + "] from [" + upgradeStepClass+"]", e);
  }

  // Check the upgrade step has a description
  final String description = upgradeStep.getDescription();
  assertTrue("Should have a description", StringUtils.isNotEmpty(description));
  assertTrue("Description must not be more than 200 characters", description.length() <= 200);

  // Descriptions should not end with full-stops
  if (description.endsWith(".")) {
    fail(String.format("Description for [%s] should not end with full stop - [%s]", upgradeStepClass.getSimpleName(),
      description));
  }

  assertTrue("Should have a JIRA ID", StringUtils.isNotEmpty(upgradeStep.getJiraId()));

  for (String jiraId : StringUtils.split(upgradeStep.getJiraId(), ',')) {
    assertTrue("Should be a valid JIRA ID [" + upgradeStep.getJiraId() + "]", jiraIdIsValid(jiraId));
  }
}
 
Example 20
Project: esup-sgc   File: IpService.java
public List<String> getBannedIp(){

    List<String> bannedIp = new ArrayList<String>();

    if(appliConfigService.getBannedIpStats() != null){
        String [] split = StringUtils.split(appliConfigService.getBannedIpStats(),",");

        bannedIp = Arrays.asList(split);
    }

    return bannedIp;
}