Java Code Examples for java.util.LinkedHashMap#entrySet()

The following examples show how to use java.util.LinkedHashMap#entrySet(). They are drawn from open source projects; the source file, project, and license are noted above each example.
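Because LinkedHashMap preserves insertion order, entrySet() returns entries in the order the keys were put into the map, and most of the examples below rely on that property. Here is a minimal, self-contained sketch (not taken from any of the projects below) illustrating it:

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetOrderDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);
        // Iteration follows insertion order: first, second, third.
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}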
Example 1
Source File: SignatureUtil.java    From singleton with Eclipse Public License 2.0
/**
 * A helper util for generate signature
 *
 * @param paramMap The parameters for generation signature
 * @return signature string
 */
public static String sign(LinkedHashMap<String, String> paramMap) {
    if (null == paramMap) {
        logger.error("signature parameter can't be null");
        return "";
    }
    StringBuilder paramStr = new StringBuilder();
    for (Entry<String,String> entry : paramMap.entrySet()) {
        paramStr.append(entry.getKey());
        paramStr.append(entry.getValue());
    }
    try {
        String signatureStr = EncryptUtil.SHA512(paramStr.toString().getBytes(
                ConstantsUnicode.UTF8));
        return signatureStr.toUpperCase();
    } catch (UnsupportedEncodingException e) {
        logger.error(e.getMessage(), e);
    }
    return "";
}
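A hypothetical call site for the helper above (a sketch only; the parameter names are invented, while SignatureUtil, EncryptUtil and ConstantsUnicode come from the singleton project):

LinkedHashMap<String, String> params = new LinkedHashMap<>();
params.put("appId", "demo-app");        // invented parameter
params.put("timestamp", "1700000000");  // invented parameter
String signature = SignatureUtil.sign(params);
// The map's insertion order fixes the concatenation order of the key/value pairs,
// so both sides of the exchange must add parameters in the same order.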
 
Example 2
Source File: StatsTest.java    From cucumber-performance with MIT License
@Test
public void testGetStatistics() {
	Stats stats = new Stats();
	stats.putStatistic("avg", 1.0,"grp","scn","stp");
	stats.putStatistic("cnt", 2.0,"grp","scn","stp");
	LinkedHashMap<String,Double> result =stats.getStatistics("grp", "scn", "stp");
	int i = 0;
	for (Entry<String, Double> set : result.entrySet()) {
		if (i==0)
			assertEquals("avg",set.getKey());
		else
			assertEquals("cnt",set.getKey());
		i++;
	}
	
}
 
Example 3
Source File: TestEdge.java    From tez with Apache License 2.0
private void verifyEvents(TezTaskAttemptID srcTAID, LinkedHashMap<TezTaskID, Task> destTasks) {
  int count = 0;

  for (Entry<TezTaskID, Task> taskEntry : destTasks.entrySet()) {
    Task mockTask = taskEntry.getValue();
    ArgumentCaptor<TezEvent> args = ArgumentCaptor.forClass(TezEvent.class);
    verify(mockTask, times(1)).registerTezEvent(args.capture());
    TezEvent capturedEvent = args.getValue();

    DataMovementEvent dmEvent = (DataMovementEvent) capturedEvent.getEvent();
    assertEquals(srcTAID.getId(), dmEvent.getVersion());
    assertEquals(count++, dmEvent.getSourceIndex());
    assertEquals(srcTAID.getTaskID().getId(), dmEvent.getTargetIndex());
    byte[] res = new byte[dmEvent.getUserPayload().limit() - dmEvent.getUserPayload().position()];
    dmEvent.getUserPayload().slice().get(res);
    assertTrue(Arrays.equals("bytes".getBytes(), res));
  }
}
 
Example 4
Source File: ManLog_8.java    From manifold with Apache License 2.0
private void recordSuspendedIssues()
{
  LinkedHashMap<JCTree, Stack<Stack<JCDiagnostic>>> suspendedIssues =
    _suspendedIssues.get( getDiagnosticHandler() );

  for( Map.Entry<JCTree, Stack<Stack<JCDiagnostic>>> entry: suspendedIssues.entrySet() )
  {
    Stack<Stack<JCDiagnostic>> issueFrames = entry.getValue();
    Stack<JCDiagnostic> issueFrame = issueFrames.pop();
    if( !issueFrames.isEmpty() )
    {
      throw new IllegalStateException( "Invalid issue frames, should be only one frame" );
    }
    for( JCDiagnostic d: issueFrame )
    {
      super.report( d );
    }
  }
}
 
Example 5
Source File: ClusteringComponent.java    From lucene-solr with Apache License 2.0
/**
 * Setup the default clustering engine.
 * @see "https://issues.apache.org/jira/browse/SOLR-5219"
 */
private static <T extends ClusteringEngine> void setupDefaultEngine(String type, LinkedHashMap<String,T> map) {
  // If there's already a default algorithm, leave it as is.
  String engineName = ClusteringEngine.DEFAULT_ENGINE_NAME;
  T defaultEngine = map.get(engineName);

  if (defaultEngine == null ||
      !defaultEngine.isAvailable()) {
    // If there's no default algorithm, and there are any algorithms available, 
    // the first definition becomes the default algorithm.
    for (Map.Entry<String, T> e : map.entrySet()) {
      if (e.getValue().isAvailable()) {
        engineName = e.getKey();
        defaultEngine = e.getValue();
        map.put(ClusteringEngine.DEFAULT_ENGINE_NAME, defaultEngine);
        break;
      }
    }
  }

  if (defaultEngine != null) {
    if (log.isInfoEnabled()) {
      log.info("Default engine for {}: {} [{}]", type, engineName, defaultEngine.getClass().getSimpleName());
    }
  } else {
    log.warn("No default engine for {}.", type);
  }
}
 
Example 6
Source File: DemonymsTableExtractor.java    From yago3 with GNU General Public License v3.0
public static void main(String[] args) {

    System.out.println("# generated by DemonymsTableExtractor");
    LinkedHashMap<String, String> patterns = getLocationPatterns();

    for (Entry<String, String> location : patterns.entrySet()) {
      StringBuilder sb = new StringBuilder();
      sb.append("\"");
      sb.append(location.getValue());
      sb.append("\" <_categoryPattern> \"$0 <isCitizenOf> ");
      sb.append(location.getKey());
      sb.append("\" .");
      System.out.println(sb.toString());
    }
}
 
Example 7
Source File: DataSetTransformer.java    From Knowage-Server with GNU Affero General Public License v3.0
public String seriesRangeName(LinkedHashMap<String, LinkedHashMap> serieMap) throws JSONException {
	String serieName = "";
	for (Map.Entry<String, LinkedHashMap> entry : serieMap.entrySet()) {
		LinkedHashMap value = entry.getValue();
		if (value.get("type").equals("arearangelow") || value.get("type").equals("arearangehigh")) {
			serieName += value.get("column") + " ";
		}
	}
	return serieName;
}
 
Example 8
Source File: Bean64Table.java    From kripton with Apache License 2.0
/**
 * for attribute valueLinkedMapStringBean serialization
 */
public static byte[] serializeValueLinkedMapStringBean(LinkedHashMap<String, Bean64> value) {
  if (value==null) {
    return null;
  }
  KriptonJsonContext context=KriptonBinder.jsonBind();
  try (KriptonByteArrayOutputStream stream=new KriptonByteArrayOutputStream(); JacksonWrapperSerializer wrapper=context.createSerializer(stream)) {
    JsonGenerator jacksonSerializer=wrapper.jacksonGenerator;
    jacksonSerializer.writeStartObject();
    int fieldCount=0;
    if (value!=null)  {
      fieldCount++;
      // write wrapper tag
      if (value.size()>0) {
        jacksonSerializer.writeFieldName("element");
        jacksonSerializer.writeStartArray();
        for (Map.Entry<String, Bean64> item: value.entrySet()) {
          jacksonSerializer.writeStartObject();
          jacksonSerializer.writeStringField("key", item.getKey());
          if (item.getValue()==null) {
            jacksonSerializer.writeNullField("value");
          } else {
            jacksonSerializer.writeFieldName("value");
            bean64BindMap.serializeOnJackson(item.getValue(), jacksonSerializer);
          }
          jacksonSerializer.writeEndObject();
        }
        jacksonSerializer.writeEndArray();
      } else {
        jacksonSerializer.writeNullField("element");
      }
    }
    jacksonSerializer.writeEndObject();
    jacksonSerializer.flush();
    return stream.toByteArray();
  } catch(Exception e) {
    e.printStackTrace();
    throw(new KriptonRuntimeException(e.getMessage()));
  }
}
 
Example 9
Source File: Storage.java    From litchi with Apache License 2.0
/**
 * Builds indexes for the index table, based on the index fields of the config entity classes and their corresponding index values.
 * @param modelAdapterList the config adapters to index
 */
private void createIndex(List<ConfigAdapter> modelAdapterList) {
	if (this.fieldMaps.isEmpty()) {
		return;
	}

	if (modelAdapterList.isEmpty()) {
		return;
	}

	List<IndexObject> indexObjectList = new ArrayList<>();
	modelAdapterList.get(0).registerIndex(indexObjectList);

	for (IndexObject indexObject : indexObjectList) {
		if (indexObject.getColumnList().isEmpty()) {
			LOGGER.error("IndexObject column is empty. object{}", indexObject);
			continue;
		}

		List<String> columnsList = new ArrayList<>();
		columnsList.addAll(indexObject.getColumnList());

		LinkedHashMap<Object, List<ConfigAdapter>> valueMaps = getFilterList(columnsList.remove(0), modelAdapterList);
		for (Entry<Object, List<ConfigAdapter>> entry : valueMaps.entrySet()) {
			List<String> newColumnsList = new ArrayList<>(columnsList);
			String newIndexName = getIndexKey(indexObject.getIndexName(), entry.getKey());
			this.indexTable.put(newIndexName, entry.getValue());
			filter(entry.getValue(), newColumnsList, newIndexName);
		}
	}
}
 
Example 10
Source File: TtlUpdateManagerTest.java    From ambry with Apache License 2.0
/**
 * Runs the router code resolution test based on the input
 * @param codesToSetAndTest a {@link LinkedHashMap} that defines the ordering of the router error codes and also
 *                          provides the server error codes that must be set and their equivalent router error codes.
 * @throws Exception
 */
private void doRouterErrorCodeResolutionTest(LinkedHashMap<ServerErrorCode, RouterErrorCode> codesToSetAndTest)
    throws Exception {
  if (codesToSetAndTest.size() * 2 > serverCount) {
    throw new IllegalStateException("Cannot run test because there aren't enough servers for the given codes");
  }
  List<ServerErrorCode> serverErrorCodes =
      new ArrayList<>(Collections.nCopies(serverCount, ServerErrorCode.Blob_Not_Found));
  List<RouterErrorCode> expected = new ArrayList<>(codesToSetAndTest.size());
  // fill in the array with all the error codes that need resolution and knock them off one by one
  // has to be repeated because the op tracker returns failure if it sees 8/9 failures and the success target is 2
  int serverIdx = 0;
  for (Map.Entry<ServerErrorCode, RouterErrorCode> entry : codesToSetAndTest.entrySet()) {
    serverErrorCodes.set(serverIdx, entry.getKey());
    serverErrorCodes.set(serverIdx + 1, entry.getKey());
    expected.add(entry.getValue());
    serverIdx += 2;
  }
  expected.add(RouterErrorCode.BlobDoesNotExist);
  for (int i = 0; i < expected.size(); i++) {
    List<ServerErrorCode> shuffled = new ArrayList<>(serverErrorCodes);
    Collections.shuffle(shuffled);
    setServerErrorCodes(shuffled, serverLayout);
    RouterErrorCode expectedRouterError = resolveRouterErrorCode(serverErrorCodes, expected.get(i));
    executeOpAndVerify(blobIds, expectedRouterError, false, true, true, false);
    if (i * 2 + 1 < serverErrorCodes.size()) {
      serverErrorCodes.set(i * 2, ServerErrorCode.Blob_Not_Found);
      serverErrorCodes.set(i * 2 + 1, ServerErrorCode.Blob_Not_Found);
    }
  }
  serverLayout.getMockServers().forEach(MockServer::resetServerErrors);
  assertTtl(router, blobIds, TTL_SECS);
}
 
Example 11
Source File: Utility.java    From fast with GNU General Public License v2.0
public static String[] linkedHashMapToStrings(LinkedHashMap<String, Double> aMap, String delimiter){
	String[] strs = {"", ""};
	for (Map.Entry<String, Double> entry : aMap.entrySet()) {
		String key = entry.getKey();
		Double value = entry.getValue();
		strs[0] += key + delimiter;
		strs[1] += value + delimiter;
	}
	return strs;
}
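A quick usage sketch for the helper above (assuming the enclosing Utility class is on the classpath; the map contents are invented):

LinkedHashMap<String, Double> metrics = new LinkedHashMap<>();
metrics.put("precision", 0.91);
metrics.put("recall", 0.87);
String[] lines = Utility.linkedHashMapToStrings(metrics, "\t");
// lines[0] -> "precision\trecall\t"  (keys, in insertion order, each followed by the delimiter)
// lines[1] -> "0.91\t0.87\t"         (values, in the same order)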
 
Example 12
Source File: DeleteManagerTest.java    From ambry with Apache License 2.0
/**
 * Runs the router code resolution test based on the input
 * @param codesToSetAndTest a {@link LinkedHashMap} that defines the ordering of the router error codes and also
 *                          provides the server error codes that must be set and their equivalent router error codes.
 * @throws Exception
 */
private void doRouterErrorCodeResolutionTest(LinkedHashMap<ServerErrorCode, RouterErrorCode> codesToSetAndTest)
    throws Exception {
  if (codesToSetAndTest.size() * 2 > serverLayout.getMockServers().size()) {
    throw new IllegalStateException("Cannot run test because there aren't enough servers for the given codes");
  }
  List<ServerErrorCode> serverErrorCodes =
      new ArrayList<>(Collections.nCopies(serverLayout.getMockServers().size(), ServerErrorCode.Blob_Not_Found));
  List<RouterErrorCode> expected = new ArrayList<>(codesToSetAndTest.size());
  // fill in the array with all the error codes that need resolution and knock them off one by one
  // has to be repeated because the op tracker returns failure if it sees 8/9 failures and the success target is 2
  int serverIdx = 0;
  for (Map.Entry<ServerErrorCode, RouterErrorCode> entry : codesToSetAndTest.entrySet()) {
    serverErrorCodes.set(serverIdx, entry.getKey());
    serverErrorCodes.set(serverIdx + 1, entry.getKey());
    expected.add(entry.getValue());
    serverIdx += 2;
  }
  expected.add(RouterErrorCode.BlobDoesNotExist);
  for (int i = 0; i < expected.size(); i++) {
    List<ServerErrorCode> shuffled = new ArrayList<>(serverErrorCodes);
    Collections.shuffle(shuffled);
    setServerErrorCodes(shuffled, serverLayout);
    deleteErrorCodeChecker.testAndAssert(expected.get(i));
    if (i * 2 + 1 < serverErrorCodes.size()) {
      serverErrorCodes.set(i * 2, ServerErrorCode.Blob_Not_Found);
      serverErrorCodes.set(i * 2 + 1, ServerErrorCode.Blob_Not_Found);
    }
  }
  serverLayout.getMockServers().forEach(MockServer::resetServerErrors);
}
 
Example 13
Source File: OmsGeopaparazzi4Converter.java    From hortonmachine with GNU General Public License v3.0
private void projectInfo( IHMConnection connection, File outputFolderFile ) throws Exception {
    StringBuilder sb = new StringBuilder();
    sb.append("PROJECT INFO\n");
    sb.append("----------------------\n\n");

    LinkedHashMap<String, String> metadataMap = GeopaparazziUtilities.getProjectMetadata(connection);
    if (metadataMap == null || metadataMap.size() == 0) {
        return;
    }
    for( Entry<String, String> entry : metadataMap.entrySet() ) {
        sb.append(entry.getKey()).append(" = ").append(entry.getValue()).append("\n");
    }

    FileUtilities.writeFile(sb.toString(), new File(outputFolderFile, "project_info.txt"));
}
 
Example 14
Source File: Scaffolder.java    From medusa with GNU General Public License v3.0
private double n50avaluation(String scaffoldsfilename) throws Exception {
	ArrayList<Integer> lengths = new ArrayList<>();
	LinkedHashMap<String, ProteinSequence> a = FastaReaderHelper
			.readFastaProteinSequence(new File(scaffoldsfilename));
	for (Entry<String, ProteinSequence> entry : a.entrySet()) {
		int l = entry.getValue().getLength();
		lengths.add(l);
	}
	System.out.println("Computing N50 on " + lengths.size() + " sequences.");
	System.out.println("N50: " + N50.n50(lengths));
	System.out.println("----------------------");

	return N50.n50(lengths);
}
 
Example 15
Source File: PreferenceStore.java    From translationstudio8 with GNU General Public License v2.0
public static void saveCustomCondition(String key, LinkedHashMap<String, ArrayList<String[]>> map) {
	if (map == null) {
		return;
	}
	if(map.isEmpty()){
		store.setValue(key, "");
		return;
	}
	StringBuffer temp = new StringBuffer();
	for (Entry<String, ArrayList<String[]>> entry : map.entrySet()) {
		temp.append(entry.getKey());
		temp.append(separator);
		for(String[] temp1 : entry.getValue()){
			StringBuilder tmpSb = new StringBuilder();
			for(String tmpStr : temp1){
				tmpSb.append(tmpStr.trim());
				tmpSb.append(",");
			}
			if(tmpSb.length()>0){
				tmpSb.delete(tmpSb.length()-1, tmpSb.length());
				temp.append(tmpSb+separator1);
			}
		}
		if(entry.getValue().size()>0){
			temp.delete(temp.length() - separator1.length(), temp.length());
		}
		temp.append(separator);
	}
	temp.delete(temp.length() - separator.length(), temp.length());
	store.setValue(key+"_custom", temp.toString());
}
 
Example 16
Source File: EncodeMultipleTfbsOverlap.java    From systemsgenetics with GNU General Public License v3.0
private static LinkedHashMap<String,HashMap<String, ArrayList<EncodeNarrowPeak>>> readMultipleTfbsInformation(String inputFolderTfbsData) throws IOException {
        LinkedHashMap<String,HashMap<String, ArrayList<EncodeNarrowPeak>>> data = new LinkedHashMap<>();
        File file = new File(inputFolderTfbsData);
        File[] files = file.listFiles();
        ArrayList<String> vecFiles = new ArrayList<>();
        for (File f : files) {
//            System.out.println(f.getAbsolutePath());
            vecFiles.add(f.getAbsolutePath());
        }
        
        for (String fileToRead : vecFiles) {
            TextFile reader = new TextFile(fileToRead, TextFile.R);
            
            String[] storingInformation = fileToRead.split("_");
//            String cellLine = storingInformation[1].replace("TFBS\\","");
            String transcriptionFactor = storingInformation[2].replace(".narrowPeak","");
            if(storingInformation.length>4){
                for(int i=3;i<(storingInformation.length-1);++i){
                    transcriptionFactor = transcriptionFactor +"_"+storingInformation[i].replace(".narrowPeak","");
                }
            }
            
            String row;
            while((row=reader.readLine())!=null){

                String[] parts = StringUtils.split(row, '\t');
                if(!data.containsKey(transcriptionFactor)){
                    data.put(transcriptionFactor, new HashMap<String, ArrayList<EncodeNarrowPeak>>());
                }
                if(!data.get(transcriptionFactor).containsKey(parts[0])){
                    data.get(transcriptionFactor).put(parts[0], new ArrayList<EncodeNarrowPeak>());
                }
                data.get(transcriptionFactor).get(parts[0]).add(new EncodeNarrowPeak(parts, fileToRead));
            }

            reader.close();
        
        }
        ArrayList<String> cleanList = new ArrayList<>();
        for(Entry<String,HashMap<String, ArrayList<EncodeNarrowPeak>>> tfInformation : data.entrySet()){
            System.out.println("Transcription factor: "+tfInformation.getKey());
            int counter = 0;
            for(Entry<String, ArrayList<EncodeNarrowPeak>> tfEntry : tfInformation.getValue().entrySet()){
                Collections.sort(tfEntry.getValue());
                counter+=tfEntry.getValue().size();
            }
            System.out.println("\tcontacts: "+counter);
            
            //remove all with less than 750 contacts
//            if(counter<750){
//                cleanList.add(tfInformation.getKey());
//            }
        }
        
        for(String k : cleanList){
            data.remove(k);
        }
        
        return data;
}
 
Example 17
Source File: GetCapabilitiesHandler.java    From web-feature-service with Apache License 2.0
private void addServiceAndOperationConstraints(OperationsMetadata operationsMetadata) {
	ValueType trueValue = new ValueType();
	trueValue.setValue("TRUE");

	ValueType falseValue = new ValueType();
	falseValue.setValue("FALSE");

	// mandatory constraints
	LinkedHashMap<String, ValueType> constraints = new LinkedHashMap<>();
	constraints.put("ImplementsBasicWFS", conformance.implementsBasicWFS() ? trueValue : falseValue);
	constraints.put("ImplementsTransactionalWFS", conformance.implementsTransactionalWFS() ? trueValue : falseValue);
	constraints.put("ImplementsLockingWFS", conformance.implementsLockingWFS() ? trueValue : falseValue);
	constraints.put("KVPEncoding", conformance.implementsKVPEncoding() ? trueValue : falseValue);
	constraints.put("XMLEncoding", conformance.implementsXMLEncoding() ? trueValue : falseValue);
	constraints.put("SOAPEncoding", conformance.implementsSOAPEncoding() ? trueValue : falseValue);
	constraints.put("ImplementsInheritance", conformance.implementsInheritance() ? trueValue : falseValue);
	constraints.put("ImplementsRemoteResolve", conformance.implementsRemoteResolve() ? trueValue : falseValue);
	constraints.put("ImplementsResultPaging", conformance.implementsResultPaging() ? trueValue : falseValue);
	constraints.put("ImplementsStandardJoins", conformance.implementsStandardJoins() ? trueValue : falseValue);
	constraints.put("ImplementsSpatialJoins", conformance.implementsSpatialJoins() ? trueValue : falseValue);
	constraints.put("ImplementsTemporalJoins", conformance.implementsTemporalJoins() ? trueValue : falseValue);
	constraints.put("ImplementsFeatureVersioning", conformance.implementsFeatureVersioning() ? trueValue : falseValue);
	constraints.put("ManageStoredQueries", conformance.implementsManageStoredQueries() ? trueValue : falseValue);			

	for (Entry<String, ValueType> entry : constraints.entrySet()) {
		DomainType constraint = new DomainType();
		constraint.setName(entry.getKey());
		constraint.setDefaultValue(entry.getValue());
		constraint.setNoValues(new NoValues());
		operationsMetadata.getConstraint().add(constraint);
	}

	// optional constraints
	// default count
	if (wfsConfig.getConstraints().isSetCountDefault()) {
		DomainType countDefault = new DomainType();
		countDefault.setName("CountDefault");
		ValueType countDefaultValue = new ValueType();
		countDefaultValue.setValue(String.valueOf(wfsConfig.getConstraints().getCountDefault()));
		countDefault.setDefaultValue(countDefaultValue);
		countDefault.setNoValues(new NoValues());
		operationsMetadata.getConstraint().add(countDefault);
	}

	// announce supported query types
	DomainType queryExpressions = new DomainType();
	queryExpressions.setName("QueryExpressions");
	queryExpressions.setAllowedValues(new AllowedValues());

	ValueType storedQueryValue = new ValueType();
	storedQueryValue.setValue(new StringBuilder(Constants.WFS_NAMESPACE_PREFIX).append(":").append("StoredQuery").toString());
	queryExpressions.getAllowedValues().getValueOrRange().add(storedQueryValue);
	operationsMetadata.getConstraint().add(queryExpressions);
}
 
Example 18
Source File: SqlQueryBuilder.java    From incubator-pinot with Apache License 2.0
public PreparedStatement createUpdateStatementForIndexTable(Connection connection,
    AbstractIndexEntity entity) throws Exception {
  String tableName =
      entityMappingHolder.tableToEntityNameMap.inverse().get(entity.getClass().getSimpleName());
  LinkedHashMap<String, ColumnInfo> columnInfoMap =
      entityMappingHolder.columnInfoPerTable.get(tableName);

  StringBuilder sqlBuilder = new StringBuilder("UPDATE " + tableName + " SET ");
  String delim = "";
  LinkedHashMap<String, Object> parameterMap = new LinkedHashMap<>();
  for (ColumnInfo columnInfo : columnInfoMap.values()) {
    String columnNameInDB = columnInfo.columnNameInDB;
    if (!columnNameInDB.equalsIgnoreCase(BASE_ID)
        && !AUTO_UPDATE_COLUMN_SET.contains(columnNameInDB)) {
      Object val = columnInfo.field.get(entity);
      if (val != null) {
        if (Enum.class.isAssignableFrom(val.getClass())) {
          val = val.toString();
        }
        sqlBuilder.append(delim);
        sqlBuilder.append(columnNameInDB);
        sqlBuilder.append("=");
        sqlBuilder.append("?");
        delim = ",";
        parameterMap.put(columnNameInDB, val);
      }
    }
  }
  //ADD WHERE CLAUSE TO CHECK FOR ENTITY ID
  sqlBuilder.append(" WHERE base_id=?");
  parameterMap.put(BASE_ID, entity.getBaseId());
  LOG.debug("Update statement:{}" , sqlBuilder);
  int parameterIndex = 1;
  PreparedStatement prepareStatement = connection.prepareStatement(sqlBuilder.toString());
  for (Entry<String, Object> paramEntry : parameterMap.entrySet()) {
    String dbFieldName = paramEntry.getKey();
    ColumnInfo info = columnInfoMap.get(dbFieldName);
    LOG.debug("Setting value: {} for {}", paramEntry.getValue(), dbFieldName);
    prepareStatement.setObject(parameterIndex++, paramEntry.getValue(), info.sqlType);
  }
  return prepareStatement;
}
 
Example 19
Source File: ServerStatistics.java    From iaf with Apache License 2.0
@PUT
@RolesAllowed({"IbisAdmin", "IbisTester"})
@Path("/server/log")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response updateLogConfiguration(LinkedHashMap<String, Object> json) throws ApiException {

	Level loglevel = null;
	Boolean logIntermediaryResults = true;
	int maxMessageLength = -1;
	Boolean enableDebugger = null;
	StringBuilder msg = new StringBuilder();

	Logger rootLogger = LogUtil.getRootLogger();

	for (Entry<String, Object> entry : json.entrySet()) {
		String key = entry.getKey();
		Object value = entry.getValue();
		if(key.equalsIgnoreCase("loglevel")) {
			loglevel = Level.toLevel(""+value);
		}
		else if(key.equalsIgnoreCase("logIntermediaryResults")) {
			logIntermediaryResults = Boolean.parseBoolean(""+value);
		}
		else if(key.equalsIgnoreCase("maxMessageLength")) {
			maxMessageLength = Integer.parseInt(""+value);
		}
		else if(key.equalsIgnoreCase("enableDebugger")) {
			enableDebugger = Boolean.parseBoolean(""+value);
		}
	}

	if(loglevel != null && rootLogger.getLevel() != loglevel) {
		Configurator.setLevel(rootLogger.getName(), loglevel);
		msg.append("LogLevel changed from [" + rootLogger.getLevel() + "] to [" + loglevel +"]");
	}

	boolean logIntermediary = AppConstants.getInstance().getBoolean("log.logIntermediaryResults", true);
	if(logIntermediary != logIntermediaryResults) {
		AppConstants.getInstance().put("log.logIntermediaryResults", "" + logIntermediaryResults);

		if(msg.length() > 0)
			msg.append(", logIntermediaryResults from [" + logIntermediary+ "] to [" + logIntermediaryResults + "]");
		else
			msg.append("logIntermediaryResults changed from [" + logIntermediary+ "] to [" + logIntermediaryResults + "]");
	}

	if (maxMessageLength != IbisMaskingLayout.getMaxLength()) {
		if(msg.length() > 0)
			msg.append(", logMaxMessageLength from [" + IbisMaskingLayout.getMaxLength() + "] to [" + maxMessageLength + "]");
		else
			msg.append("logMaxMessageLength changed from [" + IbisMaskingLayout.getMaxLength() + "] to [" + maxMessageLength + "]");
		IbisMaskingLayout.setMaxLength(maxMessageLength);
	}

	if (enableDebugger!=null) {
		boolean testtoolEnabled=AppConstants.getInstance().getBoolean("testtool.enabled", true);
		if (testtoolEnabled!=enableDebugger) {
			AppConstants.getInstance().put("testtool.enabled", "" + enableDebugger);
			DebuggerStatusChangedEvent event = new DebuggerStatusChangedEvent(this, enableDebugger);
			ApplicationEventPublisher applicationEventPublisher = getIbisManager().getApplicationEventPublisher();
			if (applicationEventPublisher!=null) {
				log.info("setting debugger enabled ["+enableDebugger+"]");
				applicationEventPublisher.publishEvent(event);
			} else {
				log.warn("no applicationEventPublisher, cannot set debugger enabled ["+enableDebugger+"]");
			}
		}
	}

	if(msg.length() > 0) {
		log.warn(msg.toString());
		LogUtil.getLogger("SEC").info(msg.toString());
	}

	return Response.status(Response.Status.NO_CONTENT).build();
}
 
Example 20
Source File: ShowScheduler.java    From iaf with Apache License 2.0
@PUT
@RolesAllowed({"IbisDataAdmin", "IbisAdmin", "IbisTester"})
@Path("/schedules/{groupName}/job/{jobName}")
@Relation("schedules")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response trigger(@PathParam("jobName") String jobName, @PathParam("groupName") String groupName, LinkedHashMap<String, Object> json) throws ApiException {
	Scheduler scheduler = getScheduler();

	String commandIssuedBy = servletConfig.getInitParameter("remoteHost");
	commandIssuedBy += servletConfig.getInitParameter("remoteAddress");
	commandIssuedBy += servletConfig.getInitParameter("remoteUser");

	if(log.isInfoEnabled()) log.info("trigger job jobName [" + jobName + "] groupName [" + groupName + "] " + commandIssuedBy);
	JobKey jobKey = JobKey.jobKey(jobName, groupName);

	String action = ""; //PAUSE,RESUME,TRIGGER

	for (Entry<String, Object> entry : json.entrySet()) {
		String key = entry.getKey();
		if(key.equalsIgnoreCase("action")) { // pause, resume or trigger the job
			action = (String) entry.getValue();
		}
	}

	try {
		if("pause".equals(action)) {
			scheduler.pauseJob(jobKey);
		}
		else if("resume".equals(action)) {
			scheduler.resumeJob(jobKey);
		}
		else if("trigger".equals(action)) {
			scheduler.triggerJob(jobKey);
		}
		else {
			throw new ApiException("no (valid) action provided! Expected one of PAUSE,RESUME,TRIGGER");
		}
	} catch (SchedulerException e) {
		throw new ApiException("Failed to "+action+" job", e); 
	}

	return Response.status(Response.Status.OK).build();
}