Java Code Examples for gnu.trove.map.hash.TObjectIntHashMap#put()

The following examples show how to use gnu.trove.map.hash.TObjectIntHashMap#put(). They are taken from open-source projects; the source file, project, and license are noted above each example.
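Before the examples, a minimal sketch of the put() contract may help (based on the Trove 3 API): put(key, value) maps an object key to a primitive int and returns the value previously associated with the key, or the map's no-entry value if there was none. The same no-entry value is what get() returns for missing keys, so several examples below pick a sentinel that can never be a legal value. The class name and values here are illustrative:

import gnu.trove.map.hash.TObjectIntHashMap;

public class PutContractDemo {
  public static void main(String[] args) {
    // The third constructor argument is the no-entry value (the default is 0);
    // it is what put() and get() return when the key has no mapping yet.
    TObjectIntHashMap<String> counts = new TObjectIntHashMap<>(16, 0.5f, -1);
    int previous = counts.put("apple", 3);   // -1: "apple" was not mapped before
    previous = counts.put("apple", 5);       // 3: the value that was replaced
    int missing = counts.get("banana");      // -1: no boxing, no null
    System.out.println(previous + " " + missing);
  }
}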
Example 1
Source File: DataAccessKeyValueStore.java    From ambiverse-nlu with Apache License 2.0
public TObjectIntHashMap<String> getAllWordIds() throws EntityLinkingDataAccessException {
  TObjectIntHashMap<String> wordIds;
  try {
    wordIds = new TObjectIntHashMap<>();
    DatabaseKeyValueStore db = DatabaseKeyValueStore.WORD_IDS_WORD;
    if(db.getPartitions() != 1) {
      throw new IllegalArgumentException("Multiple partitions not supported for this key-value store");
    }
    Codec codec = DataAccessKeyValueStoreHandler.singleton().getCodec(db);
    KeyValueStore<byte[], byte[]> keyValueStore = DataAccessKeyValueStoreHandler.singleton().getKeyValueStore(db);
    KeyValueStore.EntryIterator it = keyValueStore.entryIterator();
    while (it.hasNext()) {
      KeyValueStore.Entry<byte[], byte[]> entry = it.next();
      String key = (String) codec.decodeKey(entry.getKey());
      int value = (int) codec.decodeValue(entry.getValue());
      wordIds.put(key, value);
    }
  } catch (Exception e) {
    throw new EntityLinkingDataAccessException(e);
  }
  return wordIds;
}
 
Example 2
Source File: TriTyperExpressionData.java    From systemsgenetics with GNU General Public License v3.0
public void pruneAndReorderSamples(List<String> colObjects) {
	double[][] newMatrix = new double[matrix.length][colObjects.size()];
	TObjectIntHashMap<String> indToInd = new TObjectIntHashMap<String>(colObjects.size(), 1f, -9);
	for (int i = 0; i < colObjects.size(); i++) {
		String ind = colObjects.get(i);
		indToInd.put(ind, i);
	}
	
	for (int i = 0; i < individuals.length; i++) {
		int newId = indToInd.get(individuals[i]); // get() returns the no-entry value (-9) for pruned samples
		if (newId != -9) {
			for (int row = 0; row < matrix.length; row++) {
				newMatrix[row][newId] = matrix[row][i];
			}
		}
	}
	
	matrix = newMatrix;
	individualNameToId = indToInd;
	individuals = colObjects.toArray(new String[0]);
}
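Example 2 shows why the three-argument constructor matters: the no-entry value -9 doubles as a "sample was pruned" sentinel, so a single primitive get() replaces a containsKey()/get() pair. A stripped-down sketch of the same idiom (names are illustrative):

import gnu.trove.map.hash.TObjectIntHashMap;
import java.util.Arrays;
import java.util.List;

public class SentinelLookupDemo {
  private static final int NOT_PRESENT = -9; // must never be a legal index

  public static void main(String[] args) {
    List<String> samples = Arrays.asList("s1", "s2", "s3");
    TObjectIntHashMap<String> indexOf =
        new TObjectIntHashMap<>(samples.size(), 1f, NOT_PRESENT);
    for (int i = 0; i < samples.size(); i++) {
      indexOf.put(samples.get(i), i);
    }
    // One primitive lookup, no containsKey() and no Integer boxing.
    int idx = indexOf.get("s2");   // 1
    int gone = indexOf.get("s9");  // -9 (NOT_PRESENT)
    System.out.println(idx + " " + gone);
  }
}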
 
Example 3
Source File: InitialAlgebra.java    From CQL with GNU Affero General Public License v3.0
private boolean add(En en, Term<Void, En, Void, Fk, Void, Gen, Void> term) {
	int x = nf0(en, term);
	if (x != -1) {
		return false;
	}
	x = fresh++;

	nfs.put(term, x);
	ens.get(en).add(x);
	reprs.put(x, term);

	TObjectIntHashMap<Fk> map = new TObjectIntHashMap<>(16, .75f, -1);
	for (Fk fk : schema().fksFrom(en)) {
		En e = schema().fks.get(fk).second;
		Term<Void, En, Void, Fk, Void, Gen, Void> z = Term.Fk(fk, term);
		add(e, z);
		map.put(fk, nf0(e, z));
	}
	fks.put(x, map);

	if (fresh % 10000 == 0 && Thread.currentThread().isInterrupted()) {
		throw new IgnoreException();
	}
	return true;
}
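Example 3 pairs nfs.put() (term to id) with a reverse map reprs (id to term) to intern terms as dense integer ids. A stripped-down sketch of that bidirectional interning pattern, using String in place of Term (all names here are illustrative):

import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;

public class InternPoolDemo {
  private final TObjectIntHashMap<String> ids = new TObjectIntHashMap<>(16, 0.75f, -1);
  private final TIntObjectHashMap<String> reprs = new TIntObjectHashMap<>();
  private int fresh = 0;

  // Returns the existing id for the value, or assigns the next fresh id.
  int intern(String value) {
    int id = ids.get(value);
    if (id == -1) {          // -1 is the no-entry value: not interned yet
      id = fresh++;
      ids.put(value, id);
      reprs.put(id, value);
    }
    return id;
  }

  public static void main(String[] args) {
    InternPoolDemo pool = new InternPoolDemo();
    System.out.println(pool.intern("a")); // 0
    System.out.println(pool.intern("b")); // 1
    System.out.println(pool.intern("a")); // 0 again
  }
}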
 
Example 4
Source File: DictionaryBuilder.java    From ambiverse-nlu with Apache License 2.0
private void computeEntityIds(Map<String, TObjectIntHashMap<DictionaryEntity>> dictionaryEntries, TObjectIntHashMap<KBIdentifiedEntity> entityIds) {
  int entityId = entityIds.size() + 1;
  // Assign int ids to entities, continuing after any ids already present (ids start at 1).
  for (Entry<String, TObjectIntHashMap<DictionaryEntity>> e : dictionaryEntries.entrySet()) {
    // All entities are added as-is.
    for (DictionaryEntity entity : e.getValue().keySet()) {
      KBIdentifiedEntity kbEntity = KBIdentifiedEntity.getKBIdentifiedEntity(entity.entity, entity.knowledgebase);
      if (!entityIds.containsKey(kbEntity)) {
        entityIds.put(kbEntity, entityId++);
      }
    }
  }
}
 
Example 5
Source File: DataAccessForTesting.java    From ambiverse-nlu with Apache License 2.0
@Override public TObjectIntHashMap<KBIdentifiedEntity> getInternalIdsForKBEntities(Collection<KBIdentifiedEntity> entities) {
  TObjectIntHashMap<KBIdentifiedEntity> ids = new TObjectIntHashMap<KBIdentifiedEntity>(entities.size());
  for (KBIdentifiedEntity entity : entities) {
    if (entity2id.containsKey(entity.getIdentifier())) {
      ids.put(entity, entity2id.get(entity.getIdentifier()));
    }
  }
  return ids;
}
 
Example 6
Source File: DataAccessForTesting.java    From ambiverse-nlu with Apache License 2.0
@Override public TObjectIntHashMap<String> getIdsForWords(Collection<String> words) {
  TObjectIntHashMap<String> ids = new TObjectIntHashMap<String>(words.size());
  for (String word : words) {
    int id = ids.getNoEntryValue(); // unknown words are stored explicitly with the no-entry value
    if (word2id.containsKey(word)) {
      id = word2id.get(word);
    }
    ids.put(word, id);
  }
  return ids;
}
 
Example 7
Source File: DataAccessKeyValueStore.java    From ambiverse-nlu with Apache License 2.0
public TObjectIntHashMap<String> getIdsForWords(Collection<String> keywords) throws EntityLinkingDataAccessException {
  logger.debug("Getting ids for words.");
  TObjectIntHashMap<String> wordIds = new TObjectIntHashMap<>(getCapacity(keywords.size()), troveLoadFactor);
  if (keywords.isEmpty()) {
    return wordIds;
  }
  try {
    DatabaseKeyValueStore db = DatabaseKeyValueStore.WORD_IDS_WORD;
    if(db.getPartitions() != 1) {
      throw new IllegalArgumentException("Multiple partitions not supported for this key-value store");
    }

    logger.debug("Creating codec.");
    Codec codec = DataAccessKeyValueStoreHandler.singleton().getCodec(db);

    logger.debug("Creating key-value store for db {}.", db.getName());
    KeyValueStore<byte[], byte[]> keyValueStore = DataAccessKeyValueStoreHandler.singleton().getKeyValueStore(db);

    logger.debug("Key-value store returned.");
    List<byte[]> encodedKeys = new ArrayList<>();
    for (String word : keywords) {
      logger.debug("Encoding keys for word {}.", word);
      encodedKeys.add(codec.encodeKey(word));
    }
    Map<byte[], byte[]> keyValueMap = keyValueStore.getAll(encodedKeys);

    for (Map.Entry<byte[], byte[]> entry : keyValueMap.entrySet()) {
      if (entry.getKey() == null || entry.getValue() == null) continue;
      wordIds.put((String) codec.decodeKey(entry.getKey()), (int) codec.decodeValue(entry.getValue()));
    }
  } catch (Exception e) {
    e.printStackTrace();
    throw new EntityLinkingDataAccessException(e);
  }
  logger.debug("Word ids fetched.");
  return wordIds;
}
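Example 7 pre-sizes the map from the expected number of entries (getCapacity() and troveLoadFactor are project-specific helpers). The general rule for hash maps is that holding n entries without rehashing requires a capacity of roughly n / loadFactor. A sketch of that sizing with a hypothetical helper of our own:

import gnu.trove.map.hash.TObjectIntHashMap;

public class PresizeDemo {
  static final float LOAD_FACTOR = 0.5f;

  // Capacity that should let expectedEntries fit without rehashing (illustrative helper).
  static int capacityFor(int expectedEntries) {
    return (int) Math.ceil(expectedEntries / LOAD_FACTOR);
  }

  public static void main(String[] args) {
    int expected = 100_000;
    TObjectIntHashMap<String> ids =
        new TObjectIntHashMap<>(capacityFor(expected), LOAD_FACTOR);
    for (int i = 0; i < expected; i++) {
      ids.put("word" + i, i);
    }
    System.out.println(ids.size()); // 100000
  }
}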
 
Example 8
Source File: StrippedPartition.java    From metanome-algorithms with Apache License 2.0
public StrippedPartition(String[] columnContent) {
	TObjectIntHashMap<String> valueToIndex = new TObjectIntHashMap<>();
	LinkedHashMap<Integer, TEquivalence> helpMap = new LinkedHashMap<>();
	
	for (int rowIndex = 0; rowIndex < columnContent.length; rowIndex++) {
		String value = columnContent[rowIndex];
		// if the value wasn't there yet, the row index becomes the representative 
		// for that equivalence class
		if (!valueToIndex.containsKey(value)) {
			valueToIndex.put(value, rowIndex);
			TEquivalence equivalenceGroup = new EquivalenceGroupTIntHashSet();
			equivalenceGroup.add(rowIndex);
			helpMap.put(Integer.valueOf(rowIndex), equivalenceGroup);
		} 
		// otherwise find the right equivalence class and add the current element index
		else {
			int equivalenceGroupIndex = valueToIndex.get(value);
			TEquivalence equivalenceClass = helpMap.get(Integer.valueOf(equivalenceGroupIndex));
			equivalenceClass.add(rowIndex);
		}
	}
	// remove equivalence classes with only one element
	for(Iterator<Map.Entry<Integer, TEquivalence>> it=helpMap.entrySet().iterator(); it.hasNext();) {
	     Map.Entry<Integer, TEquivalence> entry = it.next();
	     if (entry.getValue().size() <= 1) {
	          it.remove();
	     }
	 }

	// sort the stripped partition by equivalence group sizes
	this.addAll(helpMap.values());
}
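Example 8 uses the classic containsKey()-then-put() sequence to record the first row index of each value. Trove 3's TObjectIntMap also offers putIfAbsent(), which does the same with a single hash lookup: it stores the value only when the key is absent and returns the existing mapping (or the no-entry value) otherwise. A sketch of Example 8's first-occurrence logic rewritten that way (assuming putIfAbsent is available in your Trove version):

import gnu.trove.map.hash.TObjectIntHashMap;

public class FirstIndexDemo {
  public static void main(String[] args) {
    String[] column = {"a", "b", "a", "c", "b"};
    // No-entry value -1 distinguishes "first occurrence" from any valid row index.
    TObjectIntHashMap<String> firstIndex = new TObjectIntHashMap<>(16, 0.75f, -1);
    for (int row = 0; row < column.length; row++) {
      // putIfAbsent returns the existing value, or the no-entry value on first insert.
      int existing = firstIndex.putIfAbsent(column[row], row);
      if (existing == firstIndex.getNoEntryValue()) {
        System.out.println("row " + row + " starts a new equivalence class");
      } else {
        System.out.println("row " + row + " joins the class of row " + existing);
      }
    }
  }
}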
 
Example 9
Source File: BinaryMetaAnalysis.java    From systemsgenetics with GNU General Public License v3.0
protected void loadSNPAnnotation() throws IOException {

	snpChr = new String[snpList.length];
	snpPositions = new int[snpList.length];
	for (int s = 0; s < snpList.length; s++) {
		snpChr[s] = "-10".intern();
		snpPositions[s] = -10;
	}

	TObjectIntHashMap<String> snpMap = new TObjectIntHashMap<String>(snpList.length);
	for (int s = 0; s < snpList.length; s++) {
		snpMap.put(snpList[s], s);
	}

	// Load annotation only for SNPs that are present in the datasets.
	TextFile tf = new TextFile(settings.getSNPAnnotationFile(), TextFile.R, 10 * 1048576);
	String[] elems = tf.readLineElems(TextFile.tab);

	while (elems != null) {
		if (elems.length > 2) {
			String snp = elems[2];
			if (snpMap.contains(snp)) {
				int id = snpMap.get(snp);
				// Copy the chromosome string (detaching it from the parsed line) and intern it to share repeated values.
				snpChr[id] = new String(elems[0].getBytes("UTF-8")).intern();
				snpPositions[id] = Integer.parseInt(elems[1]);
			}
		}
		elems = tf.readLineElems(TextFile.tab);
	}

	tf.close();
}
 
Example 10
Source File: BinaryMicrobePcaAnalysis.java    From systemsgenetics with GNU General Public License v3.0
private void loadSNPAnnotation() throws IOException {
	
	snpChr = new String[snpList.length];
	snpPositions = new int[snpList.length];
	for (int s = 0; s < snpList.length; s++) {
		snpChr[s] = "-10".intern();
		snpPositions[s] = -10;
	}
	
	TObjectIntHashMap<String> snpMap = new TObjectIntHashMap<String>(snpList.length);
	for (int s = 0; s < snpList.length; s++) {
		snpMap.put(snpList[s], s);
	}
	TextFile tf = new TextFile(settings.getSNPAnnotationFile(), TextFile.R);
	
	String[] elems = tf.readLineElems(TextFile.tab);
	while (elems != null) {
		String snp = elems[2];
		if (snpMap.contains(snp)) {
			int id = snpMap.get(snp);
			snpChr[id] = new String(elems[0].getBytes("UTF-8")).intern();
			snpPositions[id] = Integer.parseInt(elems[1]);
		}
		elems = tf.readLineElems(TextFile.tab);
	}
	tf.close();
	
}
 
Example 11
Source File: STLMeshDecoder.java    From cineast with MIT License
/**
 * Reads an ASCII STL file.
 *
 * @param is InputStream to read from.
 * @return Mesh
 * @throws IOException If an error occurs during reading.
 */
private Mesh readAscii(InputStream is)  throws IOException {
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    String line = null;

    /* Prepare empty mesh. */
    Mesh mesh = new Mesh(100,100);

    /* Prepare helper structures. */
    TObjectIntHashMap<Vector3f> vertexBuffer = new TObjectIntHashMap<>();
    int index = 0;
    int[] vertexindices = new int[3];

    while ((line = br.readLine()) != null) {
        line = line.trim();

        /* Detect end of STL file (checked after trimming, which also catches an indented 'endsolid'). */
        if (line.startsWith("endsolid")) {
          break;
        }

        /* Detect begin of facet. */
        if (line.startsWith("facet normal ")) {
            int vidx = 0;

            while ((line = br.readLine()) != null) {

                line = line.trim(); /* Trim line. */

                /* Detect end of facet. */
                if (line.equals("endfacet")) {
                  break;
                }

                /* Detect vertex. */
                if (line.startsWith("vertex")) {
                    String[] splitVertex = line.split("\\s+");
                    Vector3f vertex = new Vector3f(Float.parseFloat(splitVertex[1]),Float.parseFloat(splitVertex[2]), Float.parseFloat(splitVertex[3]));
                    if (!vertexBuffer.containsKey(vertex)) {
                        mesh.addVertex(vertex);
                        vertexBuffer.put(vertex, index);
                        index++;
                    }
                    vertexindices[vidx] = vertexBuffer.get(vertex);
                    vidx++;
                }
            }

             /* Add a new face to the Mesh. */
            mesh.addFace(new Vector3i(vertexindices[0], vertexindices[1], vertexindices[2]));
        }
    }

    /* Close the buffered reader. */
    br.close();

    /* This covers the case, where the file starts with 'solid ' but is not an ASCII file. Unfortunately, such files do exist. */
    if (mesh.numberOfVertices() == 0) {
        LOGGER.warn("The provided ASCII STL file does not seem to contain any normals or vertices. Trying to decode it as binary STL even though it was marked as being ASCII.");
        InputStream newIs = Files.newInputStream(this.inputFile);
        return this.readBinary(newIs, 80);
    } else {
        return mesh;
    }
}
 
Example 12
Source File: STLMeshDecoder.java    From cineast with MIT License
/**
 * Reads a binary STL file.
 *
 * @param is InputStream to read from.
 * @param skip Number of bytes to skip before reading the STL file.
 * @return Mesh
 * @throws IOException If an error occurs during reading.
 */
private Mesh readBinary(InputStream is, int skip) throws IOException {
    /* Prepare a ByteBuffer to read the rest of the STL file. */
    byte[] bytes = new byte[50];
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    buffer.order(ByteOrder.LITTLE_ENDIAN);

    /* Skip the STL header! */
    is.skip(skip);

    /* Read the bytes for the size (unsigned 32 bit int, little-endian). */
    byte[] sizeBytes = new byte[4];
    is.read(sizeBytes, 0, 4);
    long triangles = ((sizeBytes[0] & 0xFF)) | ((sizeBytes[1] & 0xFF) << 8) | ((sizeBytes[2] & 0xFF) << 16) | ((sizeBytes[3] & 0xFF) << 24);

    /* TODO: Properly handle models whose triangles > MAX_TRIANGLES. */
    if (triangles <= 0) {
        LOGGER.error("The number of triangles in the Mesh seems to be smaller than zero. This STL file is probably corrupt!");
        return null;
    } else if (triangles > MAX_TRIANGLES) {
        LOGGER.error("The number of triangles in the Mesh exceeds the limit that can currently be processed by STLMeshDecoder. The Mesh will be skipped!");
        return null;
    }

    /* Prepare Mesh. */
    Mesh mesh = new Mesh((int)triangles, (int)triangles);

    /* Prepare helper structures. */
    TObjectIntHashMap<Vector3f> vertexBuffer = new TObjectIntHashMap<>();
    int index = 0;
    int[] vertexindices = new int[3];

    /* Now add all triangles. */
    for (int i=0; i<triangles; i++) {
        /* Read the 50-byte facet record (12 little-endian floats plus a 2-byte attribute count). */
        buffer.rewind();
        is.read(bytes);

        /* Read and ignore three floats. */
        buffer.getFloat();
        buffer.getFloat();
        buffer.getFloat();

        /* Add the vertices and the vertex-normal to the mesh. */
        for (int vidx = 0; vidx < 3; vidx++) {
            Vector3f vertex = new Vector3f(buffer.getFloat(), buffer.getFloat(), buffer.getFloat());
            if (!vertexBuffer.containsKey(vertex)) {
                mesh.addVertex(vertex);
                vertexBuffer.put(vertex, index);
                index++;
            }
            vertexindices[vidx] = vertexBuffer.get(vertex);
        }

        /* Add a new face to the Mesh. */
        if (!mesh.addFace(new Vector3i(vertexindices[0], vertexindices[1], vertexindices[2]))) {
            LOGGER.warn("Could not add face {}/{}/{} because index points to non-existing vertex.", vertexindices[0], vertexindices[1], vertexindices[2]);
        }
    }

    /* Closes the InputStream. */
    is.close();
    return mesh;
}
 
Example 13
Source File: MeshParser.java    From cineast with MIT License
/**
   * Parses a Base64 encoded data url and treats it as Geometry JSON used by the Three.js JavaScript library.
   * Tries to parse the structure into a 3D mesh.
   *
   * @param dataUrl Data URL that should be parsed.
   * @return Mesh; if parsing fails, the returned Mesh will be empty.
   */
  public static Mesh parseThreeJSV4Geometry(String dataUrl) {
      /* Convert Base64 string into byte array. */
      byte[] bytes = dataURLtoByteArray(dataUrl, MIME_TYPE);

      ObjectMapper mapper = new ObjectMapper();
      try {
          /* Read the JSON structure of the transmitted mesh data. */
          JsonNode node = mapper.readTree(bytes);
          JsonNode vertices = node.get(VERTICES_PROPERTY_NAME_THREEV4);
          if (vertices == null || !vertices.isArray() || vertices.size() == 0)  {
              LOGGER.error("Submitted mesh does not contain any vertices. Aborting...");
              return Mesh.EMPTY;
          }

          /* Create new Mesh. */
          Mesh mesh = new Mesh(vertices.size()/9, vertices.size()/3);

          /* Prepare helper structures. */
          TObjectIntHashMap<Vector3f> vertexBuffer = new TObjectIntHashMap<>();
          int index = 0;
          int[] vertexindices = new int[3];

          /* Add all the vertices and normals in the structure. */
          for (int i=0; i<=vertices.size()-9; i+=9) {
              for (int j=0; j<3; j++) {
                  int idx = i + 3*j;
                  Vector3f vertex = new Vector3f((float)vertices.get(idx).asDouble(), (float)vertices.get(idx+1).asDouble(),(float)vertices.get(idx+2).asDouble());
                  if (!vertexBuffer.containsKey(vertex)) {
                      vertexBuffer.put(vertex, index++);
                      mesh.addVertex(vertex);
                  }
                  vertexindices[j] = vertexBuffer.get(vertex);
              }

              mesh.addFace(new Vector3i(vertexindices[0], vertexindices[1], vertexindices[2]));
          }

          return mesh;
      } catch (IOException e) {
          LOGGER.error("Could not create 3d mesh from Base64 input because the file-format is not supported. {}", LogHelper.getStackTrace(e));
          return Mesh.EMPTY;
      }
  }
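Examples 11 through 13 all rely on the same vertex-deduplication idiom: a TObjectIntHashMap<Vector3f> maps each distinct vertex to the index under which it was added to the mesh, so duplicates are stored once and faces refer to existing indices. The core of that pattern reduced to plain Java (a sketch; the Vertex record stands in for JOML's Vector3f, requires Java 16, and only needs value-based equals()/hashCode()):

import gnu.trove.map.hash.TObjectIntHashMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class VertexDedupDemo {
  // Stand-in for Vector3f; a record gives us value-based equals()/hashCode() for free.
  record Vertex(float x, float y, float z) {}

  public static void main(String[] args) {
    List<Vertex> vertices = new ArrayList<>();   // plays the role of the Mesh
    TObjectIntHashMap<Vertex> seen = new TObjectIntHashMap<>();
    int index = 0;

    Vertex[] stream = {new Vertex(0, 0, 0), new Vertex(1, 0, 0), new Vertex(0, 0, 0)};
    int[] faceIndices = new int[stream.length];
    for (int i = 0; i < stream.length; i++) {
      Vertex v = stream[i];
      if (!seen.containsKey(v)) {   // first time this vertex appears
        vertices.add(v);
        seen.put(v, index++);       // remember the index it was stored under
      }
      faceIndices[i] = seen.get(v); // duplicates resolve to the original index
    }
    System.out.println(Arrays.toString(faceIndices)); // [0, 1, 0]
  }
}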