Java Code Examples for gnu.trove.map.hash.TObjectIntHashMap#get()

The following examples show how to use gnu.trove.map.hash.TObjectIntHashMap#get() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TriTyperExpressionData.java — from systemsgenetics, GNU General Public License v3.0 (6 votes)
/**
 * Prunes the expression matrix to the given samples and reorders its columns
 * to match their order in {@code colObjects}.
 *
 * <p>Samples in {@code colObjects} that are not present in {@code individuals}
 * yield all-zero columns in the new matrix.
 *
 * @param colObjects sample identifiers defining the new column set and order
 */
public void pruneAndReorderSamples(List<String> colObjects) {
	double[][] newMatrix = new double[matrix.length][colObjects.size()];
	// no-entry value -9 marks individuals that are absent from the new column set
	TObjectIntHashMap<String> indToInd = new TObjectIntHashMap<String>(colObjects.size(), 1f, -9);
	for (int i = 0; i < colObjects.size(); i++) {
		String ind = colObjects.get(i);
		indToInd.put(ind, i);
	}
	
	for (int i = 0; i < individuals.length; i++) {
		// use the primitive int Trove returns; the original boxed it into an
		// Integer, defeating the point of a primitive-valued map
		int newId = indToInd.get(individuals[i]);
		if (newId != -9) {
			// copy this individual's column into its new position
			for (int row = 0; row < matrix.length; row++) {
				newMatrix[row][newId] = matrix[row][i];
			}
		}
	}
	
	matrix = newMatrix;
	individualNameToId = indToInd;
	individuals = colObjects.toArray(new String[0]);
}
 
Example 2
Source File: InputTextWrapper.java — from ambiverse-nlu, Apache License 2.0 (5 votes)
/**
 * Wraps the tokens of {@code context} into units of the given {@link UnitType}
 * and resolves each unit string to its numeric id via {@code DataAccess}.
 *
 * @param context         tokenized input text
 * @param unitType        unit granularity; its size determines how many
 *                        consecutive tokens form one unit
 * @param removeStopwords if true, units classified as stopwords or symbols
 *                        (English) are skipped
 * @throws EntityLinkingDataAccessException if id lookup in the data store fails
 */
public InputTextWrapper(Context context, UnitType unitType, boolean removeStopwords) throws EntityLinkingDataAccessException {
  logger.debug("Wrapping input text.");
  mentionToIgnore = null;
  this.unitType = unitType;
  int unitLength = unitType.getUnitSize();
  // Too few tokens to build even one unit: leave the wrapper empty.
  if (context.getTokenCount() < unitLength) return;
  List<String> unitStrings = new ArrayList<>(context.getTokenCount());
  // Sliding window over the last unitLength tokens.
  Queue<String> curTokens = new ArrayDeque<>(unitLength);
  String[] curTokensArray = new String[unitLength];
  for (String token : context.getTokens()) {
    curTokens.add(token);
    // NOTE(review): the second clause (size - 1 == unitLength) appears
    // unreachable — the window is drained right after reaching unitLength,
    // so it never grows to unitLength + 1. Confirm before removing.
    if (curTokens.size() == unitLength || (!curTokens.isEmpty() && curTokens.size() - 1 == unitLength)) {
      unitStrings.add(UnitBuilder.buildUnit(curTokens.toArray(curTokensArray)));
      curTokens.remove();
    }
  }

  logger.debug("Get ids for words.");
  TObjectIntHashMap<String> wordIds = DataAccess.getIdsForWords(unitStrings);
  units = new int[unitStrings.size()];
  unitCounts = new TIntIntHashMap((int) (wordIds.size() / Constants.DEFAULT_LOAD_FACTOR), Constants.DEFAULT_LOAD_FACTOR);
  numOfUnits = 0;
  for (int i = 0; i < unitStrings.size(); i++) {
    int unitId = wordIds.get(unitStrings.get(i));
    // Id 0 means the unit is unknown to the data store; units[i] stays 0.
    if (unitId == 0) continue;

    logger.debug("Get contract term for unit id {}.", unitId);
    // Prefer the contracted form of the term when one exists (0 = none).
    int contractedUnitId = DataAccess.contractTerm(unitId);
    if (contractedUnitId != 0) unitId = contractedUnitId;
    if (removeStopwords && StopWord.isStopwordOrSymbol(unitId, Language.getLanguageForString("en")))  continue;
    units[i] = unitId;
    unitCounts.adjustOrPutValue(unitId, 1, 1);
    numOfUnits++;
  }
}
 
Example 3
Source File: StrippedPartition.java — from metanome-algorithms, Apache License 2.0 (5 votes)
/**
 * Builds a stripped partition from a single column's values: rows with equal
 * values form an equivalence class, and classes of size one are discarded.
 *
 * @param columnContent the column's cell values, indexed by row
 */
public StrippedPartition(String[] columnContent) {
	// Maps each distinct value to the row index of its first occurrence,
	// which acts as the representative of that equivalence class.
	TObjectIntHashMap<String> firstSeenRow = new TObjectIntHashMap<>();
	LinkedHashMap<Integer, TEquivalence> classesByRepresentative = new LinkedHashMap<>();
	
	for (int row = 0; row < columnContent.length; row++) {
		String cell = columnContent[row];
		if (firstSeenRow.containsKey(cell)) {
			// Known value: append this row to its existing equivalence class.
			int representative = firstSeenRow.get(cell);
			classesByRepresentative.get(Integer.valueOf(representative)).add(row);
		} else {
			// New value: this row becomes the representative of a fresh class.
			firstSeenRow.put(cell, row);
			TEquivalence freshClass = new EquivalenceGroupTIntHashSet();
			freshClass.add(row);
			classesByRepresentative.put(Integer.valueOf(row), freshClass);
		}
	}
	
	// "Stripping": drop equivalence classes with only one element.
	Iterator<Map.Entry<Integer, TEquivalence>> it = classesByRepresentative.entrySet().iterator();
	while (it.hasNext()) {
		Map.Entry<Integer, TEquivalence> entry = it.next();
		if (entry.getValue().size() <= 1) {
			it.remove();
		}
	}

	// Keep the remaining equivalence groups in this partition.
	this.addAll(classesByRepresentative.values());
}
 
Example 4
Source File: CauldronHooks.java — from Thermos, GNU General Public License v3.0 (5 votes)
/**
 * Writes the per-key counts in {@code map} to {@code writer} as a JSON array
 * named {@code name}, sorted by descending count.
 *
 * <p>Entries with a count below 5 are skipped; at most roughly {@code max}
 * entries are written when {@code max} is positive.
 *
 * @param writer target JSON writer, positioned where an array value is legal
 * @param name   name of the JSON array property
 * @param map    key-to-count map to serialize
 * @param max    cap on emitted entries, or a non-positive value for no cap
 * @throws IOException if writing to {@code writer} fails
 */
private static <T> void writeChunkCounts(JsonWriter writer, String name, final TObjectIntHashMap<T> map, int max) throws IOException
{
    List<T> sortedCoords = new ArrayList<T>(map.keySet());
    Collections.sort(sortedCoords, new Comparator<T>()
    {
        @Override
        public int compare(T s1, T s2)
        {
            // Integer.compare avoids the overflow that subtracting raw
            // counts (the original "get(s2) - get(s1)") can produce for
            // values far apart in the int range.
            return Integer.compare(map.get(s2), map.get(s1));
        }
    });

    int i = 0;
    writer.name(name).beginArray();
    for (T key : sortedCoords)
    {
        // Stop once the cap is exceeded (max <= 0 disables the cap).
        if ((max > 0) && (i++ > max))
        {
            break;
        }
        // Skip low-frequency entries to keep the report compact.
        if (map.get(key) < 5)
        {
            continue;
        }
        writer.beginObject();
        writer.name("key").value(key.toString());
        writer.name("count").value(map.get(key));
        writer.endObject();
    }
    writer.endArray();
}
 
Example 5
Source File: BinaryMetaAnalysis.java — from systemsgenetics, GNU General Public License v3.0 (5 votes)
/**
 * Loads chromosome and position annotation for the SNPs in {@code snpList}
 * from the tab-separated annotation file configured in {@code settings}.
 *
 * <p>SNPs absent from the annotation file keep the sentinel values
 * chromosome {@code "-10"} and position {@code -10}.
 *
 * @throws IOException if the annotation file cannot be read
 */
protected void loadSNPAnnotation() throws IOException {

		// Initialize all SNPs with sentinel values meaning "no annotation found".
		snpChr = new String[snpList.length];
		snpPositions = new int[snpList.length];
		for (int s = 0; s < snpList.length; s++) {
			snpChr[s] = "-10".intern();
			snpPositions[s] = -10;
		}

		// Index SNP name -> position in snpList for fast lookup while streaming.
		TObjectIntHashMap<String> snpMap = new TObjectIntHashMap<String>(snpList.length);
		for (int s = 0; s < snpList.length; s++) {
			snpMap.put(snpList[s], s);
		}

		// loads only annotation for snps that are in the datasets..
		// Expected columns: [0] chromosome, [1] position, [2] SNP id.
		TextFile tf = new TextFile(settings.getSNPAnnotationFile(), TextFile.R, 10 * 1048576);
		String[] elems = tf.readLineElems(TextFile.tab);

		while (elems != null) {
			// Skip short/malformed lines: the SNP id in elems[2] must exist.
			if (elems.length > 2) {
				String snp = elems[2];
				if (snpMap.contains(snp)) {
					int id = snpMap.get(snp);
					// NOTE(review): the bytes round-trip forces a fresh String before
					// intern() — presumably to avoid retaining the parsed line's
					// backing char array on old JVMs; confirm it is still needed.
					snpChr[id] = new String(elems[0].getBytes("UTF-8")).intern();
					snpPositions[id] = Integer.parseInt(elems[1]);
				}
			}
			elems = tf.readLineElems(TextFile.tab);
		}

		tf.close();

	}
 
Example 6
Source File: BinaryMicrobePcaAnalysis.java — from systemsgenetics, GNU General Public License v3.0 (5 votes)
/**
 * Loads chromosome and position annotation for the SNPs in {@code snpList}
 * from the tab-separated annotation file configured in {@code settings}.
 *
 * <p>SNPs absent from the annotation file keep the sentinel values
 * chromosome {@code "-10"} and position {@code -10}.
 *
 * @throws IOException if the annotation file cannot be read
 */
private void loadSNPAnnotation() throws IOException {
	
	// Initialize all SNPs with sentinel values meaning "no annotation found".
	snpChr = new String[snpList.length];
	snpPositions = new int[snpList.length];
	for (int s = 0; s < snpList.length; s++) {
		snpChr[s] = "-10".intern();
		snpPositions[s] = -10;
	}
	
	// Index SNP name -> position in snpList for fast lookup while streaming.
	TObjectIntHashMap<String> snpMap = new TObjectIntHashMap<String>(snpList.length);
	for (int s = 0; s < snpList.length; s++) {
		snpMap.put(snpList[s], s);
	}
	// Expected columns: [0] chromosome, [1] position, [2] SNP id.
	TextFile tf = new TextFile(settings.getSNPAnnotationFile(), TextFile.R);
	
	String[] elems = tf.readLineElems(TextFile.tab);
	while (elems != null) {
		// Guard against short/malformed lines before touching elems[2];
		// the original indexed unconditionally and could throw
		// ArrayIndexOutOfBoundsException (the parallel implementation in
		// BinaryMetaAnalysis performs the same check).
		if (elems.length > 2) {
			String snp = elems[2];
			if (snpMap.contains(snp)) {
				int id = snpMap.get(snp);
				snpChr[id] = new String(elems[0].getBytes("UTF-8")).intern();
				snpPositions[id] = Integer.parseInt(elems[1]);
			}
		}
		elems = tf.readLineElems(TextFile.tab);
	}
	tf.close();
	
}
 
Example 7
Source File: UnitUtil.java — from ambiverse-nlu, Apache License 2.0 (4 votes)
/**
 * Resolves the numeric id of the unit formed by {@code unitTokens}.
 *
 * @param unitTokens token ids making up the unit; may be null or empty
 * @param id2word    id-to-word lookup used to rebuild the unit string
 * @param word2id    word-to-id lookup used to resolve the built unit
 * @return 0 for null/empty input, the single token id for one-token units,
 *         otherwise the id of the composed unit string
 */
public static int getUnitId(TIntList unitTokens, TIntObjectHashMap<String> id2word, TObjectIntHashMap<String> word2id) {
  if (unitTokens == null) return 0;
  switch (unitTokens.size()) {
    case 0:
      return 0;
    case 1:
      // A single token is its own unit.
      return unitTokens.get(0);
    default:
      // Compose the multi-token unit string and look up its id.
      return word2id.get(UnitBuilder.buildUnit(unitTokens, id2word));
  }
}
 
Example 8
Source File: STLMeshDecoder.java — from cineast, MIT License (4 votes)
/**
 * Reads an ASCII STL file and converts it into a {@link Mesh}.
 *
 * <p>Vertices are de-duplicated: each distinct coordinate triple is added to
 * the mesh once and subsequent occurrences reuse its index. If no vertices are
 * found, the stream is re-opened and decoded as binary STL instead, since some
 * binary files are incorrectly marked "solid".
 *
 * @param is InputStream to read from.
 * @return Mesh
 * @throws IOException If an error occurs during reading.
 */
private Mesh readAscii(InputStream is)  throws IOException {
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    String line = null;

    /* Prepare empty mesh. */
    Mesh mesh = new Mesh(100,100);

    /* Prepare helper structures. */
    /* vertexBuffer maps each distinct vertex to its index in the mesh. */
    TObjectIntHashMap<Vector3f> vertexBuffer = new TObjectIntHashMap<>();
    int index = 0;
    int[] vertexindices = new int[3];

    while ((line = br.readLine()) != null && !line.startsWith("endsolid")) {
        line = line.trim();

        /* Detect end of STL file. Not redundant with the loop condition:
         * that one tests the raw line, this one catches an indented
         * "endsolid" that only matches after trimming. */
        if (line.startsWith("endsolid")) {
          break;
        }

        /* Detect begin of facet. */
        if (line.startsWith("facet normal ")) {
            int vidx = 0;

            while ((line = br.readLine()) != null) {

                line = line.trim(); /* Trim line. */

                /* Detect end of facet. */
                if (line.equals("endfacet")) {
                  break;
                }

                /* Detect vertex: "vertex <x> <y> <z>". */
                if (line.startsWith("vertex")) {
                    String[] splitVertex = line.split("\\s+");
                    Vector3f vertex = new Vector3f(Float.parseFloat(splitVertex[1]),Float.parseFloat(splitVertex[2]), Float.parseFloat(splitVertex[3]));
                    /* Only add unseen vertices to the mesh; reuse the index otherwise. */
                    if (!vertexBuffer.containsKey(vertex)) {
                        mesh.addVertex(vertex);
                        vertexBuffer.put(vertex, index);
                        index++;
                    }
                    vertexindices[vidx] = vertexBuffer.get(vertex);
                    vidx++;
                }
            }

             /* Add a new face to the Mesh. */
            mesh.addFace(new Vector3i(vertexindices[0], vertexindices[1], vertexindices[2]));
        }
    }

    /* Close the buffered reader. */
    br.close();

    /* This covers the case, where the file starts with 'solid ' but is not an ASCII file. Unfortunately, such files do exist. */
    if (mesh.numberOfVertices() == 0) {
        LOGGER.warn("The provided ASCII STL file does not seem to contain any normals or vertices. Trying to decode it as binary STL even though it was marked as being ASCII.");
        InputStream newIs = Files.newInputStream(this.inputFile);
        return this.readBinary(newIs, 80);
    } else {
        return mesh;
    }
}
 
Example 9
Source File: STLMeshDecoder.java — from cineast, MIT License (4 votes)
/**
 * Reads a binary STL file and converts it into a {@link Mesh}.
 *
 * <p>Vertices are de-duplicated: each distinct coordinate triple is added to
 * the mesh once and subsequent occurrences reuse its index.
 *
 * @param is InputStream to read from.
 * @param skip Number of bytes to skip before reading the STL file.
 * @return Mesh, or null if the stream is truncated or the triangle count is invalid.
 * @throws IOException If an error occurs during reading.
 */
private Mesh readBinary(InputStream is, int skip) throws IOException {
    /* Prepare a ByteBuffer to read the rest of the STL file (50 bytes per
     * triangle record: 12 little-endian floats + 2 attribute bytes). */
    byte[] bytes = new byte[50];
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    buffer.order(ByteOrder.LITTLE_ENDIAN);

    /* Skip the STL header. InputStream.skip() may skip fewer bytes than
     * requested (the original ignored its return value), so loop until the
     * whole header has been consumed. */
    long remaining = skip;
    while (remaining > 0) {
        long skipped = is.skip(remaining);
        if (skipped > 0) {
            remaining -= skipped;
        } else if (is.read() != -1) {
            /* skip() made no progress; fall back to reading one byte. */
            remaining -= 1;
        } else {
            LOGGER.error("Unexpected end of stream while skipping the STL header. This STL file is probably corrupt!");
            return null;
        }
    }

    /* Read the bytes for the size (unsigned 32 bit int, little-endian).
     * read() may return fewer bytes than requested, hence readFully(). */
    byte[] sizeBytes = new byte[4];
    if (!readFully(is, sizeBytes)) {
        LOGGER.error("Unexpected end of stream while reading the STL triangle count. This STL file is probably corrupt!");
        return null;
    }
    long triangles = ((sizeBytes[0] & 0xFF)) | ((sizeBytes[1] & 0xFF) << 8) | ((sizeBytes[2] & 0xFF) << 16) | ((sizeBytes[3] & 0xFF) << 24);

    /* TODO: Properly handle models whose triangles > MAX_TRIANGLES. */
    if (triangles <= 0) {
        LOGGER.error("The number of triangles in the Mesh seems to be smaller than zero. This STL file is probably corrupt!");
        return null;
    } else if (triangles > MAX_TRIANGLES) {
        LOGGER.error("The number of triangles in the Mesh exceeds the limit that can currently be processed by STLMeshDecoder. The Mesh will be downsampled!");
        return null;
    }

    /* Prepare Mesh. */
    Mesh mesh = new Mesh((int)triangles, (int)triangles);

    /* Prepare helper structures. */
    TObjectIntHashMap<Vector3f> vertexBuffer = new TObjectIntHashMap<>();
    int index = 0;
    int[] vertexindices = new int[3];

    /* Now add all triangles. */
    for (int i=0; i<triangles; i++) {
        /* Read one full 50-byte triangle record from the stream; the
         * original used a bare is.read(bytes), which may short-read. */
        buffer.rewind();
        if (!readFully(is, bytes)) {
            LOGGER.error("Unexpected end of stream while reading a triangle record. This STL file is probably corrupt!");
            return null;
        }

        /* Read and ignore the three components of the normal vector. */
        buffer.getFloat();
        buffer.getFloat();
        buffer.getFloat();

        /* Add the vertices and the vertex-normal to the mesh; distinct
         * vertices are added once and reused via vertexBuffer. */
        for (int vidx = 0; vidx < 3; vidx++) {
            Vector3f vertex = new Vector3f(buffer.getFloat(), buffer.getFloat(), buffer.getFloat());
            if (!vertexBuffer.containsKey(vertex)) {
                mesh.addVertex(vertex);
                vertexBuffer.put(vertex, index);
                index++;
            }
            vertexindices[vidx] = vertexBuffer.get(vertex);
        }

        /* Add a new face to the Mesh. */
        if (!mesh.addFace(new Vector3i(vertexindices[0], vertexindices[1], vertexindices[2]))) {
            LOGGER.warn("Could not add face {}/{}/{} because index points to non-existing vertex.", vertexindices[0], vertexindices[1], vertexindices[2]);
        }
    }

    /* Closes the InputStream. */
    is.close();
    return mesh;
}

/**
 * Reads from the stream until {@code target} is completely filled or the end
 * of the stream is reached.
 *
 * @param is InputStream to read from.
 * @param target Buffer to fill completely.
 * @return true if the buffer was filled, false if EOF occurred first.
 * @throws IOException If an error occurs during reading.
 */
private static boolean readFully(InputStream is, byte[] target) throws IOException {
    int offset = 0;
    while (offset < target.length) {
        int read = is.read(target, offset, target.length - offset);
        if (read == -1) {
            return false;
        }
        offset += read;
    }
    return true;
}
 
Example 10
Source File: MeshParser.java — from cineast, MIT License (4 votes)
/**
   * Parses a Base64 encoded data URL containing Geometry JSON as used by the
   * Three.js JavaScript library and tries to convert it into a 3D mesh.
   *
   * @param dataUrl Data URL that should be parsed.
   * @return Mesh, if parsing fails that Mesh will be empty!
   */
  public static Mesh parseThreeJSV4Geometry(String dataUrl) {
      /* Decode the Base64 payload of the data URL into raw bytes. */
      byte[] bytes = dataURLtoByteArray(dataUrl, MIME_TYPE);

      ObjectMapper mapper = new ObjectMapper();
      try {
          /* Parse the JSON structure and fetch the vertex array. */
          JsonNode root = mapper.readTree(bytes);
          JsonNode vertices = root.get(VERTICES_PROPERTY_NAME_THREEV4);
          boolean hasVertices = vertices != null && vertices.isArray() && vertices.size() > 0;
          if (!hasVertices) {
              LOGGER.error("Submitted mesh does not contain any vertices. Aborting...");
              return Mesh.EMPTY;
          }

          /* Allocate the mesh: nine floats per face, three per vertex. */
          Mesh mesh = new Mesh(vertices.size()/9, vertices.size()/3);

          /* De-duplication map: vertex -> index of its first occurrence. */
          TObjectIntHashMap<Vector3f> knownVertices = new TObjectIntHashMap<>();
          int nextIndex = 0;
          int[] faceIndices = new int[3];

          /* Every complete group of nine floats describes one triangular face. */
          for (int base = 0; base + 9 <= vertices.size(); base += 9) {
              for (int corner = 0; corner < 3; corner++) {
                  int offset = base + 3 * corner;
                  float x = (float) vertices.get(offset).asDouble();
                  float y = (float) vertices.get(offset + 1).asDouble();
                  float z = (float) vertices.get(offset + 2).asDouble();
                  Vector3f vertex = new Vector3f(x, y, z);
                  /* Add unseen vertices to the mesh; reuse the index otherwise. */
                  if (!knownVertices.containsKey(vertex)) {
                      knownVertices.put(vertex, nextIndex++);
                      mesh.addVertex(vertex);
                  }
                  faceIndices[corner] = knownVertices.get(vertex);
              }

              mesh.addFace(new Vector3i(faceIndices[0], faceIndices[1], faceIndices[2]));
          }

          return mesh;
      } catch (IOException e) {
          LOGGER.error("Could not create 3d mesh from Base64 input because the file-format is not supported. {}", LogHelper.getStackTrace(e));
          return Mesh.EMPTY;
      }
  }