Java Code Examples for org.apache.cassandra.io.sstable.Descriptor#filenameFor()
The following examples show how to use org.apache.cassandra.io.sstable.Descriptor#filenameFor().
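Before the examples, here is a minimal sketch of the typical call pattern: build (or parse) a Descriptor for an SSTable and ask it for the on-disk path of a given Component. The data directory, keyspace, table name, and generation number below are hypothetical; the constructor and Descriptor.fromFilename() usage follow the patterns shown in Examples 3, 5, and 8 and assume the stratio-cassandra (Cassandra 2.x era) API.

import java.io.File;

import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.Descriptor;

public class FilenameForSketch
{
    public static void main(String[] args)
    {
        // Hypothetical data directory, keyspace, table, and generation number.
        File dir = new File("/var/lib/cassandra/data/my_keyspace/my_table");

        // Build a descriptor for a final (non-temporary) SSTable, as in Example 5.
        Descriptor desc = new Descriptor(dir, "my_keyspace", "my_table", 1, Descriptor.Type.FINAL);

        // filenameFor() returns the full path of the requested component of that SSTable.
        String dataPath = desc.filenameFor(Component.DATA);
        String statsPath = desc.filenameFor(Component.STATS);

        // A descriptor can also be recovered from an existing component path, as in Example 8.
        Descriptor parsed = Descriptor.fromFilename(dataPath);

        System.out.println(dataPath);
        System.out.println(statsPath);
        System.out.println(parsed.filenameFor(Component.PRIMARY_INDEX));
    }
}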
Example 1
Source File: MetadataSerializer.java From stratio-cassandra with Apache License 2.0
public Map<MetadataType, MetadataComponent> deserialize(Descriptor descriptor, EnumSet<MetadataType> types) throws IOException
{
    Map<MetadataType, MetadataComponent> components;
    logger.debug("Load metadata for {}", descriptor);
    File statsFile = new File(descriptor.filenameFor(Component.STATS));
    if (!statsFile.exists())
    {
        logger.debug("No sstable stats for {}", descriptor);
        components = Maps.newHashMap();
        components.put(MetadataType.STATS, MetadataCollector.defaultStatsMetadata());
    }
    else
    {
        try (RandomAccessReader r = RandomAccessReader.open(statsFile))
        {
            components = deserialize(descriptor, r, types);
        }
    }
    return components;
}
Example 2
Source File: DataIntegrityMetadata.java From stratio-cassandra with Apache License 2.0
public void writeFullChecksum(Descriptor descriptor)
{
    File outFile = new File(descriptor.filenameFor(Component.DIGEST));
    BufferedWriter out = null;
    try
    {
        out = Files.newBufferedWriter(outFile.toPath(), Charsets.UTF_8);
        out.write(String.valueOf(fullChecksum.getValue()));
    }
    catch (IOException e)
    {
        throw new FSWriteError(e, outFile);
    }
    finally
    {
        FileUtils.closeQuietly(out);
    }
}
Example 3
Source File: ColumnFamilyStore.java From stratio-cassandra with Apache License 2.0
private String getTempSSTablePath(File directory, Descriptor.Version version)
{
    Descriptor desc = new Descriptor(version,
                                     directory,
                                     keyspace.getName(),
                                     name,
                                     fileIndexGenerator.incrementAndGet(),
                                     Descriptor.Type.TEMP);
    return desc.filenameFor(Component.DATA);
}
Example 4
Source File: MetadataSerializer.java From stratio-cassandra with Apache License 2.0
private void rewriteSSTableMetadata(Descriptor descriptor, Map<MetadataType, MetadataComponent> currentComponents) throws IOException
{
    Descriptor tmpDescriptor = descriptor.asType(Descriptor.Type.TEMP);

    try (DataOutputStreamAndChannel out = new DataOutputStreamAndChannel(new FileOutputStream(tmpDescriptor.filenameFor(Component.STATS))))
    {
        serialize(currentComponents, out);
        out.flush();
    }
    // we can't move a file on top of another file in Windows:
    if (FBUtilities.isWindows())
        FileUtils.delete(descriptor.filenameFor(Component.STATS));
    FileUtils.renameWithConfirm(tmpDescriptor.filenameFor(Component.STATS), descriptor.filenameFor(Component.STATS));
}
Example 5
Source File: DirectoriesTest.java From stratio-cassandra with Apache License 2.0
private static void createFakeSSTable(File dir, String cf, int gen, boolean temp, List<File> addTo) throws IOException
{
    Descriptor desc = new Descriptor(dir, KS, cf, gen, temp ? Descriptor.Type.TEMP : Descriptor.Type.FINAL);
    for (Component c : new Component[]{ Component.DATA, Component.PRIMARY_INDEX, Component.FILTER })
    {
        File f = new File(desc.filenameFor(c));
        f.createNewFile();
        addTo.add(f);
    }
}
Example 6
Source File: ColumnIndex.java From sasi with Apache License 2.0
public String getPath(Descriptor sstable)
{
    return sstable.filenameFor(component);
}
Example 7
Source File: LegacyMetadataSerializer.java From stratio-cassandra with Apache License 2.0
/**
 * Legacy serializer deserializes all components no matter what types are specified.
 */
@Override
public Map<MetadataType, MetadataComponent> deserialize(Descriptor descriptor, EnumSet<MetadataType> types) throws IOException
{
    Map<MetadataType, MetadataComponent> components = Maps.newHashMap();
    File statsFile = new File(descriptor.filenameFor(Component.STATS));
    if (!statsFile.exists() && types.contains(MetadataType.STATS))
    {
        components.put(MetadataType.STATS, MetadataCollector.defaultStatsMetadata());
    }
    else
    {
        try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(statsFile))))
        {
            EstimatedHistogram rowSizes = EstimatedHistogram.serializer.deserialize(in);
            EstimatedHistogram columnCounts = EstimatedHistogram.serializer.deserialize(in);
            ReplayPosition replayPosition = ReplayPosition.serializer.deserialize(in);
            long minTimestamp = in.readLong();
            long maxTimestamp = in.readLong();
            int maxLocalDeletionTime = in.readInt();
            double bloomFilterFPChance = in.readDouble();
            double compressionRatio = in.readDouble();
            String partitioner = in.readUTF();
            int nbAncestors = in.readInt();
            Set<Integer> ancestors = new HashSet<>(nbAncestors);
            for (int i = 0; i < nbAncestors; i++)
                ancestors.add(in.readInt());
            StreamingHistogram tombstoneHistogram = StreamingHistogram.serializer.deserialize(in);
            int sstableLevel = 0;
            if (in.available() > 0)
                sstableLevel = in.readInt();
            int colCount = in.readInt();
            List<ByteBuffer> minColumnNames = new ArrayList<>(colCount);
            for (int i = 0; i < colCount; i++)
                minColumnNames.add(ByteBufferUtil.readWithShortLength(in));
            colCount = in.readInt();
            List<ByteBuffer> maxColumnNames = new ArrayList<>(colCount);
            for (int i = 0; i < colCount; i++)
                maxColumnNames.add(ByteBufferUtil.readWithShortLength(in));

            if (types.contains(MetadataType.VALIDATION))
                components.put(MetadataType.VALIDATION,
                               new ValidationMetadata(partitioner, bloomFilterFPChance));
            if (types.contains(MetadataType.STATS))
                components.put(MetadataType.STATS,
                               new StatsMetadata(rowSizes,
                                                 columnCounts,
                                                 replayPosition,
                                                 minTimestamp,
                                                 maxTimestamp,
                                                 maxLocalDeletionTime,
                                                 compressionRatio,
                                                 tombstoneHistogram,
                                                 sstableLevel,
                                                 minColumnNames,
                                                 maxColumnNames,
                                                 true,
                                                 ActiveRepairService.UNREPAIRED_SSTABLE));
            if (types.contains(MetadataType.COMPACTION))
                components.put(MetadataType.COMPACTION,
                               new CompactionMetadata(ancestors, null));
        }
    }
    return components;
}
Example 8
Source File: CompressionMetadata.java From stratio-cassandra with Apache License 2.0
/**
 * Create metadata about given compressed file including uncompressed data length, chunk size
 * and list of the chunk offsets of the compressed data.
 *
 * This is an expensive operation! Don't create more than one for each sstable.
 *
 * @param dataFilePath Path to the compressed file
 *
 * @return metadata about given compressed file.
 */
public static CompressionMetadata create(String dataFilePath)
{
    Descriptor desc = Descriptor.fromFilename(dataFilePath);
    return new CompressionMetadata(desc.filenameFor(Component.COMPRESSION_INFO),
                                   new File(dataFilePath).length(),
                                   desc.version.hasPostCompressionAdlerChecksums);
}