org.apache.hadoop.fs.FileSystem.Statistics Java Examples
The following examples show how to use
org.apache.hadoop.fs.FileSystem.Statistics.
Each example is drawn from an open-source project; the project, source file, and license are noted above it.
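As a quick orientation before the examples, here is a minimal sketch (not taken from any of the projects listed below; the path is illustrative) showing how per-scheme Statistics objects are usually obtained and read. FileSystem.getAllStatistics() returns one live Statistics instance per FileSystem class that has been used in the current JVM.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystem.Statistics;
import org.apache.hadoop.fs.Path;

public class StatisticsOverview {
  public static void main(String[] args) throws Exception {
    // Touch the local file system so Hadoop registers a "file" scheme entry.
    FileSystem fs = FileSystem.getLocal(new Configuration());
    fs.exists(new Path("/tmp"));  // any call that goes through the FileSystem

    // Read back the counters collected for each scheme used in this JVM.
    for (Statistics stats : FileSystem.getAllStatistics()) {
      System.out.println(stats.getScheme()
          + ": bytesRead=" + stats.getBytesRead()
          + ", bytesWritten=" + stats.getBytesWritten()
          + ", readOps=" + stats.getReadOps()
          + ", writeOps=" + stats.getWriteOps());
    }
  }
}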
Example #1
Source File: SwiftAPIClient.java From stocator with Apache License 2.0 | 7 votes |
/**
 * Direct HTTP PUT request without JOSS package
 *
 * @param objName name of the object
 * @param contentType content type
 * @return HttpURLConnection
 */
@Override
public FSDataOutputStream createObject(String objName, String contentType,
    Map<String, String> metadata, Statistics statistics, boolean overwrite)
    throws IOException {
  final URL url = new URL(mJossAccount.getAccessURL() + "/" + getURLEncodedObjName(objName));
  LOG.debug("PUT {}. Content-Type : {}", url.toString(), contentType);

  // When overwriting an object, cached metadata will be outdated
  String cachedName = getObjName(container + "/", objName);
  objectCache.remove(cachedName);

  try {
    final OutputStream sos;
    if (nonStreamingUpload) {
      sos = new SwiftNoStreamingOutputStream(mJossAccount, url, contentType,
          metadata, swiftConnectionManager, this);
    } else {
      sos = new SwiftOutputStream(mJossAccount, url, contentType, metadata,
          swiftConnectionManager);
    }
    return new FSDataOutputStream(sos, statistics);
  } catch (IOException e) {
    LOG.error(e.getMessage());
    throw e;
  }
}
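The part relevant to Statistics is the last line: the object-store stream is wrapped in an FSDataOutputStream together with the caller's Statistics object, so bytes written through the returned stream should be credited to that Statistics instance by the wrapper. A minimal, self-contained sketch of the same idea, using a hypothetical in-memory stream and scheme name:

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem.Statistics;

public class WrapWithStatistics {
  public static void main(String[] args) throws Exception {
    Statistics stats = new Statistics("swift");  // hypothetical scheme name
    FSDataOutputStream out =
        new FSDataOutputStream(new ByteArrayOutputStream(), stats);
    out.write(new byte[128]);
    out.close();
    // Expected to report 128 if the wrapper counts the write, as it normally does.
    System.out.println("bytesWritten = " + stats.getBytesWritten());
  }
}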
Example #2
Source File: MapTask.java From hadoop with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") public void init(MapOutputCollector.Context context ) throws IOException, ClassNotFoundException { this.reporter = context.getReporter(); JobConf job = context.getJobConf(); String finalName = getOutputName(getPartition()); FileSystem fs = FileSystem.get(job); OutputFormat<K, V> outputFormat = job.getOutputFormat(); mapOutputRecordCounter = reporter.getCounter(TaskCounter.MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormatCounter.BYTES_WRITTEN); List<Statistics> matchedStats = null; if (outputFormat instanceof FileOutputFormat) { matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); out = job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #3
Source File: MapTask.java From hadoop with Apache License 2.0 | 6 votes |
NewTrackingRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
    org.apache.hadoop.mapreduce.InputFormat<K, V> inputFormat,
    TaskReporter reporter,
    org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws InterruptedException, IOException {
  this.reporter = reporter;
  this.inputRecordCounter = reporter
      .getCounter(TaskCounter.MAP_INPUT_RECORDS);
  this.fileInputByteCounter = reporter
      .getCounter(FileInputFormatCounter.BYTES_READ);

  List<Statistics> matchedStats = null;
  if (split instanceof org.apache.hadoop.mapreduce.lib.input.FileSplit) {
    matchedStats = getFsStatistics(
        ((org.apache.hadoop.mapreduce.lib.input.FileSplit) split).getPath(),
        taskContext.getConfiguration());
  }
  fsStats = matchedStats;

  long bytesInPrev = getInputBytes(fsStats);
  this.real = inputFormat.createRecordReader(split, taskContext);
  long bytesInCurr = getInputBytes(fsStats);
  fileInputByteCounter.increment(bytesInCurr - bytesInPrev);
}
Example #4
Source File: FCStatisticsBaseTest.java From big-c with Apache License 2.0 | 6 votes |
@Test
public void testStatistics() throws IOException, URISyntaxException {
  URI fsUri = getFsUri();
  Statistics stats = FileContext.getStatistics(fsUri);
  Assert.assertEquals(0, stats.getBytesRead());
  Path filePath = fileContextTestHelper.getTestRootPath(fc, "file1");
  createFile(fc, filePath, numBlocks, blockSize);

  Assert.assertEquals(0, stats.getBytesRead());
  verifyWrittenBytes(stats);
  FSDataInputStream fstr = fc.open(filePath);
  byte[] buf = new byte[blockSize];
  int bytesRead = fstr.read(buf, 0, blockSize);
  fstr.read(0, buf, 0, blockSize);
  Assert.assertEquals(blockSize, bytesRead);
  verifyReadBytes(stats);
  verifyWrittenBytes(stats);
  verifyReadBytes(FileContext.getStatistics(getFsUri()));

  Map<URI, Statistics> statsMap = FileContext.getAllStatistics();
  URI exactUri = getSchemeAuthorityUri();
  verifyWrittenBytes(statsMap.get(exactUri));
  fc.delete(filePath, true);
}
Example #5
Source File: MapTask.java From hadoop with Apache License 2.0 | 6 votes |
TrackedRecordReader(TaskReporter reporter, JobConf job) throws IOException {
  inputRecordCounter = reporter.getCounter(TaskCounter.MAP_INPUT_RECORDS);
  fileInputByteCounter = reporter.getCounter(FileInputFormatCounter.BYTES_READ);
  this.reporter = reporter;

  List<Statistics> matchedStats = null;
  if (this.reporter.getInputSplit() instanceof FileSplit) {
    matchedStats = getFsStatistics(((FileSplit) this.reporter
        .getInputSplit()).getPath(), job);
  }
  fsStats = matchedStats;

  bytesInPrev = getInputBytes(fsStats);
  rawIn = job.getInputFormat().getRecordReader(reporter.getInputSplit(),
      job, reporter);
  bytesInCurr = getInputBytes(fsStats);
  fileInputByteCounter.increment(bytesInCurr - bytesInPrev);
}
Example #6
Source File: MapTask.java From hadoop with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") NewDirectOutputCollector(MRJobConfig jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException, ClassNotFoundException, InterruptedException { this.reporter = reporter; mapOutputRecordCounter = reporter .getCounter(TaskCounter.MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormatCounter.BYTES_WRITTEN); List<Statistics> matchedStats = null; if (outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(taskContext), taskContext.getConfiguration()); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); out = outputFormat.getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #7
Source File: ReduceTask.java From big-c with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") NewTrackingRecordWriter(ReduceTask reduce, org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) throws InterruptedException, IOException { this.outputRecordCounter = reduce.reduceOutputCounter; this.fileOutputByteCounter = reduce.fileOutputByteCounter; List<Statistics> matchedStats = null; if (reduce.outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(taskContext), taskContext.getConfiguration()); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); this.real = (org.apache.hadoop.mapreduce.RecordWriter<K, V>) reduce.outputFormat .getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #8
Source File: ReduceTask.java From hadoop with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") NewTrackingRecordWriter(ReduceTask reduce, org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) throws InterruptedException, IOException { this.outputRecordCounter = reduce.reduceOutputCounter; this.fileOutputByteCounter = reduce.fileOutputByteCounter; List<Statistics> matchedStats = null; if (reduce.outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(taskContext), taskContext.getConfiguration()); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); this.real = (org.apache.hadoop.mapreduce.RecordWriter<K, V>) reduce.outputFormat .getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #9
Source File: ReduceTask.java From big-c with Apache License 2.0 | 6 votes |
@SuppressWarnings({ "deprecation", "unchecked" }) public OldTrackingRecordWriter(ReduceTask reduce, JobConf job, TaskReporter reporter, String finalName) throws IOException { this.reduceOutputCounter = reduce.reduceOutputCounter; this.fileOutputByteCounter = reduce.fileOutputByteCounter; List<Statistics> matchedStats = null; if (job.getOutputFormat() instanceof FileOutputFormat) { matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job); } fsStats = matchedStats; FileSystem fs = FileSystem.get(job); long bytesOutPrev = getOutputBytes(fsStats); this.real = job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #10
Source File: MapTask.java From big-c with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") public void init(MapOutputCollector.Context context ) throws IOException, ClassNotFoundException { this.reporter = context.getReporter(); JobConf job = context.getJobConf(); String finalName = getOutputName(getPartition()); FileSystem fs = FileSystem.get(job); OutputFormat<K, V> outputFormat = job.getOutputFormat(); mapOutputRecordCounter = reporter.getCounter(TaskCounter.MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormatCounter.BYTES_WRITTEN); List<Statistics> matchedStats = null; if (outputFormat instanceof FileOutputFormat) { matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); out = job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #11
Source File: MapTask.java From big-c with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") NewDirectOutputCollector(MRJobConfig jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException, ClassNotFoundException, InterruptedException { this.reporter = reporter; mapOutputRecordCounter = reporter .getCounter(TaskCounter.MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormatCounter.BYTES_WRITTEN); List<Statistics> matchedStats = null; if (outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(taskContext), taskContext.getConfiguration()); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); out = outputFormat.getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
Example #12
Source File: MapTask.java From big-c with Apache License 2.0 | 6 votes |
NewTrackingRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
    org.apache.hadoop.mapreduce.InputFormat<K, V> inputFormat,
    TaskReporter reporter,
    org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws InterruptedException, IOException {
  this.reporter = reporter;
  this.inputRecordCounter = reporter
      .getCounter(TaskCounter.MAP_INPUT_RECORDS);
  this.fileInputByteCounter = reporter
      .getCounter(FileInputFormatCounter.BYTES_READ);

  List<Statistics> matchedStats = null;
  if (split instanceof org.apache.hadoop.mapreduce.lib.input.FileSplit) {
    matchedStats = getFsStatistics(
        ((org.apache.hadoop.mapreduce.lib.input.FileSplit) split).getPath(),
        taskContext.getConfiguration());
  }
  fsStats = matchedStats;

  long bytesInPrev = getInputBytes(fsStats);
  this.real = inputFormat.createRecordReader(split, taskContext);
  long bytesInCurr = getInputBytes(fsStats);
  fileInputByteCounter.increment(bytesInCurr - bytesInPrev);
}
Example #13
Source File: MapTask.java From big-c with Apache License 2.0 | 6 votes |
TrackedRecordReader(TaskReporter reporter, JobConf job) throws IOException {
  inputRecordCounter = reporter.getCounter(TaskCounter.MAP_INPUT_RECORDS);
  fileInputByteCounter = reporter.getCounter(FileInputFormatCounter.BYTES_READ);
  this.reporter = reporter;

  List<Statistics> matchedStats = null;
  if (this.reporter.getInputSplit() instanceof FileSplit) {
    matchedStats = getFsStatistics(((FileSplit) this.reporter
        .getInputSplit()).getPath(), job);
  }
  fsStats = matchedStats;

  bytesInPrev = getInputBytes(fsStats);
  rawIn = job.getInputFormat().getRecordReader(reporter.getInputSplit(),
      job, reporter);
  bytesInCurr = getInputBytes(fsStats);
  fileInputByteCounter.increment(bytesInCurr - bytesInPrev);
}
Example #14
Source File: FCStatisticsBaseTest.java From hadoop with Apache License 2.0 | 6 votes |
@Test
public void testStatistics() throws IOException, URISyntaxException {
  URI fsUri = getFsUri();
  Statistics stats = FileContext.getStatistics(fsUri);
  Assert.assertEquals(0, stats.getBytesRead());
  Path filePath = fileContextTestHelper.getTestRootPath(fc, "file1");
  createFile(fc, filePath, numBlocks, blockSize);

  Assert.assertEquals(0, stats.getBytesRead());
  verifyWrittenBytes(stats);
  FSDataInputStream fstr = fc.open(filePath);
  byte[] buf = new byte[blockSize];
  int bytesRead = fstr.read(buf, 0, blockSize);
  fstr.read(0, buf, 0, blockSize);
  Assert.assertEquals(blockSize, bytesRead);
  verifyReadBytes(stats);
  verifyWrittenBytes(stats);
  verifyReadBytes(FileContext.getStatistics(getFsUri()));

  Map<URI, Statistics> statsMap = FileContext.getAllStatistics();
  URI exactUri = getSchemeAuthorityUri();
  verifyWrittenBytes(statsMap.get(exactUri));
  fc.delete(filePath, true);
}
Example #15
Source File: COSInputStream.java From stocator with Apache License 2.0 | 5 votes |
public COSInputStream(String bucketT, String keyT, long contentLengthT,
    AmazonS3 clientT, long readahead, COSInputPolicy inputPolicyT,
    Statistics statisticsT) {
  bucket = bucketT;
  key = keyT;
  contentLength = contentLengthT;
  client = clientT;
  uri = bucket + "/" + key;
  inputPolicy = inputPolicyT;
  stats = statisticsT;
  setReadahead(readahead);
}
Example #16
Source File: FCStatisticsBaseTest.java From big-c with Apache License 2.0 | 5 votes |
@Test(timeout = 60000)
public void testStatisticsOperations() throws Exception {
  final Statistics stats = new Statistics("file");
  Assert.assertEquals(0L, stats.getBytesRead());
  Assert.assertEquals(0L, stats.getBytesWritten());
  Assert.assertEquals(0, stats.getWriteOps());
  stats.incrementBytesWritten(1000);
  Assert.assertEquals(1000L, stats.getBytesWritten());
  Assert.assertEquals(0, stats.getWriteOps());
  stats.incrementWriteOps(123);
  Assert.assertEquals(123, stats.getWriteOps());

  Thread thread = new Thread() {
    @Override
    public void run() {
      stats.incrementWriteOps(1);
    }
  };
  thread.start();
  Uninterruptibles.joinUninterruptibly(thread);
  Assert.assertEquals(124, stats.getWriteOps());

  // Test copy constructor and reset function
  Statistics stats2 = new Statistics(stats);
  stats.reset();
  Assert.assertEquals(0, stats.getWriteOps());
  Assert.assertEquals(0L, stats.getBytesWritten());
  Assert.assertEquals(0L, stats.getBytesRead());
  Assert.assertEquals(124, stats2.getWriteOps());
  Assert.assertEquals(1000L, stats2.getBytesWritten());
  Assert.assertEquals(0L, stats2.getBytesRead());
}
Example #17
Source File: ReduceTask.java From big-c with Apache License 2.0 | 5 votes |
private long getOutputBytes(List<Statistics> stats) {
  if (stats == null) return 0;
  long bytesWritten = 0;
  for (Statistics stat : stats) {
    bytesWritten = bytesWritten + stat.getBytesWritten();
  }
  return bytesWritten;
}
Example #18
Source File: AbstractFileSystem.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Prints statistics for all file systems.
 */
public static synchronized void printStatistics() {
  for (Map.Entry<URI, Statistics> pair : STATISTICS_TABLE.entrySet()) {
    System.out.println(" FileSystem " + pair.getKey().getScheme() + "://"
        + pair.getKey().getAuthority() + ": " + pair.getValue());
  }
}
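A short, assumed usage of this method: after performing some I/O through FileContext, a call to AbstractFileSystem.printStatistics() dumps one "FileSystem scheme://authority: ..." line per entry in the internal table.

import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class PrintStatisticsSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getLocalFSFileContext();
    fc.util().exists(new Path("/tmp"));   // any FileContext operation that touches the FS
    AbstractFileSystem.printStatistics(); // one line per scheme/authority seen so far
  }
}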
Example #19
Source File: TestLocalFileSystem.java From big-c with Apache License 2.0 | 5 votes |
@Test(timeout = 1000)
public void testStatistics() throws Exception {
  int fileSchemeCount = 0;
  for (Statistics stats : FileSystem.getAllStatistics()) {
    if (stats.getScheme().equals("file")) {
      fileSchemeCount++;
    }
  }
  assertEquals(1, fileSchemeCount);
}
Example #20
Source File: AbstractFileSystem.java From big-c with Apache License 2.0 | 5 votes |
protected static synchronized Map<URI, Statistics> getAllStatistics() {
  Map<URI, Statistics> statsMap = new HashMap<URI, Statistics>(
      STATISTICS_TABLE.size());
  for (Map.Entry<URI, Statistics> pair : STATISTICS_TABLE.entrySet()) {
    URI key = pair.getKey();
    Statistics value = pair.getValue();
    Statistics newStatsObj = new Statistics(value);
    statsMap.put(URI.create(key.toString()), newStatsObj);
  }
  return statsMap;
}
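Note the copy constructor inside the loop: every returned entry is a point-in-time copy of the live counters, so callers get a consistent snapshot rather than Statistics objects that keep changing while they iterate over the map.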
Example #21
Source File: MapTask.java From big-c with Apache License 2.0 | 5 votes |
private long getInputBytes(List<Statistics> stats) {
  if (stats == null) return 0;
  long bytesRead = 0;
  for (Statistics stat : stats) {
    bytesRead = bytesRead + stat.getBytesRead();
  }
  return bytesRead;
}
Example #22
Source File: MapTask.java From big-c with Apache License 2.0 | 5 votes |
private long getOutputBytes(List<Statistics> stats) {
  if (stats == null) return 0;
  long bytesWritten = 0;
  for (Statistics stat : stats) {
    bytesWritten = bytesWritten + stat.getBytesWritten();
  }
  return bytesWritten;
}
Example #23
Source File: Task.java From RDFS with Apache License 2.0 | 5 votes |
private synchronized void updateCounters() {
  for (Statistics stat : FileSystem.getAllStatistics()) {
    String uriScheme = stat.getScheme();
    FileSystemStatisticUpdater updater = statisticUpdaters.get(uriScheme);
    if (updater == null) {  // new FileSystem has been found in the cache
      updater = new FileSystemStatisticUpdater(uriScheme, stat);
      statisticUpdaters.put(uriScheme, updater);
    }
    updater.updateCounters();
  }
  updateResourceCounters();
}
Example #24
Source File: MapTask.java From big-c with Apache License 2.0 | 5 votes |
private long getInputBytes(List<Statistics> stats) {
  if (stats == null) return 0;
  long bytesRead = 0;
  for (Statistics stat : stats) {
    bytesRead = bytesRead + stat.getBytesRead();
  }
  return bytesRead;
}
Example #25
Source File: MRTask.java From incubator-tez with Apache License 2.0 | 5 votes |
/**
 * Gets a handle to the Statistics instance based on the scheme associated
 * with path.
 *
 * @param path the path.
 * @param conf the configuration to extract the scheme from if not part of
 *             the path.
 * @return a Statistics instance, or null if none is found for the scheme.
 */
@Private
public static List<Statistics> getFsStatistics(Path path, Configuration conf)
    throws IOException {
  List<Statistics> matchedStats = new ArrayList<FileSystem.Statistics>();
  path = path.getFileSystem(conf).makeQualified(path);
  String scheme = path.toUri().getScheme();
  for (Statistics stats : FileSystem.getAllStatistics()) {
    if (stats.getScheme().equals(scheme)) {
      matchedStats.add(stats);
    }
  }
  return matchedStats;
}
Example #26
Source File: Utils.java From incubator-tez with Apache License 2.0 | 5 votes |
/**
 * Gets a handle to the Statistics instance based on the scheme associated
 * with path.
 *
 * @param path the path.
 * @param conf the configuration to extract the scheme from if not part of
 *             the path.
 * @return a Statistics instance, or null if none is found for the scheme.
 */
@Private
public static List<Statistics> getFsStatistics(Path path, Configuration conf)
    throws IOException {
  List<Statistics> matchedStats = new ArrayList<FileSystem.Statistics>();
  path = path.getFileSystem(conf).makeQualified(path);
  String scheme = path.toUri().getScheme();
  for (Statistics stats : FileSystem.getAllStatistics()) {
    if (stats.getScheme().equals(scheme)) {
      matchedStats.add(stats);
    }
  }
  return matchedStats;
}
Example #27
Source File: Task.java From big-c with Apache License 2.0 | 5 votes |
void updateCounters() {
  if (readBytesCounter == null) {
    readBytesCounter = counters.findCounter(scheme,
        FileSystemCounter.BYTES_READ);
  }
  if (writeBytesCounter == null) {
    writeBytesCounter = counters.findCounter(scheme,
        FileSystemCounter.BYTES_WRITTEN);
  }
  if (readOpsCounter == null) {
    readOpsCounter = counters.findCounter(scheme,
        FileSystemCounter.READ_OPS);
  }
  if (largeReadOpsCounter == null) {
    largeReadOpsCounter = counters.findCounter(scheme,
        FileSystemCounter.LARGE_READ_OPS);
  }
  if (writeOpsCounter == null) {
    writeOpsCounter = counters.findCounter(scheme,
        FileSystemCounter.WRITE_OPS);
  }

  long readBytes = 0;
  long writeBytes = 0;
  long readOps = 0;
  long largeReadOps = 0;
  long writeOps = 0;
  for (FileSystem.Statistics stat : stats) {
    readBytes = readBytes + stat.getBytesRead();
    writeBytes = writeBytes + stat.getBytesWritten();
    readOps = readOps + stat.getReadOps();
    largeReadOps = largeReadOps + stat.getLargeReadOps();
    writeOps = writeOps + stat.getWriteOps();
  }
  readBytesCounter.setValue(readBytes);
  writeBytesCounter.setValue(writeBytes);
  readOpsCounter.setValue(readOps);
  largeReadOpsCounter.setValue(largeReadOps);
  writeOpsCounter.setValue(writeOps);
}
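This is essentially the bridge from FileSystem.Statistics to the per-scheme file system counters reported for a task (bytes read/written and the read, large-read, and write op counts): on each call it sums the live Statistics for the scheme and overwrites the counter values with the new totals, so the counters always reflect cumulative I/O rather than increments.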
Example #28
Source File: Task.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Gets a handle to the Statistics instance based on the scheme associated
 * with path.
 *
 * @param path the path.
 * @param conf the configuration to extract the scheme from if not part of
 *             the path.
 * @return a Statistics instance, or null if none is found for the scheme.
 */
protected static List<Statistics> getFsStatistics(Path path, Configuration conf)
    throws IOException {
  List<Statistics> matchedStats = new ArrayList<FileSystem.Statistics>();
  path = path.getFileSystem(conf).makeQualified(path);
  String scheme = path.toUri().getScheme();
  for (Statistics stats : FileSystem.getAllStatistics()) {
    if (stats.getScheme().equals(scheme)) {
      matchedStats.add(stats);
    }
  }
  return matchedStats;
}
Example #29
Source File: Utils.java From tez with Apache License 2.0 | 5 votes |
/**
 * Gets a handle to the Statistics instance based on the scheme associated
 * with path.
 *
 * @param path the path.
 * @param conf the configuration to extract the scheme from if not part of
 *             the path.
 * @return a Statistics instance, or null if none is found for the scheme.
 */
@Private
public static List<Statistics> getFsStatistics(Path path, Configuration conf)
    throws IOException {
  List<Statistics> matchedStats = new ArrayList<FileSystem.Statistics>();
  path = path.getFileSystem(conf).makeQualified(path);
  String scheme = path.toUri().getScheme();
  for (Statistics stats : FileSystem.getAllStatistics()) {
    if (stats.getScheme().equals(scheme)) {
      matchedStats.add(stats);
    }
  }
  return matchedStats;
}
Example #30
Source File: MapTask.java From big-c with Apache License 2.0 | 5 votes |
private long getOutputBytes(List<Statistics> stats) {
  if (stats == null) return 0;
  long bytesWritten = 0;
  for (Statistics stat : stats) {
    bytesWritten = bytesWritten + stat.getBytesWritten();
  }
  return bytesWritten;
}