Java Code Examples for java.io.InputStream#getClass()
The following examples show how to use
java.io.InputStream#getClass().
These examples are extracted from open source projects.
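Across the examples below, InputStream#getClass() is used for two things: as a one-line implicit null check (calling any method on a null reference throws NullPointerException), and as an exact-class test to select an optimized code path. The sketch below is a minimal illustration of both idioms; the class name, method name, and return value are assumptions made for this page, not taken from any example.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;

public class GetClassIdioms {

    static long fileSizeOrUnknown(InputStream in) throws IOException {
        // Idiom 1: getClass() as an implicit null check, as in the Jenkins and
        // ion-java examples below. Objects.requireNonNull(in) states the same
        // intent more explicitly.
        in.getClass();

        // Idiom 2: exact class comparison to pick a faster path, as in Examples
        // 4, 14 and 15. Unlike instanceof, this does not match subclasses.
        if (in.getClass() == FileInputStream.class) {
            FileChannel channel = ((FileInputStream) in).getChannel();
            return channel.size(); // file-backed stream: total size is known
        }
        return -1; // arbitrary stream: length unknown
    }
}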
Example 1
Source Project: pipeline-maven-plugin File: XmlUtilsTest.java License: MIT License | 6 votes |
@Test
public void test_listGeneratedArtifacts() throws Exception {
    InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml");
    in.getClass(); // check non null
    Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement();
    List<MavenArtifact> generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, false);
    System.out.println(generatedArtifacts);

    Assert.assertThat(generatedArtifacts.size(), Matchers.is(2)); // a jar file and a pom file are generated

    for (MavenArtifact mavenArtifact : generatedArtifacts) {
        Assert.assertThat(mavenArtifact.getGroupId(), Matchers.is("com.example"));
        Assert.assertThat(mavenArtifact.getArtifactId(), Matchers.is("my-jar"));
        if ("pom".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("pom"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.isEmptyOrNullString());
        } else if ("jar".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("jar"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.isEmptyOrNullString());
        } else {
            throw new AssertionFailedError("Unsupported type for " + mavenArtifact);
        }
    }
}
Example 2
Source Project: pipeline-maven-plugin File: XmlUtilsTest.java License: MIT License | 6 votes |
@Test
public void test_listGeneratedArtifacts_including_generated_artifacts() throws Exception {
    InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml");
    in.getClass(); // check non null
    Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement();
    List<MavenArtifact> generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, true);
    System.out.println(generatedArtifacts);

    Assert.assertThat(generatedArtifacts.size(), Matchers.is(3)); // a jar file, a sources jar, and a pom file are generated

    for (MavenArtifact mavenArtifact : generatedArtifacts) {
        Assert.assertThat(mavenArtifact.getGroupId(), Matchers.is("com.example"));
        Assert.assertThat(mavenArtifact.getArtifactId(), Matchers.is("my-jar"));
        if ("pom".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("pom"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.isEmptyOrNullString());
        } else if ("jar".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("jar"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.isEmptyOrNullString());
        } else if ("java-source".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("jar"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.is("sources"));
        } else {
            throw new AssertionFailedError("Unsupported type for " + mavenArtifact);
        }
    }
}
Example 3
Source Project: pipeline-maven-plugin File: XmlUtilsTest.java License: MIT License | 6 votes |
@Test
public void test_listGeneratedArtifacts_includeAttachedArtifacts() throws Exception {
    InputStream in = Thread.currentThread().getContextClassLoader()
        .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-include-attached-artifacts.log");
    in.getClass(); // check non null
    Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement();
    List<MavenArtifact> generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, true);
    System.out.println(generatedArtifacts);

    Assert.assertThat(generatedArtifacts.size(), Matchers.is(2)); // pom artifact plus 1 attachment

    for (MavenArtifact mavenArtifact : generatedArtifacts) {
        Assert.assertThat(mavenArtifact.getGroupId(), Matchers.is("com.example"));
        Assert.assertThat(mavenArtifact.getArtifactId(), Matchers.is("my-jar"));
        if ("pom".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("pom"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.isEmptyOrNullString());
        } else if ("ova".equals(mavenArtifact.getType())) {
            Assert.assertThat(mavenArtifact.getExtension(), Matchers.is("ova"));
            Assert.assertThat(mavenArtifact.getClassifier(), Matchers.isEmptyOrNullString());
        } else {
            throw new AssertionFailedError("Unsupported type for " + mavenArtifact);
        }
    }
}
Example 4
Source Project: joyqueue File: Files.java License: Apache License 2.0 | 5 votes |
/**
 * Copies a stream and flushes it to disk.
 *
 * @param is     input stream
 * @param os     output stream
 * @param start  start position in the source
 * @param length number of bytes to copy
 * @throws IOException
 */
public static void copy(final InputStream is, final OutputStream os, final long start, final long length) throws IOException {
    if (is == null || os == null || length == 0) {
        return;
    }
    if (is.getClass() == FileInputStream.class && os.getClass() == FileOutputStream.class) {
        // copy using the zero-copy path
        copy((FileInputStream) is, (FileOutputStream) os, start, length);
    } else {
        long bytes = 0;
        if (start > 0) {
            bytes = is.skip(start);
            if (bytes < start) {
                return;
            }
        }
        byte[] buffer = new byte[1024 * 4];
        int c = 0;
        bytes = 0;
        while (bytes < length && ((c = is.read(buffer, 0, (int) Math.min(buffer.length, length - bytes))) >= 0)) {
            os.write(buffer, 0, c);
            bytes += c;
        }
    }
}
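A possible caller of the copy(...) helper above, shown only as a sketch: the file names are invented, and Files refers to the joyqueue class from Example 4 (its import is omitted because the package path depends on the project version), not to java.nio.file.Files. Because both streams are exactly FileInputStream and FileOutputStream, the exact-class check selects the zero-copy branch; a buffered or wrapping stream would fall back to the manual read/write loop.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CopyUsage {
    public static void main(String[] args) throws IOException {
        // Hypothetical file names for illustration only.
        try (InputStream is = new FileInputStream("segment.log");
             OutputStream os = new FileOutputStream("segment.copy")) {
            // Copy 4096 bytes starting at byte 128 of the source file.
            Files.copy(is, os, 128, 4096);
        }
    }
}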
Example 5
Source Project: pxf File: ChunkRecordReader.java License: Apache License 2.0 | 5 votes |
/**
 * Translates the FSDataInputStream into a DFSInputStream.
 */
private DFSInputStream getInputStream() throws IncompatibleInputStreamException {
    InputStream inputStream = fileIn.getWrappedStream();
    if (inputStream instanceof DFSInputStream) {
        return (DFSInputStream) inputStream;
    } else {
        IOUtils.closeStream(fileIn);
        throw new IncompatibleInputStreamException(inputStream.getClass());
    }
}
Example 6
Source Project: Elasticsearch File: XContentFactory.java License: Apache License 2.0 | 5 votes |
/**
 * Guesses the content type based on the provided input stream without consuming it.
 */
public static XContentType xContentType(InputStream si) throws IOException {
    if (si.markSupported() == false) {
        throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass());
    }
    si.mark(GUESS_HEADER_LENGTH);
    try {
        final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH];
        final int read = Streams.readFully(si, firstBytes);
        return xContentType(new BytesArray(firstBytes, 0, read));
    } finally {
        si.reset();
    }
}
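Example 6 only calls si.getClass() to name the offending stream type in the error message; the real requirement is mark/reset support so the sniffed header bytes can be un-read. A caller can satisfy that by wrapping the raw stream in a BufferedInputStream, which always supports mark/reset. The sketch below assumes the org.elasticsearch.common.xcontent package used by the Elasticsearch version this example comes from, and a made-up resource name.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;

import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

public class XContentTypeGuess {
    public static void main(String[] args) throws IOException {
        // "doc.json" is a hypothetical classpath resource.
        InputStream raw = XContentTypeGuess.class.getResourceAsStream("/doc.json");
        Objects.requireNonNull(raw, "doc.json not found on classpath");
        // BufferedInputStream reports markSupported() == true, so the guard in
        // Example 6 passes and the stream position is restored after sniffing.
        InputStream markable = raw.markSupported() ? raw : new BufferedInputStream(raw);
        XContentType type = XContentFactory.xContentType(markable);
        System.out.println(type);
    }
}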
Example 7
Source Project: pipeline-maven-plugin File: XmlUtilsTest.java License: MIT License | 5 votes |
@Test
public void test_getExecutedLifecyclePhases() throws Exception {
    InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-package-jar.xml");
    in.getClass(); // check non null
    Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement();
    List<String> executedLifecyclePhases = XmlUtils.getExecutedLifecyclePhases(mavenSpyLogs);
    System.out.println(executedLifecyclePhases);
    Assert.assertThat(executedLifecyclePhases, Matchers.contains("process-resources", "compile", "process-test-resources", "test-compile", "test", "package"));
}
Example 8
Source Project: pipeline-maven-plugin File: XmlUtilsTest.java License: MIT License | 5 votes |
@Test
public void test_getArtifactDeployedEvent() throws Exception {
    InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml");
    in.getClass(); // check non null
    Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement();
    List<Element> artifactDeployedEvents = XmlUtils.getArtifactDeployedEvents(mavenSpyLogs);
    Assert.assertThat(artifactDeployedEvents.size(), Matchers.is(3));

    Element artifactDeployedEvent = XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, "/path/to/my-jar/target/my-jar-0.5-SNAPSHOT.jar");
    String repositoryUrl = XmlUtils.getUniqueChildElement(artifactDeployedEvent, "repository").getAttribute("url");
    Assert.assertThat(repositoryUrl, Matchers.is("https://nexus.beescloud.com/content/repositories/snapshots/"));
}
Example 9
Source Project: pipeline-maven-plugin File: XmlUtilsTest.java License: MIT License | 5 votes |
@Test
public void test_getExecutionEventsByPlugin() throws Exception {
    InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml");
    in.getClass(); // check non null
    Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement();
    List<Element> executionEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogs, "org.apache.maven.plugins", "maven-deploy-plugin", "deploy", "MojoSucceeded", "MojoFailed");

    Assert.assertThat(executionEvents.size(), Matchers.is(1));
    Element deployExecutionEvent = executionEvents.get(0);
    Assert.assertThat(deployExecutionEvent.getAttribute("type"), Matchers.is("MojoSucceeded"));
}
Example 10
Source Project: ion-java File: _Private_IonReaderFactory.java License: Apache License 2.0 | 5 votes |
private static UnifiedInputStreamX makeUnifiedStream(InputStream in) throws IOException {
    in.getClass(); // Force NPE

    // TODO avoid multiple wrapping streams, use the UIS for the pushback
    in = IonStreamUtils.unGzip(in);
    UnifiedInputStreamX uis = UnifiedInputStreamX.makeStream(in);
    return uis;
}
Example 11
Source Project: hbase File: FSDataInputStreamWrapper.java License: Apache License 2.0 | 5 votes |
/**
 * This will free sockets and file descriptors held by the stream only when the stream implements
 * org.apache.hadoop.fs.CanUnbuffer. NOT THREAD SAFE. Must be called only when all the clients
 * using this stream to read the blocks have finished reading. If by chance the stream is
 * unbuffered and there are clients still holding this stream for read then on next client read
 * request a new socket will be opened by Datanode without client knowing about it and will serve
 * its read request. Note: If this socket is idle for some time then the DataNode will close the
 * socket and the socket will move into CLOSE_WAIT state and on the next client request on this
 * stream, the current socket will be closed and a new socket will be opened to serve the
 * requests.
 */
@SuppressWarnings({ "rawtypes" })
public void unbuffer() {
    FSDataInputStream stream = this.getStream(this.shouldUseHBaseChecksum());
    if (stream != null) {
        InputStream wrappedStream = stream.getWrappedStream();
        // CanUnbuffer interface was added as part of HDFS-7694 and the fix is available in Hadoop
        // 2.6.4+ and 2.7.1+ versions only so check whether the stream object implements the
        // CanUnbuffer interface or not and based on that call the unbuffer api.
        final Class<? extends InputStream> streamClass = wrappedStream.getClass();
        if (this.instanceOfCanUnbuffer == null) {
            // To ensure we compute whether the stream is instance of CanUnbuffer only once.
            this.instanceOfCanUnbuffer = false;
            if (wrappedStream instanceof CanUnbuffer) {
                this.unbuffer = (CanUnbuffer) wrappedStream;
                this.instanceOfCanUnbuffer = true;
            }
        }
        if (this.instanceOfCanUnbuffer) {
            try {
                this.unbuffer.unbuffer();
            } catch (UnsupportedOperationException e) {
                if (isLogTraceEnabled) {
                    LOG.trace("Failed to invoke 'unbuffer' method in class " + streamClass
                        + " . So there may be the stream does not support unbuffering.", e);
                }
            }
        } else {
            if (isLogTraceEnabled) {
                LOG.trace("Failed to find 'unbuffer' method in class " + streamClass);
            }
        }
    }
}
Example 12
Source Project: datawave File: LongLineEventRecordReader.java License: Apache License 2.0 | 4 votes |
/**
 * @param genericSplit
 * @param context
 * @throws IOException
 */
public void initializeLineReader(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
    FileSplit split = (FileSplit) genericSplit;
    Configuration job = context.getConfiguration();
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
        in = new LfLineReader(codec.createInputStream(fileIn), job);
        in.setNewLineIncluded(newLineIncluded);
        end = Long.MAX_VALUE;
    } else {
        if (start != 0) {
            skipFirstLine = true;
            --start;
            fileIn.seek(start);
        }

        // Hadoop CodecFactory only checks the file suffix, let's double check for gzip since some data producers
        // may not append .gz to their files.
        InputStream iStream = GzipDetectionUtil.decompressTream(fileIn);
        Class streamClass = iStream.getClass();
        if (GZIPInputStream.class == streamClass) {
            end = Long.MAX_VALUE;
        }

        in = new LfLineReader(iStream, job);
        in.setNewLineIncluded(newLineIncluded);
    }
    if (skipFirstLine) {
        // skip first line and re-establish "start".
        start += in.readLine(new Text(), 0, (int) Math.min(Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}
Example 13
Source Project: gwt-eclipse-plugin File: ContentDescriberUtilities.java License: Eclipse Public License 1.0 | 4 votes |
/**
 * Returns an {@link IFile} for the file backing an input stream. This method
 * is tailored to work with
 * {@link org.eclipse.core.runtime.content.IContentDescriber}, using it
 * elsewhere will likely not work.
 *
 * @return the filename, or null if it could not be determined
 */
public static IFile resolveFileFromInputStream(InputStream contentInputStream) {
    try {
        if (!(contentInputStream instanceof LazyInputStream)) {
            return null;
        }

        Class<?> c = contentInputStream.getClass();
        Field in = c.getDeclaredField("in");
        in.setAccessible(true);
        Object lazyFileInputStreamObj = in.get(contentInputStream);
        if (lazyFileInputStreamObj == null) {
            return null;
        }

        if (!Class.forName(
            "org.eclipse.core.internal.resources.ContentDescriptionManager$LazyFileInputStream").isAssignableFrom(
            lazyFileInputStreamObj.getClass())) {
            return null;
        }

        Field target = lazyFileInputStreamObj.getClass().getDeclaredField("target");
        target.setAccessible(true);
        Object fileStoreObj = target.get(lazyFileInputStreamObj);
        if (fileStoreObj == null) {
            return null;
        }

        if (!(fileStoreObj instanceof IFileStore)) {
            return null;
        }

        IFileStore fileStore = (IFileStore) fileStoreObj;
        String name = fileStore.getName();
        if (name == null || name.length() == 0) {
            return null;
        }

        IFile[] files = ResourcesPlugin.getWorkspace().getRoot().findFilesForLocationURI(fileStore.toURI());
        return files.length > 0 ? files[0] : null;
    } catch (Throwable e) {
        // Ignore on purpose
    }
    return null;
}
Example 14
Source Project: Bytecoder File: Channels.java License: Apache License 2.0 | 3 votes |
/**
 * Constructs a channel that reads bytes from the given stream.
 *
 * <p> The resulting channel will not be buffered; it will simply redirect
 * its I/O operations to the given stream. Closing the channel will in
 * turn cause the stream to be closed. </p>
 *
 * @param in
 *        The stream from which bytes are to be read
 *
 * @return A new readable byte channel
 */
public static ReadableByteChannel newChannel(InputStream in) {
    Objects.requireNonNull(in, "in");

    if (in.getClass() == FileInputStream.class) {
        return ((FileInputStream) in).getChannel();
    }

    return new ReadableByteChannelImpl(in);
}
Example 15
Source Project: openjdk-jdk9 File: Channels.java License: GNU General Public License v2.0 | 3 votes |
/**
 * Constructs a channel that reads bytes from the given stream.
 *
 * <p> The resulting channel will not be buffered; it will simply redirect
 * its I/O operations to the given stream. Closing the channel will in
 * turn cause the stream to be closed. </p>
 *
 * @param in
 *        The stream from which bytes are to be read
 *
 * @return A new readable byte channel
 */
public static ReadableByteChannel newChannel(InputStream in) {
    Objects.requireNonNull(in, "in");

    if (in.getClass() == FileInputStream.class) {
        return ((FileInputStream) in).getChannel();
    }

    return new ReadableByteChannelImpl(in);
}
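Examples 14 and 15 are the JDK's own use of the exact-class test: a plain FileInputStream already owns a FileChannel, so Channels.newChannel returns it directly instead of allocating a wrapper. A short usage sketch of the public API follows; the file name is made up.

import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;

public class NewChannelDemo {
    public static void main(String[] args) throws IOException {
        // Exactly FileInputStream: newChannel returns the stream's own FileChannel.
        try (FileInputStream fis = new FileInputStream("data.bin")) { // hypothetical file
            ReadableByteChannel ch = Channels.newChannel(fis);
            System.out.println(ch instanceof FileChannel); // true
        }

        // Any other stream (including FileInputStream subclasses) gets a wrapper channel.
        ReadableByteChannel wrapped = Channels.newChannel(new ByteArrayInputStream(new byte[] {1, 2, 3}));
        ByteBuffer buf = ByteBuffer.allocate(3);
        wrapped.read(buf);
        System.out.println(buf.position()); // 3 bytes read
    }
}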