Java Code Examples for org.apache.commons.codec.Encoder

The following examples show how to use org.apache.commons.codec.Encoder. These examples are extracted from open source projects.
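
As a quick orientation before the project examples, here is a minimal, self-contained sketch of how the Encoder interface behaves. This sketch was written for this page rather than taken from the projects below: Encoder is the root interface (encode(Object) returning Object), with StringEncoder implementations such as Soundex and BinaryEncoder implementations such as Base64.

import org.apache.commons.codec.Encoder;
import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.language.Soundex;

public class EncoderDemo {
    public static void main(String[] args) throws EncoderException {
        // StringEncoder implementations (e.g. Soundex) map String -> String.
        Encoder phonetic = new Soundex();
        // BinaryEncoder implementations (e.g. Base64) map byte[] -> byte[].
        Encoder binary = new Base64();

        System.out.println(phonetic.encode("Robert"));        // prints R163
        byte[] encoded = (byte[]) binary.encode("hi".getBytes());
        System.out.println(new String(encoded));              // prints aGk=
    }
}
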
Example 1
Source Project: lucene-solr   Source File: PhoneticFilterFactory.java    License: Apache License 2.0
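This factory method from Solr's PhoneticFilterFactory instantiates a fresh Encoder on every call via reflection, and optionally configures a maximum code length through a cached setter Method.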
/** Must be thread-safe. */
protected Encoder getEncoder() {
  // Unfortunately, Commons-Codec doesn't offer any thread-safety guarantees so we must play it safe and instantiate
  // every time.  A simple benchmark showed the cost to be negligible.
  try {
    Encoder encoder = clazz.getConstructor().newInstance();
    // Try to set the maxCodeLength
    if (maxCodeLength != null && setMaxCodeLenMethod != null) {
      setMaxCodeLenMethod.invoke(encoder, maxCodeLength);
    }
    return encoder;
  } catch (Exception e) {
    final Throwable t = (e instanceof InvocationTargetException) ? e.getCause() : e;
    throw new IllegalArgumentException("Error initializing encoder: " + name + " / " + clazz, t);
  }
}
 
Example 2
Source Project: lucene-solr   Source File: TestPhoneticFilter.java    License: Apache License 2.0
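This test verifies that PhoneticFilter handles an empty term gracefully for several phonetic encoders shipped with commons-codec.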
public void testEmptyTerm() throws IOException {
  Encoder[] encoders = new Encoder[] {
      new Metaphone(), new DoubleMetaphone(), new Soundex(), new RefinedSoundex(), new Caverphone2()
  };
  for (final Encoder e : encoders) {
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer tokenizer = new KeywordTokenizer();
        return new TokenStreamComponents(tokenizer, new PhoneticFilter(tokenizer, e, random().nextBoolean()));
      }
    };
    checkOneTerm(a, "", "");
    a.close();
  }
}
 
Example 3
Source Project: archiva   Source File: RssFeedServletTest.java    License: Apache License 2.0
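This Archiva servlet test builds a Basic authentication header with a Base64 Encoder and asserts that invalid credentials produce a 401 Unauthorized response.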
@Test
public void testInvalidAuthenticationRequest()
    throws Exception
{

    MockHttpServletRequest request = new MockHttpServletRequest();
    request.setRequestURI( "/feeds/unauthorized-repo" );
    request.addHeader( "User-Agent", "Apache Archiva unit test" );
    request.setMethod( "GET" );

    Encoder encoder = new Base64();
    String userPass = "unauthUser:unauthPass";
    String encodedUserPass = new String( (byte[]) encoder.encode( userPass.getBytes() ) );
    request.addHeader( "Authorization", "BASIC " + encodedUserPass );

    MockHttpServletResponse mockHttpServletResponse = new MockHttpServletResponse();
    rssFeedServlet.doGet( request, mockHttpServletResponse );

    assertEquals( HttpServletResponse.SC_UNAUTHORIZED, mockHttpServletResponse.getStatus() );

}
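As a side note (not part of the original test): when writing new code against commons-codec 1.5 or later, the static helper Base64.encodeBase64String produces the same string without the Object-to-byte[] cast:

    String encodedUserPass = Base64.encodeBase64String( userPass.getBytes() );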
 
Example 4
Source Project: appengine-tck   Source File: LoggingTestBase.java    License: Apache License 2.0
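This test-base method assembles a ShrinkWrap WebArchive for the logging tests, explicitly bundling the commons-codec encoder and decoder classes needed for Base64.isBase64() along with several test JSPs and logging configuration files.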
protected static WebArchive getDefaultDeployment(TestContext context) {
    context.setAppEngineWebXmlFile("appengine-web-with-logging-properties.xml");
    WebArchive war = getTckDeployment(context);
    war.addClasses(LoggingTestBase.class, TestBase.class)
        // classes for Base64.isBase64()
        .addClasses(Base64.class, BaseNCodec.class)
        .addClasses(BinaryEncoder.class, Encoder.class)
        .addClasses(BinaryDecoder.class, Decoder.class)
        .addClasses(EncoderException.class, DecoderException.class)
        .addAsWebInfResource("currentTimeUsec.jsp")
        .addAsWebInfResource("doNothing.jsp")
        .addAsWebInfResource("storeTestData.jsp")
        .addAsWebInfResource("throwException.jsp")
        .addAsWebInfResource("log4j-test.properties")
        .addAsWebInfResource("logging-all.properties");
    return war;
}
 
Example 5
Source Project: lucene-solr   Source File: PhoneticFilterFactory.java    License: Apache License 2.0
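This helper resolves an encoder name to a Class: a bare name without a dot is qualified with the package containing the bundled encoders, and a lookup failure is rethrown with the set of known registry names to aid diagnosis.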
private Class<? extends Encoder> resolveEncoder(String name, ResourceLoader loader) {
  String lookupName = name;
  if (name.indexOf('.') == -1) {
    lookupName = PACKAGE_CONTAINING_ENCODERS + name;
  }
  try {
    return loader.newInstance(lookupName, Encoder.class).getClass();
  } catch (RuntimeException e) {
    throw new IllegalArgumentException("Error loading encoder '" + name + "': must be full class name or one of " + registry.keySet(), e);
  }
}
 
Example 6
Source Project: lucene-solr   Source File: TestPhoneticFilter.java    License: Apache License 2.0
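A test helper that runs a given Encoder over whitespace-tokenized input through PhoneticFilter and checks the resulting token stream against the expected terms.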
static void assertAlgorithm(Encoder encoder, boolean inject, String input,
    String[] expected) throws Exception {
  Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
  tokenizer.setReader(new StringReader(input));
  PhoneticFilter filter = new PhoneticFilter(tokenizer, encoder, inject);
  assertTokenStreamContents(filter, expected);
}
 
Example 7
Source Project: appengine-tck   Source File: DatastoreHelperTestBase.java    License: Apache License 2.0
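Like Example 4, this builds a WebArchive that packages the commons-codec encoder and decoder classes alongside the test base class.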
protected static WebArchive getHelperDeployment() {
    WebArchive war = getTckDeployment();
    war.addClass(DatastoreHelperTestBase.class)
        .addClasses(Base64.class, BaseNCodec.class)
        .addClasses(BinaryEncoder.class, Encoder.class)
        .addClasses(BinaryDecoder.class, Decoder.class)
        .addClasses(EncoderException.class, DecoderException.class);
    return war;
}
 
Example 8
Source Project: lucene-solr   Source File: PhoneticFilter.java    License: Apache License 2.0
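The PhoneticFilter constructor itself: it simply stores the encoder and the inject flag for use during token processing.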
/** Creates a PhoneticFilter with the specified encoder, either
 *  adding encoded forms as synonyms (<code>inject=true</code>) or
 *  replacing the original tokens with them (<code>inject=false</code>).
 */
public PhoneticFilter(TokenStream in, Encoder encoder, boolean inject) {
  super(in);
  this.encoder = encoder;
  this.inject = inject;   
}
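For this constructor in action, see Examples 2 and 6 above, where the filter wraps a tokenizer inside an analyzer chain or a test assertion.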
 
Example 9
Source Project: kite   Source File: TransformTask.java    License: Apache License 2.0
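This Kite task runs a transform as a Crunch pipeline; note the use of Encoder.class as a marker to add the commons-codec jar to the job classpath, alongside jars for logging, compression, DataNucleus, thrift, and the Hive metastore.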
public PipelineResult run() throws IOException {
  boolean isLocal = (isLocal(from.getDataset()) || isLocal(to.getDataset()));
  if (isLocal) {
    // copy to avoid making changes to the caller's configuration
    Configuration conf = new Configuration(getConf());
    conf.set("mapreduce.framework.name", "local");
    setConf(conf);
  }

  if (isHive(from) || isHive(to)) {
    setConf(addHiveDelegationToken(getConf()));

    // add jars needed for metastore interaction to the classpath
    if (!isLocal) {
      Class<?> fb303Class, thriftClass;
      try {
        // attempt to use libfb303 and libthrift 0.9.2, the release in which async support was added
        fb303Class = Class.forName(
            "com.facebook.fb303.FacebookService.AsyncProcessor");
        thriftClass = Class.forName(
            "org.apache.thrift.TBaseAsyncProcessor");
      } catch (ClassNotFoundException e) {
        try {
          // fall back to 0.9.0 or earlier
          fb303Class = Class.forName(
              "com.facebook.fb303.FacebookBase");
          thriftClass = Class.forName(
              "org.apache.thrift.TBase");
        } catch (ClassNotFoundException real) {
          throw new DatasetOperationException(
              "Cannot find thrift dependencies", real);
        }
      }

      TaskUtil.configure(getConf())
          .addJarForClass(Encoder.class) // commons-codec
          .addJarForClass(Log.class) // commons-logging
          .addJarForClass(CompressorInputStream.class) // commons-compress
          .addJarForClass(ApiAdapter.class) // datanucleus-core
          .addJarForClass(JDOAdapter.class) // datanucleus-api-jdo
          .addJarForClass(SQLQuery.class) // datanucleus-rdbms
          .addJarForClass(JDOHelper.class) // jdo-api
          .addJarForClass(Transaction.class) // jta
          .addJarForClass(fb303Class) // libfb303
          .addJarForClass(thriftClass) // libthrift
          .addJarForClass(HiveMetaStore.class) // hive-metastore
          .addJarForClass(HiveConf.class); // hive-exec
    }
  }

  PType<T> toPType = ptype(to);
  MapFn<T, T> validate = new CheckEntityClass<T>(to.getType());

  Pipeline pipeline = new MRPipeline(getClass(), getConf());

  PCollection<T> collection = pipeline.read(CrunchDatasets.asSource(from))
      .parallelDo(transform, toPType).parallelDo(validate, toPType);

  if (compact) {
    // the transform must be run before partitioning
    collection = CrunchDatasets.partition(collection, to, numWriters, numPartitionWriters);
  }

  pipeline.write(collection, CrunchDatasets.asTarget(to), mode);

  PipelineResult result = pipeline.done();

  StageResult sr = Iterables.getFirst(result.getStageResults(), null);
  if (sr != null && MAP_INPUT_RECORDS != null) {
    this.count = sr.getCounterValue(MAP_INPUT_RECORDS);
  }

  return result;
}