Java Code Examples for org.pentaho.di.core.Const#isEmpty()

The following examples show how to use org.pentaho.di.core.Const#isEmpty(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MongoDbModel.java    From pentaho-mongodb-plugin with Apache License 2.0 6 votes vote down vote up
public void setCollections( Vector<String> collections ) {
  // Keep the previous value so property-change listeners see the transition.
  Collection<String> prevVal = this.collections;
  this.collections = collections;

  // The currently selected collection may have been typed in manually by the
  // user, so make sure it appears in the new list even if the server did not
  // report it.
  // TODO: check behavior in Spoon...
  boolean haveSelection = !Const.isEmpty( collection );
  if ( haveSelection && !collections.contains( collection ) ) {
    collections.add( collection );
  }

  firePropertyChange( "collection", prevVal, collections );
}
 
Example 2
Source File: PentahoTableInputFormat.java    From pentaho-hadoop-shims with Apache License 2.0 6 votes vote down vote up
/**
 * Validates that the job configuration names a reachable HBase table and
 * lists at least one column/column family.
 *
 * @param job the job configuration to validate
 * @throws IOException if the table name is missing, the table cannot be
 *                     reached, or no columns are configured
 */
public void validateInput( JobConf job ) throws IOException {
  // expecting a table name
  String tableName = job.get( INPUT_TABLE );
  if ( Const.isEmpty( tableName ) ) {
    throw new IOException( "expecting one table name" );
  }

  // connected to table?
  if ( !checkHBaseTable() ) {
    throw new IOException( "could not connect to table '"
      + tableName + "'" );
  }

  // expecting at least one column/column family. Use Const.isEmpty() for
  // consistency with the table-name check above; it covers both the null
  // and the zero-length case that were previously tested by hand.
  String colArg = job.get( COLUMN_LIST );
  if ( Const.isEmpty( colArg ) ) {
    throw new IOException( "expecting at least one column/column family" );
  }
}
 
Example 3
Source File: CPythonScriptExecutorDialog.java    From pentaho-cpython-plugin with Apache License 2.0 6 votes vote down vote up
protected void checkWidgets() {
  // The script comes either from a file (editable path widget) or from the
  // inline editor -- enable exactly one of the two input mechanisms.
  boolean loadFromFile = wbLoadScriptFile.getSelection();

  wtvScriptLocation.setEnabled( loadFromFile );
  wstcScriptEditor.setEnabled( !loadFromFile );
  wtvScriptLocation.setEditable( loadFromFile );
  // Grey out the editor background while it is inactive.
  if ( loadFromFile ) {
    wstcScriptEditor.getStyledText().setBackground( GUIResource.getInstance().getColorDemoGray() );
  } else {
    wstcScriptEditor.getStyledText().setBackground( GUIResource.getInstance().getColorWhite() );
  }
  wbScriptBrowse.setEnabled( loadFromFile );

  // "Get fields" / "include row index" only make sense when exactly one
  // python variable is being extracted.
  String currVars = wtvPyVarsToGet.getText();
  if ( !Const.isEmpty( currVars ) ) {
    boolean singleVar = stringToList( currVars ).size() == 1;
    wbGetFields.setEnabled( singleVar );
    wbIncludeRowIndex.setEnabled( singleVar );
  }
}
 
Example 4
Source File: MongoDbModel.java    From pentaho-mongodb-plugin with Apache License 2.0 6 votes vote down vote up
public void setDbNames( Vector<String> dbs ) {
  // Keep the previous value so property-change listeners see the transition.
  Collection<String> prevVal = this.dbNames;
  this.dbNames = dbs;

  // The currently selected database may have been typed in manually by the
  // user, so make sure it appears in the new list even if the server did not
  // report it.
  // TODO: check behavior in Spoon...
  boolean haveSelection = !Const.isEmpty( dbName );
  if ( haveSelection && !dbNames.contains( dbName ) ) {
    dbNames.add( dbName );
  }

  firePropertyChange( "database", prevVal, dbs );
}
 
Example 5
Source File: SSTableOutputDialog.java    From learning-hadoop with Apache License 2.0 6 votes vote down vote up
protected void getData() {
    // Copy each non-empty meta value into its dialog widget; empty values
    // leave the corresponding widget untouched.
    String yamlPath = m_currentMeta.getYamlPath();
    if (!Const.isEmpty(yamlPath)) {
      m_yamlText.setText(yamlPath);
    }

    String directory = m_currentMeta.getDirectory();
    if (!Const.isEmpty(directory)) {
      m_directoryText.setText(directory);
    }

    String keyspace = m_currentMeta.getCassandraKeyspace();
    if (!Const.isEmpty(keyspace)) {
      m_keyspaceText.setText(keyspace);
    }

    String columnFamily = m_currentMeta.getColumnFamilyName();
    if (!Const.isEmpty(columnFamily)) {
      m_columnFamilyText.setText(columnFamily);
    }

    String keyField = m_currentMeta.getKeyField();
    if (!Const.isEmpty(keyField)) {
      m_keyFieldCombo.setText(keyField);
    }

    String bufferSize = m_currentMeta.getBufferSize();
    if (!Const.isEmpty(bufferSize)) {
      m_bufferSizeText.setText(bufferSize);
    }
  }
 
Example 6
Source File: CPythonScriptExecutorData.java    From pentaho-cpython-plugin with Apache License 2.0 6 votes vote down vote up
public static void initPython( VariableSpace vars, LogChannelInterface log ) throws KettleException {
  // Nothing to do if a python session is already up.
  if ( PythonSession.pythonAvailable() ) {
    return;
  }

  // Try to bring a session up.
  PythonSession.initSession( "python", vars, log );
  if ( PythonSession.pythonAvailable() ) {
    return;
  }

  // Still unavailable: report the failure, appending the environment-check
  // details when we have them.
  String problem = BaseMessages.getString( PKG, "CPythonScriptExecutor.Error.PythonInitializationProblem" );
  String pyCheckResults = PythonSession.getPythonEnvCheckResults();
  if ( Const.isEmpty( pyCheckResults ) ) {
    throw new KettleException( problem );
  }
  throw new KettleException( problem + ":\n\n" + pyCheckResults );
}
 
Example 7
Source File: MongoArrayExpansion.java    From pentaho-mongodb-plugin with Apache License 2.0 5 votes vote down vote up
/**
 * Initialize this field by parsing the path etc.
 *
 * @throws KettleException if a problem occurs
 */
public void init() throws KettleException {
  if ( Const.isEmpty( m_expansionPath ) ) {
    throw new KettleException( BaseMessages.getString( PKG, "MongoDbInput.ErrorMessage.NoPathSet" ) ); //$NON-NLS-1$
  }
  // Already initialized -- nothing more to do.
  if ( m_pathParts != null ) {
    return;
  }

  // Normalize the configured path, then break it into dot-separated parts.
  String expansionPath = MongoDbInputData.cleansePath( m_expansionPath );
  m_pathParts = new ArrayList<String>();
  for ( String part : expansionPath.split( "\\." ) ) { //$NON-NLS-1$
    m_pathParts.add( part );
  }

  String first = m_pathParts.get( 0 );
  if ( first.equals( "$" ) ) { //$NON-NLS-1$
    // A lone "$" is just the root record indicator -- drop it.
    m_pathParts.remove( 0 );
  } else if ( first.startsWith( "$[" ) ) { //$NON-NLS-1$
    // An array at the root: keep the "[...]" part, strip the leading "$".
    m_pathParts.set( 0, first.substring( 1 ) );
  }
  m_tempParts = new ArrayList<String>();

  // Push resolved output-row indexes down into any sub fields.
  if ( m_subFields != null ) {
    for ( MongoField f : m_subFields ) {
      f.init( m_outputRowMeta.indexOfValue( f.m_fieldName ) );
    }
  }
}
 
Example 8
Source File: MongoDbOutputData.java    From pentaho-mongodb-plugin with Apache License 2.0 5 votes vote down vote up
/**
 * Determines the top level structure of the outgoing Mongo document from the user-specified field paths. This can be
 * either RECORD ( for a top level structure that is an object), ARRAY or INCONSISTENT (if the user has some field
 * paths that start with an array and some that start with an object).
 *
 * @param fieldDefs the list of document field paths
 * @param vars      environment variables
 * @return the top level structure
 */
/**
 * Determines the top level structure of the outgoing Mongo document from the
 * user-specified field paths: RECORD (object at the top), ARRAY, or
 * INCONSISTENT when the paths disagree.
 *
 * @param fieldDefs the list of document field paths
 * @param vars      environment variables
 * @return the top level structure
 * @throws KettleException if no field paths are defined
 */
protected static MongoTopLevel checkTopLevelConsistency( List<MongoDbOutputMeta.MongoField> fieldDefs,
                                                         VariableSpace vars ) throws KettleException {

  if ( fieldDefs == null || fieldDefs.size() == 0 ) {
    throw new KettleException( BaseMessages.getString( PKG, "MongoDbOutput.Messages.Error.NoMongoPathsDefined" ) );
  }

  // Count how many paths start as an array vs. as a record/object. An empty
  // (or all-record) path counts as a record.
  int numRecords = 0;
  int numArrays = 0;
  for ( MongoDbOutputMeta.MongoField field : fieldDefs ) {
    String mongoPath = vars.environmentSubstitute( field.m_mongoDocPath, true );
    if ( !Const.isEmpty( mongoPath ) && mongoPath.startsWith( "[" ) ) { //$NON-NLS-1$
      numArrays++;
    } else {
      numRecords++;
    }
  }

  // A mix of both kinds means the user's paths disagree about the top level.
  if ( numRecords < fieldDefs.size() && numArrays < fieldDefs.size() ) {
    return MongoTopLevel.INCONSISTENT;
  }
  return numRecords > 0 ? MongoTopLevel.RECORD : MongoTopLevel.ARRAY;
}
 
Example 9
Source File: CassandraOutputMeta.java    From learning-hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Reads this step's configuration from the repository.
 *
 * @param rep       the repository to read from
 * @param id_step   the id of this step in the repository
 * @param databases unused here; part of the readRep contract
 * @param counters  unused here; part of the readRep contract
 * @throws KettleException if reading an attribute fails
 */
public void readRep(Repository rep, ObjectId id_step,
    List<DatabaseMeta> databases, Map<String, Counter> counters)
    throws KettleException {
  // Connection details for the Cassandra node and (optional) schema host.
  m_cassandraHost = rep.getStepAttributeString(id_step, 0, "cassandra_host");
  m_cassandraPort = rep.getStepAttributeString(id_step, 0, "cassandra_port");
  m_schemaHost = rep.getStepAttributeString(id_step, 0, "schema_host");
  m_schemaPort = rep.getStepAttributeString(id_step, 0, "schema_port");
  m_socketTimeout = rep.getStepAttributeString(id_step, 0, "socket_timeout");
  m_username = rep.getStepAttributeString(id_step, 0, "username");
  m_password = rep.getStepAttributeString(id_step, 0, "password");
  // The stored password may be encrypted; decrypt it after reading.
  if (!Const.isEmpty(m_password)) {
    m_password = Encr.decryptPasswordOptionallyEncrypted(m_password);
  }
  // Target keyspace / column family and write tuning options.
  m_cassandraKeyspace = rep.getStepAttributeString(id_step, 0,
      "cassandra_keyspace");
  m_columnFamily = rep.getStepAttributeString(id_step, 0, "column_family");
  m_keyField = rep.getStepAttributeString(id_step, 0, "key_field");
  m_consistency = rep.getStepAttributeString(id_step, 0, "consistency");
  m_batchSize = rep.getStepAttributeString(id_step, 0, "batch_size");
  m_cqlBatchTimeout = rep.getStepAttributeString(id_step, 0,
      "cql_batch_timeout");
  m_cqlSubBatchSize = rep.getStepAttributeString(id_step, 0,
      "cql_sub_batch_size");

  // Schema-management flags.
  m_createColumnFamily = rep.getStepAttributeBoolean(id_step, 0,
      "create_column_family");
  m_useCompression = rep.getStepAttributeBoolean(id_step, 0,
      "use_compression");
  m_insertFieldsNotInMeta = rep.getStepAttributeBoolean(id_step, 0,
      "insert_fields_not_in_meta");
  m_updateCassandraMeta = rep.getStepAttributeBoolean(id_step, 0,
      "update_cassandra_meta");
  m_truncateColumnFamily = rep.getStepAttributeBoolean(id_step, 0,
      "truncate_column_family");

  // CQL statements to execute before writing, if any.
  m_aprioriCQL = rep.getStepAttributeString(id_step, 0, "apriori_cql");

  m_useThriftIO = rep.getStepAttributeBoolean(id_step, 0, "use_thrift_io");
  asIndexColumn = rep.getStepAttributeBoolean(id_step, 0, "asIndexColumn");
}
 
Example 10
Source File: MongoDbModel.java    From pentaho-mongodb-plugin with Apache License 2.0 5 votes vote down vote up
/**
 * Retrieve the list of database names from MongoDB based on what the user entered for hostname, port,etc.
 * NOTE: Much of this could move to the MongoDbInputData class, as it is copied almost verbatim from the
 * Spoon MongoDbInputDialog class.
 *
 * @return Vector<String> list of database names
 * @throws Exception Should anything go wrong connecting to MongoDB, it will be reported with this exception
 */
public Vector<String> getDatabaseNamesFromMongo() throws Exception {
  Vector<String> dbs = new Vector<String>();

  // Cannot connect without at least a hostname -- log and bail out quietly.
  if ( Const.isEmpty( hostname ) ) {
    log.logBasic( "Fetching database names aborted. Missing hostname." );
    return dbs;
  }

  final MongoDbInputMeta meta = new MongoDbInputMeta();
  final TransMeta transMeta = new TransMeta();
  saveMeta( meta );
  try {
    MongoClientWrapper wrapper = MongoWrapperUtil.createMongoClientWrapper( meta, transMeta, log );
    List<String> dbNames = null;
    try {
      dbNames = wrapper.getDatabaseNames();
    } finally {
      // Always release the client connection, even if the fetch failed.
      wrapper.dispose();
    }

    dbs.addAll( dbNames );
    return dbs;
  } catch ( Exception e ) {
    // FIX: previously 'meta' was passed here instead of the caught
    // exception, so the actual failure never reached the log.
    log.logError( "Unexpected error retrieving database names from MongoDb. Check your connection details.", e );
    throw new MongoDbException(
        "Unexpected error retrieving database names from MongoDb. Check your connection details.", e );
  }
}
 
Example 11
Source File: CPythonScriptExecutorMeta.java    From pentaho-cpython-plugin with Apache License 2.0 5 votes vote down vote up
protected void stringToVarList( String list ) {
  // Rebuild the variable list from a comma-separated string, skipping
  // entries that are blank after trimming.
  m_pyVarsToGet.clear();
  for ( String entry : list.split( "," ) ) {
    String trimmed = entry.trim();
    if ( !Const.isEmpty( trimmed ) ) {
      m_pyVarsToGet.add( trimmed );
    }
  }
}
 
Example 12
Source File: AvroNestedReader.java    From pentaho-hadoop-shims with Apache License 2.0 5 votes vote down vote up
/**
 * Initialize this field by parsing the path etc.
 *
 * @throws KettleException if a problem occurs
 */
public void init() throws KettleException {
  if ( Const.isEmpty( m_expansionPath ) ) {
    throw new KettleException( BaseMessages
      .getString( PKG, "AvroInput.Error.NoPathSet" ) );
  }
  // Already initialized -- nothing more to do.
  if ( m_pathParts != null ) {
    return;
  }

  // Normalize the configured path, then break it into dot-separated parts.
  String expansionPath = AvroNestedReader.cleansePath( m_expansionPath );
  m_pathParts = new ArrayList<String>();
  for ( String part : expansionPath.split( "\\." ) ) {
    m_pathParts.add( part );
  }

  String first = m_pathParts.get( 0 );
  if ( first.equals( "$" ) ) {
    // A lone "$" is just the root record indicator -- drop it.
    m_pathParts.remove( 0 );
  } else if ( first.startsWith( "$[" ) ) {
    // An array at the root: keep the "[...]" part, strip the leading "$".
    m_pathParts.set( 0, first.substring( 1 ) );
  }
  m_tempParts = new ArrayList<String>();

  // Push resolved output-row indexes down into any sub fields.
  if ( m_subFields != null ) {
    for ( AvroInputField f : m_subFields ) {
      fieldInit( f, m_outputRowMeta.indexOfValue( f.getPentahoFieldName() ) );
    }
  }
}
 
Example 13
Source File: MongoDbInputXulDialog.java    From pentaho-mongodb-plugin with Apache License 2.0 4 votes vote down vote up
/**
 * This method is invoked from the XUL definition; bound to the "fields" button on the Fields tab.
 */
/**
 * Invoked from the XUL definition; bound to the "fields" button on the
 * Fields tab. Validates the connection settings, then retrieves the document
 * fields from MongoDB using a user-chosen merge strategy.
 */
public void getDocumentFieldsFromMongo() {

  // Validate the minimum connection settings before contacting MongoDB.
  if ( Const.isEmpty( model.getHostnames() ) ) {
    showMessage( "At least one host name is required. Return to the configure tab, enter a host name and try again.",
        "MongoDb Error" );
    return;
  }
  if ( Const.isEmpty( model.getDbName() ) ) {
    showMessage( "A database name is required. Return to the options tab, enter a database name and try again.",
        "MongoDb Error" );
    return;
  }
  if ( Const.isEmpty( model.getCollection() ) ) {
    showMessage( "A collection name is required. Return to the options tab, enter a collection name and try again.",
        "MongoDb Error" );
    return;
  }

  try {
    // Merge strategies: 0 = Add new, 1 = Add all, 2 = Clear and add all,
    // 3 = Cancel. Default to "Add all" unless fields already exist, in which
    // case ask the user what to do with them.
    int mergeStrategy = model.getFields().size() > 0 ? this.showClearDataMessage() : 1;

    // Anything other than Cancel (3) or a dismissed dialog (-1) proceeds.
    if ( mergeStrategy > -1 && mergeStrategy < 3 ) {
      model.getFieldsFromMongo( mergeStrategy );
    }
  } catch ( Exception e ) {
    showMessage( e.getMessage(), "MongoDb Error" );
  }
}
 
Example 14
Source File: MongoDbOutputMeta.java    From pentaho-mongodb-plugin with Apache License 2.0 4 votes vote down vote up
/**
 * Reads this step's configuration (connection settings, write options,
 * field mappings and index definitions) from the repository.
 *
 * @param rep       the repository to read from
 * @param metaStore the metastore (unused here; part of the contract)
 * @param id_step   the id of this step in the repository
 * @param databases unused here; part of the readRep contract
 * @throws KettleException if reading an attribute fails
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  // Connection / authentication settings.
  setHostnames( rep.getStepAttributeString( id_step, 0, "mongo_host" ) ); //$NON-NLS-1$
  setPort( rep.getStepAttributeString( id_step, 0, "mongo_port" ) ); //$NON-NLS-1$
  setUseAllReplicaSetMembers( rep.getStepAttributeBoolean( id_step, 0, "use_all_replica_members" ) ); //$NON-NLS-1$
  setAuthenticationDatabaseName( rep.getStepAttributeString( id_step, 0, "mongo_auth_database" ) ); //$NON-NLS-1$
  setAuthenticationUser( rep.getStepAttributeString( id_step, 0, "mongo_user" ) ); //$NON-NLS-1$
  setAuthenticationPassword( rep.getStepAttributeString( id_step, 0, "mongo_password" ) ); //$NON-NLS-1$
  // The stored password may be encrypted; decrypt it after reading.
  if ( !Const.isEmpty( getAuthenticationPassword() ) ) {
    setAuthenticationPassword( Encr.decryptPasswordOptionallyEncrypted( getAuthenticationPassword() ) );
  }
  setAuthenticationMechanism( rep.getStepAttributeString( id_step, "auth_mech" ) );
  setUseKerberosAuthentication( rep.getStepAttributeBoolean( id_step, "auth_kerberos" ) ); //$NON-NLS-1$
  // Target database / collection and connection tuning.
  setDbName( rep.getStepAttributeString( id_step, 0, "mongo_db" ) ); //$NON-NLS-1$
  setCollection( rep.getStepAttributeString( id_step, 0, "mongo_collection" ) ); //$NON-NLS-1$
  m_batchInsertSize = rep.getStepAttributeString( id_step, 0, "batch_insert_size" ); //$NON-NLS-1$

  setConnectTimeout( rep.getStepAttributeString( id_step, "connect_timeout" ) ); //$NON-NLS-1$
  setSocketTimeout( rep.getStepAttributeString( id_step, "socket_timeout" ) ); //$NON-NLS-1$
  setUseSSLSocketFactory( rep.getStepAttributeBoolean( id_step, 0, "use_ssl_socket_factory", false ) );
  setReadPreference( rep.getStepAttributeString( id_step, "read_preference" ) ); //$NON-NLS-1$
  setWriteConcern( rep.getStepAttributeString( id_step, "write_concern" ) ); //$NON-NLS-1$
  setWTimeout( rep.getStepAttributeString( id_step, "w_timeout" ) ); //$NON-NLS-1$
  setJournal( rep.getStepAttributeBoolean( id_step, 0, "journaled_writes" ) ); //$NON-NLS-1$

  // Write-mode flags.
  m_truncate = rep.getStepAttributeBoolean( id_step, 0, "truncate" ); //$NON-NLS-1$
  m_update = rep.getStepAttributeBoolean( id_step, 0, "update" ); //$NON-NLS-1$
  m_upsert = rep.getStepAttributeBoolean( id_step, 0, "upsert" ); //$NON-NLS-1$
  m_multi = rep.getStepAttributeBoolean( id_step, 0, "multi" ); //$NON-NLS-1$
  m_modifierUpdate = rep.getStepAttributeBoolean( id_step, 0, "modifier_update" ); //$NON-NLS-1$

  // Upsert or multi-update both imply update mode.
  if ( m_upsert || m_multi ) {
    m_update = true;
  }

  int nrfields = rep.countNrStepAttributes( id_step, "incoming_field_name" ); //$NON-NLS-1$

  // Retry settings only override the defaults when explicitly set.
  String writeRetries = rep.getStepAttributeString( id_step, "write_retries" ); //$NON-NLS-1$
  if ( !Const.isEmpty( writeRetries ) ) {
    m_writeRetries = writeRetries;
  }
  String writeRetryDelay = rep.getStepAttributeString( id_step, "write_retry_delay" ); //$NON-NLS-1$
  if ( !Const.isEmpty( writeRetryDelay ) ) {
    m_writeRetryDelay = writeRetryDelay;
  }

  // Read the per-field mapping definitions, if any are stored.
  if ( nrfields > 0 ) {
    m_mongoFields = new ArrayList<MongoField>();

    for ( int i = 0; i < nrfields; i++ ) {
      MongoField newField = new MongoField();

      newField.m_incomingFieldName = rep.getStepAttributeString( id_step, i, "incoming_field_name" ); //$NON-NLS-1$
      newField.m_mongoDocPath = rep.getStepAttributeString( id_step, i, "mongo_doc_path" ); //$NON-NLS-1$

      newField.m_useIncomingFieldNameAsMongoFieldName =
          rep.getStepAttributeBoolean( id_step, i, "use_incoming_field_name_as_mongo_field_name" ); //$NON-NLS-1$
      newField.m_updateMatchField = rep.getStepAttributeBoolean( id_step, i, "update_match_field" ); //$NON-NLS-1$
      newField.m_modifierUpdateOperation = rep.getStepAttributeString( id_step, i, "modifier_update_operation" ); //$NON-NLS-1$
      // The modifier policy only overrides the default when explicitly set.
      String policy = rep.getStepAttributeString( id_step, i, "modifier_policy" ); //$NON-NLS-1$
      if ( !Const.isEmpty( policy ) ) {
        newField.m_modifierOperationApplyPolicy = policy;
      }
      newField.m_JSON = rep.getStepAttributeBoolean( id_step, i, "json_field" ); //$NON-NLS-1$
      newField.insertNull = rep.getStepAttributeBoolean( id_step, i, "allow_null" ); //$NON-NLS-1$

      m_mongoFields.add( newField );
    }
  }

  // Read the index definitions, if any are stored.
  nrfields = rep.countNrStepAttributes( id_step, "path_to_fields" ); //$NON-NLS-1$
  if ( nrfields > 0 ) {
    m_mongoIndexes = new ArrayList<MongoIndex>();

    for ( int i = 0; i < nrfields; i++ ) {
      MongoIndex newIndex = new MongoIndex();

      newIndex.m_pathToFields = rep.getStepAttributeString( id_step, i, "path_to_fields" ); //$NON-NLS-1$
      newIndex.m_drop = rep.getStepAttributeBoolean( id_step, i, "drop" ); //$NON-NLS-1$
      newIndex.m_unique = rep.getStepAttributeBoolean( id_step, i, "unique" ); //$NON-NLS-1$
      newIndex.m_sparse = rep.getStepAttributeBoolean( id_step, i, "sparse" ); //$NON-NLS-1$

      m_mongoIndexes.add( newIndex );
    }
  }
}
 
Example 15
Source File: Mapping.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
/**
 * Loads this mapping from the supplied step XML node.
 *
 * @param stepnode the step node to read the "mapping" sub-node from
 * @return true if mapping information was found and loaded, false otherwise
 * @throws KettleXMLException if adding a mapped column fails
 */
public boolean loadXML( Node stepnode ) throws KettleXMLException {
  stepnode = XMLHandler.getSubNode( stepnode, "mapping" );

  // No <mapping> sub-node, or no key defined: nothing to load.
  if ( stepnode == null
    || Const.isEmpty( XMLHandler.getTagValue( stepnode, "key" ) ) ) {
    return false; // no mapping info in XML
  }

  setMappingName( XMLHandler.getTagValue( stepnode, "mapping_name" ) );
  setTableName( XMLHandler.getTagValue( stepnode, "table_name" ) );

  // A comma in the key name means a tuple mapping: the text before the first
  // comma is the key, and anything after it is a list of column families.
  String keyName = XMLHandler.getTagValue( stepnode, "key" );
  if ( keyName.indexOf( ',' ) > 0 ) {
    setTupleMapping( true );
    setKeyName( keyName.substring( 0, keyName.indexOf( ',' ) ) );
    if ( keyName.indexOf( ',' ) != keyName.length() - 1 ) {
      // specific families have been supplied
      String familiesList = keyName.substring( keyName.indexOf( ',' ) + 1,
        keyName.length() );
      if ( !Const.isEmpty( familiesList.trim() ) ) {
        setTupleFamilies( familiesList );
      }
    }
  } else {
    setKeyName( keyName );
  }

  // Map the stored key-type string onto the KeyType enum (case-insensitive).
  String keyTypeS = XMLHandler.getTagValue( stepnode, "key_type" );
  for ( KeyType k : KeyType.values() ) {
    if ( k.toString().equalsIgnoreCase( keyTypeS ) ) {
      setKeyType( k );
      break;
    }
  }

  // Read the mapped columns, if any are present.
  Node fields = XMLHandler.getSubNode( stepnode, "mapped_columns" );
  if ( fields != null && XMLHandler.countNodes( fields, "mapped_column" ) > 0 ) {
    int nrfields = XMLHandler.countNodes( fields, "mapped_column" );

    for ( int i = 0; i < nrfields; i++ ) {
      Node fieldNode = XMLHandler.getSubNodeByNr( fields, "mapped_column", i );
      String alias = XMLHandler.getTagValue( fieldNode, "alias" );
      // Column family and column name default to the empty string when
      // absent, so the combined key below stays well-formed.
      String colFam = XMLHandler.getTagValue( fieldNode, "column_family" );
      if ( colFam == null ) {
        colFam = "";
      }
      String colName = XMLHandler.getTagValue( fieldNode, "column_name" );
      if ( colName == null ) {
        colName = "";
      }
      String type = XMLHandler.getTagValue( fieldNode, "type" );
      // Encode family/name/alias into the single combined name expected by
      // HBaseValueMeta.
      String combined = colFam + HBaseValueMeta.SEPARATOR + colName
        + HBaseValueMeta.SEPARATOR + alias;
      HBaseValueMeta hbvm = new HBaseValueMeta( combined, 0, -1, -1 );
      hbvm.setHBaseTypeFromString( type );

      // Optional list of legal (indexed) values for this column.
      String indexedV = XMLHandler.getTagValue( fieldNode, "indexed_vals" );
      if ( !Const.isEmpty( indexedV ) ) {
        Object[] nomVals = HBaseValueMeta.stringIndexListToObjects( indexedV );
        hbvm.setIndex( nomVals );
        hbvm.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED );
      }

      try {
        addMappedColumn( hbvm, isTupleMapping() );
      } catch ( Exception ex ) {
        throw new KettleXMLException( ex );
      }
    }
  }

  return true;
}
 
Example 16
Source File: MongoDbOutputDialog.java    From pentaho-mongodb-plugin with Apache License 2.0 4 votes vote down vote up
/**
 * Repopulates the collection combo with the collection names of the
 * currently configured database, preserving the user's current selection.
 * If hostname or database name is missing, a warning dialog is shown
 * instead.
 *
 * @param quiet unused here -- NOTE(review): the flag is not consulted, the
 *              warning/error dialogs are always shown; confirm intent
 */
private void setupCollectionNamesForDB( boolean quiet ) {
  final String hostname = transMeta.environmentSubstitute( m_hostnameField.getText() );
  final String dB = transMeta.environmentSubstitute( m_dbNameField.getText() );

  // Remember the current selection so it can be restored after the refresh.
  String current = m_collectionField.getText();
  m_collectionField.removeAll();

  if ( !Const.isEmpty( hostname ) && !Const.isEmpty( dB ) ) {

    final MongoDbOutputMeta meta = new MongoDbOutputMeta();
    getInfo( meta );
    try {
      MongoClientWrapper clientWrapper = MongoWrapperUtil.createMongoClientWrapper( meta, transMeta, log );
      Set<String> collections = new HashSet<String>();
      try {
        collections = clientWrapper.getCollectionsNames( dB );
      } finally {
        // Always release the client connection, even if the fetch failed.
        clientWrapper.dispose();
      }

      for ( String c : collections ) {
        m_collectionField.add( c );
      }
    } catch ( Exception e ) {
      // Unwrap the PrivilegedActionException if it was thrown
      if ( e instanceof PrivilegedActionException ) {
        e = ( (PrivilegedActionException) e ).getException();
      }
      logError( getString( "MongoDbOutputDialog.ErrorMessage.UnableToConnect" ), e ); //$NON-NLS-1$
      new ErrorDialog( shell, getString( "MongoDbOutputDialog.ErrorMessage.UnableToConnect" ),
        //$NON-NLS-1$ //$NON-NLS-2$
        getString( "MongoDbOutputDialog.ErrorMessage.UnableToConnect" ), e ); //$NON-NLS-1$
    }
  } else {
    // popup some feedback: list which connection details are missing.

    String missingConnDetails = ""; //$NON-NLS-1$
    if ( Const.isEmpty( hostname ) ) {
      missingConnDetails += "host name(s)"; //$NON-NLS-1$
    }
    if ( Const.isEmpty( dB ) ) {
      missingConnDetails += " database"; //$NON-NLS-1$
    }
    ShowMessageDialog
      smd =
      new ShowMessageDialog( shell, SWT.ICON_WARNING | SWT.OK,
        getString( "MongoDbOutputDialog.ErrorMessage.MissingConnectionDetails.Title" ),
        BaseMessages.getString( PKG, //$NON-NLS-1$
          "MongoDbOutputDialog.ErrorMessage.MissingConnectionDetails", missingConnDetails ) ); //$NON-NLS-1$
    smd.open();
  }

  // Restore the previous selection (even if it is not in the new list).
  if ( !Const.isEmpty( current ) ) {
    m_collectionField.setText( current );
  }
}
 
Example 17
Source File: Mapping.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
/**
 * Loads this mapping from the repository.
 *
 * @param rep     the repository to read from
 * @param id_step the id of the owning step
 * @return true if mapping information was found and loaded, false otherwise
 * @throws KettleException if reading fails or adding a mapped column fails
 */
public boolean readRep( Repository rep, ObjectId id_step )
  throws KettleException {
  // No stored key type means no mapping was ever saved for this step.
  if ( Const.isEmpty( rep.getStepAttributeString( id_step, 0, "key_type" ) ) ) {
    return false; // No mapping information in the repository
  }

  setMappingName( rep.getStepAttributeString( id_step, 0, "mapping_name" ) );
  setTableName( rep.getStepAttributeString( id_step, 0, "table_name" ) );

  // A comma in the key name means a tuple mapping: the text before the first
  // comma is the key, and anything after it is a list of column families.
  String keyName = rep.getStepAttributeString( id_step, 0, "key" );
  if ( keyName.indexOf( ',' ) > 0 ) {
    setTupleMapping( true );
    setKeyName( keyName.substring( 0, keyName.indexOf( ',' ) ) );
    if ( keyName.indexOf( ',' ) != keyName.length() - 1 ) {
      // specific families have been supplied
      String familiesList = keyName.substring( keyName.indexOf( ',' ) + 1,
        keyName.length() );
      if ( !Const.isEmpty( familiesList.trim() ) ) {
        setTupleFamilies( familiesList );
      }
    }
  } else {
    setKeyName( keyName );
  }

  // Map the stored key-type string onto the KeyType enum (case-insensitive).
  String keyTypeS = rep.getStepAttributeString( id_step, 0, "key_type" );
  for ( KeyType k : KeyType.values() ) {
    if ( k.toString().equalsIgnoreCase( keyTypeS ) ) {
      setKeyType( k );
      break;
    }
  }

  // Read the mapped columns, if any are present.
  int nrfields = rep.countNrStepAttributes( id_step, "column_family" );
  if ( nrfields > 0 ) {
    for ( int i = 0; i < nrfields; i++ ) {
      String alias = rep.getStepAttributeString( id_step, i, "alias" );
      // Column family and column name default to the empty string when
      // absent, so the combined key below stays well-formed.
      String colFam = rep.getStepAttributeString( id_step, i, "column_family" );
      if ( colFam == null ) {
        colFam = "";
      }
      String colName = rep.getStepAttributeString( id_step, i, "column_name" );
      if ( colName == null ) {
        colName = "";
      }
      String type = rep.getStepAttributeString( id_step, i, "type" );
      // Encode family/name/alias into the single combined name expected by
      // HBaseValueMeta.
      String combined = colFam + HBaseValueMeta.SEPARATOR + colName
        + HBaseValueMeta.SEPARATOR + alias;
      HBaseValueMeta hbvm = new HBaseValueMeta( combined, 0, -1, -1 );
      hbvm.setHBaseTypeFromString( type );
      // Optional list of legal (indexed) values for this column.
      String indexedV = rep
        .getStepAttributeString( id_step, i, "indexed_vals" );
      if ( !Const.isEmpty( indexedV ) ) {
        Object[] nomVals = HBaseValueMeta.stringIndexListToObjects( indexedV );
        hbvm.setIndex( nomVals );
        hbvm.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED );
      }

      try {
        addMappedColumn( hbvm, isTupleMapping() );
      } catch ( Exception ex ) {
        throw new KettleException( ex );
      }
    }
  }

  return true;
}
 
Example 18
Source File: CassandraOutputMeta.java    From learning-hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Persists this step's configuration to the repository. String-valued
 * settings are written only when non-empty; boolean flags are always
 * written.
 *
 * @param rep               the repository to write to
 * @param id_transformation the id of the owning transformation
 * @param id_step           the id of this step
 * @throws KettleException if saving an attribute fails
 */
public void saveRep(Repository rep, ObjectId id_transformation,
    ObjectId id_step) throws KettleException {
  // Connection details.
  saveIfSet(rep, id_transformation, id_step, "cassandra_host", m_cassandraHost);
  saveIfSet(rep, id_transformation, id_step, "cassandra_port", m_cassandraPort);
  saveIfSet(rep, id_transformation, id_step, "schema_host", m_schemaHost);
  saveIfSet(rep, id_transformation, id_step, "schema_port", m_schemaPort);
  saveIfSet(rep, id_transformation, id_step, "socket_timeout", m_socketTimeout);
  saveIfSet(rep, id_transformation, id_step, "username", m_username);

  // The password is encrypted (unless it uses variables) before storage.
  if (!Const.isEmpty(m_password)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "password",
        Encr.encryptPasswordIfNotUsingVariables(m_password));
  }

  // Target keyspace / column family and write tuning options.
  saveIfSet(rep, id_transformation, id_step, "cassandra_keyspace", m_cassandraKeyspace);
  saveIfSet(rep, id_transformation, id_step, "column_family", m_columnFamily);
  saveIfSet(rep, id_transformation, id_step, "key_field", m_keyField);
  saveIfSet(rep, id_transformation, id_step, "consistency", m_consistency);
  saveIfSet(rep, id_transformation, id_step, "batch_size", m_batchSize);
  saveIfSet(rep, id_transformation, id_step, "cql_batch_timeout", m_cqlBatchTimeout);
  saveIfSet(rep, id_transformation, id_step, "cql_sub_batch_size", m_cqlSubBatchSize);

  // Boolean flags are written unconditionally.
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "create_column_family", m_createColumnFamily);
  rep.saveStepAttribute(id_transformation, id_step, 0, "use_compression",
      m_useCompression);
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "insert_fields_not_in_meta", m_insertFieldsNotInMeta);
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "update_cassandra_meta", m_updateCassandraMeta);
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "truncate_column_family", m_truncateColumnFamily);

  // CQL statements to execute before writing, if any.
  saveIfSet(rep, id_transformation, id_step, "apriori_cql", m_aprioriCQL);

  rep.saveStepAttribute(id_transformation, id_step, 0, "use_thrift_io",
      m_useThriftIO);

  rep.saveStepAttribute(id_transformation, id_step, 0, "asIndexColumn",
      asIndexColumn);
}

/**
 * Writes a string-valued step attribute only when the value is non-empty,
 * mirroring the save behaviour used throughout this meta class.
 */
private void saveIfSet(Repository rep, ObjectId id_transformation,
    ObjectId id_step, String code, String value) throws KettleException {
  if (!Const.isEmpty(value)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, code, value);
  }
}
 
Example 19
Source File: OdpsOutputDialog.java    From aliyun-maxcompute-data-collectors with Apache License 2.0 4 votes vote down vote up
/**
 * Populates the fields table by fetching the target table's schema from
 * MaxCompute (ODPS) and pairing its columns with the previous step's
 * stream fields. Shows a warning dialog if any connection detail is
 * missing.
 *
 * @param meta the meta object to receive the ODPS and stream field lists
 */
private void getFields(OdpsOutputMeta meta) {
    // All five connection settings are required before we can query ODPS.
    if (!Const.isEmpty(m_wEndpoint.getText()) && !Const.isEmpty(m_wAccessId.getText()) && !Const
        .isEmpty(m_wAccessKey.getText()) && !Const.isEmpty(m_wProjectName.getText()) && !Const
        .isEmpty(m_wTableName.getText())) {

        // Fetch the target table's schema from MaxCompute.
        TableSchema schema = MaxcomputeUtil
            .getTableSchema(new AliyunAccount(m_wAccessId.getText(), m_wAccessKey.getText()),
                m_wEndpoint.getText(), m_wProjectName.getText(), m_wTableName.getText());

        // Convert the ODPS columns into the meta's field representation.
        List<Column> columns = schema.getColumns();
        List<OdpsField> odpsFields = new ArrayList<OdpsField>();
        for (int i = 0; i < columns.size(); i++) {
            OdpsField field = new OdpsField();
            field.setName(columns.get(i).getName());
            field.setType(columns.get(i).getType().name());
            field.setComment(columns.get(i).getCategoryLabel());
            odpsFields.add(field);
        }

        meta.setOdpsFields(odpsFields);
        // Collect the incoming stream field names from the previous step.
        RowMetaInterface r = null;
        try {
            r = transMeta.getPrevStepFields(stepname);
        } catch (KettleStepException e) {
            logError(e.getMessage(), e);
        }

        if (r != null) {
            logBasic("prev step fields: " + Arrays.toString(r.getFieldNames()));
            meta.setStreamFields(Arrays.asList(r.getFieldNames()));
        } else {
            logBasic("prev step fields empty!!!");
            meta.setStreamFields(new ArrayList<String>());
        }

        // Fill the fields table: ODPS column in column 1, the positionally
        // matching stream field (when there is one) in column 2.
        if (meta.getOdpsFields() != null && meta.getStreamFields() != null) {
            m_wFieldsTable.table.clearAll();
            m_wFieldsTable.table.setItemCount(meta.getOdpsFields().size());
            for (int i = 0; i < meta.getOdpsFields().size(); i++) {
                OdpsField odpsField = meta.getOdpsFields().get(i);
                String streamField = "";
                if (meta.getStreamFields().size() > i) {
                    streamField = meta.getStreamFields().get(i);
                }
                TableItem item = m_wFieldsTable.table.getItem(i);
                if (odpsField != null) {
                    if (odpsField.getName() != null)
                        item.setText(1, odpsField.getName());
                    if (streamField != null)
                        item.setText(2, streamField);
                }
            }
        }

    } else {
        // pop up an error dialog listing which connection details are missing
        String missingConDetails = "";
        if (Const.isEmpty(m_wEndpoint.getText())) {
            missingConDetails += " odps endpoint";
        }
        if (Const.isEmpty(m_wAccessId.getText())) {
            missingConDetails += " accessId";
        }
        if (Const.isEmpty(m_wAccessKey.getText())) {
            missingConDetails += " accessKey";
        }
        if (Const.isEmpty(m_wProjectName.getText())) {
            missingConDetails += " project name";
        }
        if (Const.isEmpty(m_wTableName.getText())) {
            missingConDetails += " table name";
        }
        ShowMessageDialog smd = new ShowMessageDialog(shell, SWT.ICON_WARNING | SWT.OK,
            BaseMessages.getString(PKG, "ODPS.ErrorMessage.MissingConnectionDetails.Title"),
            BaseMessages.getString(PKG, "ODPS.ErrorMessage.MissingConnectionDetails",
                missingConDetails));
        smd.open();
    }
}
 
Example 20
Source File: CassandraOutputDialog.java    From learning-hadoop with Apache License 2.0 4 votes vote down vote up
protected void ok() {
  if (Const.isEmpty(m_stepnameText.getText())) {
    return;
  }

  stepname = m_stepnameText.getText();
  m_currentMeta.setCassandraHost(m_hostText.getText());
  m_currentMeta.setCassandraPort(m_portText.getText());
  m_currentMeta.setSchemaHost(m_schemaHostText.getText());
  m_currentMeta.setSchemaPort(m_schemaPortText.getText());
  m_currentMeta.setSocketTimeout(m_socketTimeoutText.getText());
  m_currentMeta.setUsername(m_userText.getText());
  m_currentMeta.setPassword(m_passText.getText());
  m_currentMeta.setCassandraKeyspace(m_keyspaceText.getText());
  m_currentMeta.setColumnFamilyName(m_columnFamilyCombo.getText());
  m_currentMeta.setConsistency(m_consistencyText.getText());
  m_currentMeta.setBatchSize(m_batchSizeText.getText());
  m_currentMeta.setCQLBatchInsertTimeout(m_batchInsertTimeoutText.getText());
  m_currentMeta.setCQLSubBatchSize(m_subBatchSizeText.getText());
  m_currentMeta.setKeyField(m_keyFieldCombo.getText());

  m_currentMeta.setCreateColumnFamily(m_createColumnFamilyBut.getSelection());
  m_currentMeta.setTruncateColumnFamily(m_truncateColumnFamilyBut
      .getSelection());
  m_currentMeta.setUpdateCassandraMeta(m_updateColumnFamilyMetaDataBut
      .getSelection());
  m_currentMeta.setInsertFieldsNotInMeta(m_insertFieldsNotInColumnFamMetaBut
      .getSelection());
  m_currentMeta.setUseCompression(m_useCompressionBut.getSelection());
  m_currentMeta.setAprioriCQL(m_aprioriCQL);
  m_currentMeta.setUseThriftIO(m_useThriftIOCheck.getSelection());
  
  m_currentMeta.setAsIndexColumn(asIndexColumnBut.getSelection());
  m_currentMeta.setAprioriCQL(m_aprioriCQL);

  if (!m_originalMeta.equals(m_currentMeta)) {
    m_currentMeta.setChanged();
    changed = m_currentMeta.hasChanged();
  }

  dispose();
}