Java Code Examples for org.pentaho.di.core.row.RowMetaInterface#size()

The following examples show how to use org.pentaho.di.core.row.RowMetaInterface#size(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
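Before the project examples, here is the basic pattern most of them share: size() returns the number of ValueMetaInterface entries in the row metadata, so it is the natural loop bound for per-field processing. This is a minimal illustrative sketch, not taken from any of the projects below; the field names are made up.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class RowMetaSizeSketch {
  public static void main( String[] args ) {
    // Build a small row metadata object with two illustrative fields.
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "id" ) );
    rowMeta.addValueMeta( new ValueMetaString( "name" ) );

    // size() gives the number of ValueMetaInterface entries,
    // so it bounds the per-field loop.
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      System.out.println( i + ": " + valueMeta.getName() + " (" + valueMeta.getTypeDesc() + ")" );
    }
  }
}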
Example 1
Source File: StepTransform.java    From kettle-beam with Apache License 2.0
private StepMeta createInjectorStep( TransMeta transMeta, String injectorStepName, RowMetaInterface injectorRowMeta, int x, int y ) {
  InjectorMeta injectorMeta = new InjectorMeta();
  injectorMeta.allocate( injectorRowMeta.size() );
  for ( int i = 0; i < injectorRowMeta.size(); i++ ) {
    ValueMetaInterface valueMeta = injectorRowMeta.getValueMeta( i );
    injectorMeta.getFieldname()[ i ] = valueMeta.getName();
    injectorMeta.getType()[ i ] = valueMeta.getType();
    injectorMeta.getLength()[ i ] = valueMeta.getLength();
    injectorMeta.getPrecision()[ i ] = valueMeta.getPrecision();
  }
  StepMeta injectorStepMeta = new StepMeta( injectorStepName, injectorMeta );
  injectorStepMeta.setLocation( x, y );
  injectorStepMeta.setDraw( true );
  transMeta.addStep( injectorStepMeta );

  return injectorStepMeta;
}
 
Example 2
Source File: CassandraOutputData.java    From learning-hadoop with Apache License 2.0
protected static int numFieldsToBeWritten(String colFamilyName,
		RowMetaInterface inputMeta, int keyIndex,
		CassandraColumnMetaData cassandraMeta,
		boolean insertFieldsNotInMetaData) {

	// Check how many fields will actually be inserted - we must insert at
	// least one field apart from the key or Cassandra will complain.

	int count = 1; // key
	for (int i = 0; i < inputMeta.size(); i++) {
		if (i != keyIndex) {
			ValueMetaInterface colMeta = inputMeta.getValueMeta(i);
			String colName = colMeta.getName();
			if (!cassandraMeta.columnExistsInSchema(colName)
					&& !insertFieldsNotInMetaData) {
				continue;
			}
			count++;
		}
	}

	return count;
}
 
Example 3
Source File: PentahoMapReduceBase.java    From pentaho-hadoop-shims with Apache License 2.0
public void injectValue( Object key, int keyOrdinal, ITypeConverter inConverterK,
                         Object value, int valueOrdinal, ITypeConverter inConverterV,
                         RowMetaInterface injectorRowMeta, RowProducer rowProducer, Reporter reporter )
  throws Exception {
  Object[] row = new Object[ injectorRowMeta.size() ];
  row[ keyOrdinal ] =
    inConverterK != null ? inConverterK.convert( injectorRowMeta.getValueMeta( keyOrdinal ), key ) : key;
  row[ valueOrdinal ] =
    inConverterV != null ? inConverterV.convert( injectorRowMeta.getValueMeta( valueOrdinal ), value ) : value;

  if ( log.isDebug() ) {
    setDebugStatus( reporter, "Injecting input record [" + row[ keyOrdinal ] + "] - [" + row[ valueOrdinal ] + "]" );
  }

  rowProducer.putRow( injectorRowMeta, row );
}
 
Example 4
Source File: OlapInputMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  RowMetaInterface add = null;

  try {
    initData( space );

    add = data.outputRowMeta;
  } catch ( Exception dbe ) {
    throw new KettleStepException( "Unable to get query result for MDX query: " + Const.CR + mdx, dbe );
  }

  // Set the origin
  //
  for ( int i = 0; i < add.size(); i++ ) {
    ValueMetaInterface v = add.getValueMeta( i );
    v.setOrigin( origin );
  }

  row.addRowMeta( add );
}
 
Example 5
Source File: OpenERPObjectOutputDialog.java    From pentaho-kettle with Apache License 2.0
private String[] getSteamFieldsNames( boolean showError ) {
  String[] fields = null;

  // Set stream fields
  RowMetaInterface row;
  try {
    row = transMeta.getPrevStepFields( stepMeta );
    fields = new String[row.size()];
    for ( int i = 0; i < row.size(); i++ ) {
      fields[i] = row.getValueMeta( i ).getName();
    }
  } catch ( KettleStepException e ) {
    if ( showError ) {
      new ErrorDialog( shell,
        BaseMessages.getString( PKG, "OpenERPObjectOutputDialog.UnableToFindStreamFieldsTitle" ), BaseMessages
        .getString( PKG, "OpenERPObjectOutputDialog.UnableToFindStreamFieldsMessage" ), e );
    }
    return null;
  }

  return fields;
}
 
Example 6
Source File: DatabaseMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * Checks the fields specified for reserved words
 *
 * @param fields
 *          the list of fields to check
 * @return The nr of reserved words for this database.
 */
public int getNrReservedWords( RowMetaInterface fields ) {
  int nrReservedWords = 0;
  for ( int i = 0; i < fields.size(); i++ ) {
    ValueMetaInterface v = fields.getValueMeta( i );
    if ( isReservedWord( v.getName() ) ) {
      nrReservedWords++;
    }
  }
  return nrReservedWords;
}
 
Example 7
Source File: TableOutputMeta.java    From pentaho-kettle with Apache License 2.0
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String tk,
                                      boolean use_autoinc, String pk ) {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable = databaseMeta.getQuotedSchemaTableCombination( schemaName, tableName );
          String cr_table = db.getDDL( schemaTable, prev, tk, use_autoinc, pk );

          // Empty string means: nothing to do: set it to null...
          if ( cr_table == null || cr_table.length() == 0 ) {
            cr_table = null;
          }

          retval.setSQL( cr_table );
        } catch ( KettleDatabaseException dbe ) {
          retval.setError( BaseMessages.getString( PKG, "TableOutputMeta.Error.ErrorConnecting", dbe
            .getMessage() ) );
        } finally {
          db.disconnect();
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "TableOutputMeta.Error.NoTable" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "TableOutputMeta.Error.NoInput" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "TableOutputMeta.Error.NoConnection" ) );
  }

  return retval;
}
 
Example 8
Source File: TextFileInput.java    From pentaho-kettle with Apache License 2.0
/**
 * Adds <code>String</code> value meta with given name if not present and returns index
 *
 * @param rowMeta
 * @param fieldName
 * @return Index in row meta of value meta with <code>fieldName</code>
 */
private int addValueMeta( RowMetaInterface rowMeta, String fieldName ) {
  ValueMetaInterface valueMeta = new ValueMetaString( fieldName );
  valueMeta.setOrigin( getStepname() );
  // add if doesn't exist
  int index = -1;
  if ( !rowMeta.exists( valueMeta ) ) {
    index = rowMeta.size();
    rowMeta.addValueMeta( valueMeta );
  } else {
    index = rowMeta.indexOfValue( fieldName );
  }
  return index;
}
 
Example 9
Source File: CassandraOutputDialog.java    From learning-hadoop with Apache License 2.0
protected void setupFieldsCombo() {
  // try and set up from incoming fields from previous step

  StepMeta stepMeta = transMeta.findStep(stepname);

  if (stepMeta != null) {
    try {
      RowMetaInterface row = transMeta.getPrevStepFields(stepMeta);

      if (row.size() == 0) {
        MessageDialog.openError(shell, BaseMessages.getString(PKG,
            "CassandraOutputData.Message.NoIncomingFields.Title"),
            BaseMessages.getString(PKG,
                "CassandraOutputData.Message.NoIncomingFields"));

        return;
      }

      m_keyFieldCombo.removeAll();
      for (int i = 0; i < row.size(); i++) {
        ValueMetaInterface vm = row.getValueMeta(i);
        m_keyFieldCombo.add(vm.getName());
      }
    } catch (KettleException ex) {
      MessageDialog.openError(shell, BaseMessages.getString(PKG,
          "CassandraOutputData.Message.NoIncomingFields.Title"), BaseMessages
          .getString(PKG, "CassandraOutputData.Message.NoIncomingFields"));
    }
  }
}
 
Example 10
Source File: ElasticSearchBulk.java    From pentaho-kettle with Apache License 2.0
private static Integer getFieldIdx( RowMetaInterface rowMeta, String fieldName ) {
  if ( fieldName == null ) {
    return null;
  }

  for ( int i = 0; i < rowMeta.size(); i++ ) {
    String name = rowMeta.getValueMeta( i ).getName();
    if ( fieldName.equals( name ) ) {
      return i;
    }
  }
  return null;
}
 
Example 11
Source File: TableProducer.java    From pentaho-reporting with GNU Lesser General Public License v2.1
protected TypedTableModel createTableModel( final RowMetaInterface rowMeta ) {
  final int colCount = rowMeta.size();
  final String fieldNames[] = new String[ colCount ];
  final Class<?> fieldTypes[] = new Class<?>[ colCount ];
  for ( int columnNo = 0; columnNo < colCount; columnNo++ ) {
    final ValueMetaInterface valueMeta = rowMeta.getValueMeta( columnNo );
    fieldNames[ columnNo ] = valueMeta.getName();

    switch( valueMeta.getType() ) {
      case ValueMetaInterface.TYPE_BIGNUMBER:
        fieldTypes[ columnNo ] = BigDecimal.class;
        break;
      case ValueMetaInterface.TYPE_BOOLEAN:
        fieldTypes[ columnNo ] = Boolean.class;
        break;
      case ValueMetaInterface.TYPE_DATE:
        fieldTypes[ columnNo ] = Date.class;
        break;
      case ValueMetaInterface.TYPE_INTEGER:
        fieldTypes[ columnNo ] = Integer.class;
        break;
      case ValueMetaInterface.TYPE_NONE:
        fieldTypes[ columnNo ] = String.class;
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        fieldTypes[ columnNo ] = Double.class;
        break;
      case ValueMetaInterface.TYPE_STRING:
        fieldTypes[ columnNo ] = String.class;
        break;
      case ValueMetaInterface.TYPE_BINARY:
        fieldTypes[ columnNo ] = byte[].class;
        break;
      default:
        fieldTypes[ columnNo ] = String.class;
    }

  }
  return new TypedTableModel( fieldNames, fieldTypes );
}
 
Example 12
Source File: TableInput.java    From pentaho-kettle with Apache License 2.0
private RowMetaAndData readStartDate() throws KettleException {
  if ( log.isDetailed() ) {
    logDetailed( "Reading from step [" + data.infoStream.getStepname() + "]" );
  }

  RowMetaInterface parametersMeta = new RowMeta();
  Object[] parametersData = new Object[] {};

  RowSet rowSet = findInputRowSet( data.infoStream.getStepname() );
  if ( rowSet != null ) {
    Object[] rowData = getRowFrom( rowSet ); // rows are originating from "lookup_from"
    while ( rowData != null ) {
      parametersData = RowDataUtil.addRowData( parametersData, parametersMeta.size(), rowData );
      parametersMeta.addRowMeta( rowSet.getRowMeta() );

      rowData = getRowFrom( rowSet ); // take all input rows if needed!
    }

    if ( parametersMeta.size() == 0 ) {
      throw new KettleException( "Expected to read parameters from step ["
        + data.infoStream.getStepname() + "] but none were found." );
    }
  } else {
    throw new KettleException( "Unable to find rowset to read from, perhaps step ["
      + data.infoStream.getStepname() + "] doesn't exist. (or perhaps you are trying a preview?)" );
  }

  RowMetaAndData parameters = new RowMetaAndData( parametersMeta, parametersData );

  return parameters;
}
 
Example 13
Source File: Database.java    From pentaho-kettle with Apache License 2.0
/**
 * Prepare inserting values into a table, using the fields & values in a Row
 *
 * @param rowMeta    The metadata row to determine which values need to be inserted
 * @param schemaName The name of the schema in which we want to insert rows
 * @param tableName  The name of the table in which we want to insert rows
 * @throws KettleDatabaseException if something went wrong.
 */
public void prepareInsert( RowMetaInterface rowMeta, String schemaName, String tableName )
  throws KettleDatabaseException {
  if ( rowMeta.size() == 0 ) {
    throw new KettleDatabaseException( "No fields in row, can't insert!" );
  }

  String ins = getInsertStatement( schemaName, tableName, rowMeta );

  if ( log.isDetailed() ) {
    log.logDetailed( "Preparing statement: " + Const.CR + ins );
  }
  prepStatementInsert = prepareSQL( ins );
}
 
Example 14
Source File: PGBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = new RowMeta();

      // Now change the field names
      for ( int i = 0; i < fieldTable.length; i++ ) {
        ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
        if ( v != null ) {
          ValueMetaInterface tableField = v.clone();
          tableField.setName( fieldTable[i] );
          tableFields.addValueMeta( tableField );
        } else {
          throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows" );
        }
      }

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable =
            databaseMeta.getQuotedSchemaTableCombination(
              transMeta.environmentSubstitute( schemaName ), transMeta.environmentSubstitute( tableName ) );
          String sql = db.getDDL( schemaTable, tableFields, null, false, null, true );

          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.ErrorOccurred" )
            + e.getMessage() );
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.NoConnectionDefined" ) );
  }

  return retval;
}
 
Example 15
Source File: AddSequence.java    From pentaho-kettle with Apache License 2.0
public Object[] addSequence( RowMetaInterface inputRowMeta, Object[] inputRowData ) throws KettleException {
  Object next = null;

  if ( meta.isCounterUsed() ) {
    synchronized ( data.counter ) {
      long prev = data.counter.getCounter();

      long nval = prev + data.increment;
      if ( data.increment > 0 && data.maximum > data.start && nval > data.maximum ) {
        nval = data.start;
      }
      if ( data.increment < 0 && data.maximum < data.start && nval < data.maximum ) {
        nval = data.start;
      }
      data.counter.setCounter( nval );

      next = prev;
    }
  } else if ( meta.isDatabaseUsed() ) {
    try {
      next = data.getDb().getNextSequenceValue( data.realSchemaName, data.realSequenceName, meta.getValuename() );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleStepException( BaseMessages.getString(
        PKG, "AddSequence.Exception.ErrorReadingSequence", data.realSequenceName ), dbe );
    }
  } else {
    // This should never happen, but if it does, don't continue!!!
    throw new KettleStepException( BaseMessages.getString( PKG, "AddSequence.Exception.NoSpecifiedMethod" ) );
  }

  if ( next != null ) {
    Object[] outputRowData = inputRowData;
    if ( inputRowData.length < inputRowMeta.size() + 1 ) {
      outputRowData = RowDataUtil.resizeArray( inputRowData, inputRowMeta.size() + 1 );
    }
    outputRowData[inputRowMeta.size()] = next;
    return outputRowData;
  } else {
    throw new KettleStepException( BaseMessages.getString(
      PKG, "AddSequence.Exception.CouldNotFindNextValueForSequence" )
      + meta.getValuename() );
  }
}
 
Example 16
Source File: BaseStep.java    From pentaho-kettle with Apache License 2.0
private void handlePutError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions,
                             String fieldNames, String errorCodes ) throws KettleStepException {
  if ( trans.isSafeModeEnabled() ) {
    if ( rowMeta.size() > row.length ) {
      throw new KettleStepException( BaseMessages.getString(
        PKG, "BaseStep.Exception.MetadataDoesntMatchDataRowSize", Integer.toString( rowMeta.size() ), Integer
          .toString( row != null ? row.length : 0 ) ) );
    }
  }

  StepErrorMeta stepErrorMeta = stepMeta.getStepErrorMeta();

  if ( errorRowMeta == null ) {
    errorRowMeta = rowMeta.clone();

    RowMetaInterface add = stepErrorMeta.getErrorRowMeta( nrErrors, errorDescriptions, fieldNames, errorCodes );
    errorRowMeta.addRowMeta( add );
  }

  Object[] errorRowData = RowDataUtil.allocateRowData( errorRowMeta.size() );
  if ( row != null ) {
    System.arraycopy( row, 0, errorRowData, 0, rowMeta.size() );
  }

  // Also add the error fields...
  stepErrorMeta.addErrorRowData(
    errorRowData, rowMeta.size(), nrErrors, errorDescriptions, fieldNames, errorCodes );

  // call all row listeners...
  for ( RowListener listener : rowListeners ) {
    listener.errorRowWrittenEvent( rowMeta, row );
  }

  if ( errorRowSet != null ) {
    while ( !errorRowSet.putRow( errorRowMeta, errorRowData ) ) {
      if ( isStopped() ) {
        break;
      }
    }
    incrementLinesRejected();
  }

  verifyRejectionRates();
}
 
Example 17
Source File: InsertUpdateMeta.java    From pentaho-kettle with Apache License 2.0
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = RowMetaUtils.getRowMetaForUpdate( prev, keyLookup,
          keyStream, updateLookup, updateStream );

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable = databaseMeta.getQuotedSchemaTableCombination( schemaName, tableName );
          String cr_table = db.getDDL( schemaTable, tableFields, null, false, null, true );

          String cr_index = "";
          String[] idx_fields = null;

          if ( keyLookup != null && keyLookup.length > 0 ) {
            idx_fields = new String[keyLookup.length];
            for ( int i = 0; i < keyLookup.length; i++ ) {
              idx_fields[i] = keyLookup[i];
            }
          } else {
            retval.setError( BaseMessages.getString( PKG, "InsertUpdateMeta.CheckResult.MissingKeyFields" ) );
          }

          // Key lookup dimensions...
          if ( idx_fields != null
              && idx_fields.length > 0 && !db.checkIndexExists( schemaName, tableName, idx_fields ) ) {
            String indexname = "idx_" + tableName + "_lookup";
            cr_index =
              db.getCreateIndexStatement( schemaTable, indexname, idx_fields, false, false, false, true );
          }

          String sql = cr_table + cr_index;
          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "InsertUpdateMeta.ReturnValue.ErrorOccurred" )
              + e.getMessage() );
        }
      } else {
        retval
          .setError( BaseMessages.getString( PKG, "InsertUpdateMeta.ReturnValue.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "InsertUpdateMeta.ReturnValue.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "InsertUpdateMeta.ReturnValue.NoConnectionDefined" ) );
  }

  return retval;
}
 
Example 18
Source File: BaseStepXulDialog.java    From pentaho-kettle with Apache License 2.0
public static void getFieldsFromPrevious( RowMetaInterface row, XulTree tableView, List<Object> fields,
  StepTableDataObject field, TableItemInsertXulListener listener ) {
  if ( row == null || row.size() == 0 ) {
    return; // nothing to do
  }

  // get a list of all the non-empty keys (names)
  //
  List<String> keys = new ArrayList<String>();
  for ( Object entry : fields ) {
    keys.add( ( (StepTableDataObject) entry ).getName() );
  }

  int choice = 0;

  if ( keys.size() > 0 ) {
    // Ask what we should do with the existing data in the step.
    //
    Shell shell = ( (TableViewer) tableView.getManagedObject() ).getTable().getShell();
    MessageDialog md =
      new MessageDialog( shell,
        BaseMessages.getString( PKG, "BaseStepDialog.GetFieldsChoice.Title" ), // "Warning!"
        null,
        BaseMessages.getString( PKG, "BaseStepDialog.GetFieldsChoice.Message", "" + keys.size(), "" + row.size() ),
        MessageDialog.WARNING, new String[] {
          BaseMessages.getString( PKG, "BaseStepDialog.AddNew" ),
          BaseMessages.getString( PKG, "BaseStepDialog.Add" ),
          BaseMessages.getString( PKG, "BaseStepDialog.ClearAndAdd" ),
          BaseMessages.getString( PKG, "BaseStepDialog.Cancel" ), }, 0 );
    MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
    int idx = md.open();
    choice = idx & 0xFF;
  }

  if ( choice == 3 || choice == 255 ) {
    return; // Cancel clicked
  }

  if ( choice == 2 ) {
    fields.clear();
  }

  for ( int i = 0; i < row.size(); i++ ) {
    ValueMetaInterface v = row.getValueMeta( i );

    if ( choice == 0 ) { // hang on, see if it's not yet in the table view

      if ( keys.indexOf( v.getName() ) >= 0 ) {
        continue;
      }
    }

    if ( listener != null && !listener.tableItemInsertedFor( v ) ) {
      continue;
    }

    StepTableDataObject newField = field.createNew( v );
    fields.add( newField );
  }
}
 
Example 19
Source File: Neo4JOutput.java    From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0
private void outputGraphValue( RowMetaInterface rowMeta, Object[] row ) throws KettleException {

    try {

      GraphData graphData = new GraphData();
      graphData.setSourceTransformationName( getTransMeta().getName() );
      graphData.setSourceStepName( getStepMeta().getName() );

      GraphNodeData sourceNodeData = null;
      GraphNodeData targetNodeData = null;
      GraphRelationshipData relationshipData;

      if ( meta.getFromNodeProps().length > 0 ) {
        sourceNodeData = createGraphNodeData( rowMeta, row, meta.getFromNodeLabels(), data.fromLabelValues, data.fromNodeLabelIndexes,
          data.fromNodePropIndexes, meta.getFromNodePropNames(), meta.getFromNodePropPrimary(), "from" );
        if ( !meta.isOnlyCreatingRelationships() ) {
          graphData.getNodes().add( sourceNodeData );
        }
      }
      if ( meta.getToNodeProps().length > 0 ) {
        targetNodeData = createGraphNodeData( rowMeta, row, meta.getToNodeLabels(), data.toLabelValues, data.toNodeLabelIndexes,
          data.toNodePropIndexes, meta.getToNodePropNames(), meta.getToNodePropPrimary(), "to" );
        if ( !meta.isOnlyCreatingRelationships() ) {
          graphData.getNodes().add( targetNodeData );
        }
      }

      String relationshipLabel = null;
      if ( data.relationshipIndex >= 0 ) {
        relationshipLabel = getInputRowMeta().getString( row, data.relationshipIndex );
      }
      if ( StringUtil.isEmpty( relationshipLabel ) && StringUtils.isNotEmpty( data.relationshipLabelValue ) ) {
        relationshipLabel = data.relationshipLabelValue;
      }
      if ( sourceNodeData != null && targetNodeData != null && StringUtils.isNotEmpty( relationshipLabel ) ) {

        relationshipData = new GraphRelationshipData();
        relationshipData.setSourceNodeId( sourceNodeData.getId() );
        relationshipData.setTargetNodeId( targetNodeData.getId() );
        relationshipData.setLabel( relationshipLabel );
        relationshipData.setId( sourceNodeData.getId() + " -> " + targetNodeData.getId() );
        relationshipData.setPropertySetId( "relationship" );

        // Add relationship properties...
        //
        // Set the properties
        //
        for ( int i = 0; i < data.relPropIndexes.length; i++ ) {

          ValueMetaInterface valueMeta = rowMeta.getValueMeta( data.relPropIndexes[ i ] );
          Object valueData = row[ data.relPropIndexes[ i ] ];

          String propertyName = meta.getRelPropNames()[ i ];
          GraphPropertyDataType propertyType = GraphPropertyDataType.getTypeFromKettle( valueMeta );
          Object propertyNeoValue = propertyType.convertFromKettle( valueMeta, valueData );
          boolean propertyPrimary = false;

          relationshipData.getProperties().add(
            new GraphPropertyData( propertyName, propertyNeoValue, propertyType, propertyPrimary )
          );
        }

        graphData.getRelationships().add( relationshipData );
      }

      // Pass it forward...
      //
      Object[] outputRowData = RowDataUtil.createResizedCopy( row, data.outputRowMeta.size() );
      int startIndex = rowMeta.size();
      outputRowData[ rowMeta.size() ] = graphData;
      putRow( data.outputRowMeta, outputRowData );

    } catch ( Exception e ) {
      throw new KettleException( "Unable to calculate graph output value", e );
    }
  }
 
Example 20
Source File: LucidDBStreamingLoaderMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) throws KettleStepException {

  SQLStatement retval = super.getSQLStatements( transMeta, stepMeta, prev, repository, metaStore );

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      String schemaTable =
        databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute( schemaName ), transMeta
          .environmentSubstitute( tableName ) );

      if ( !Utils.isEmpty( schemaTable ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String cr_table = db.getDDL( schemaTable, prev );

          // Empty string means: nothing to do: set it to null...
          if ( cr_table == null || cr_table.length() == 0 ) {
            cr_table = null;
          }

          retval.setSQL( cr_table );
        } catch ( KettleDatabaseException dbe ) {
          retval.setError( BaseMessages.getString( PKG, "LucidDBStreamingLoaderMeta.Error.ErrorConnecting", dbe
            .getMessage() ) );
        } finally {
          db.disconnect();
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "LucidDBStreamingLoaderMeta.Error.NoTable" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "LucidDBStreamingLoaderMeta.Error.NoInput" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "LucidDBStreamingLoaderMeta.Error.NoConnection" ) );
  }

  return retval;
}