Java Code Examples for org.pentaho.di.core.logging.LogChannelInterface#logDebug()

The following examples show how to use org.pentaho.di.core.logging.LogChannelInterface#logDebug(). They are drawn from open source projects; follow the link above each example to view the original project or source file, and check the related API usage on the sidebar.
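
Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them share: obtain a LogChannelInterface (here through the LogChannel constructor with an arbitrary subject, after initializing the central log store) and guard logDebug() with isDebug() so that debug messages are only assembled when they will actually be written. The class name, subject, and message below are illustrative and are not taken from any of the projects listed here.

import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;

public class LogDebugSketch {
  public static void main( String[] args ) {
    // Initialize the central log buffer before creating channels.
    KettleLogStore.init();

    // Any object can serve as the logging subject; a plain string is used here.
    LogChannelInterface log = new LogChannel( "LogDebugSketch" );
    log.setLogLevel( LogLevel.DEBUG );

    String host = "localhost";
    int port = 50000;

    // Guard with isDebug() so the message is only built when DEBUG is visible on this channel.
    if ( log.isDebug() ) {
      log.logDebug( "Connecting to " + host + ":" + port );
    }
  }
}
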
Example 1
Source File: ServerUtils.java    From pentaho-cpython-plugin with Apache License 2.0
/**
 * Prints out a JSON command for debugging purposes
 *
 * @param command the command to print out
 * @param log     optional log
 */
protected static void outputCommandDebug( Map<String, Object> command, LogChannelInterface log )
    throws KettleException {
  ObjectMapper mapper = new ObjectMapper();
  StringWriter sw = new StringWriter();
  try {
    mapper.writeValue( sw, command );
    String serialized = sw.toString();
    if ( log != null ) {
      log.logDebug( "Sending command:\n" + serialized );
    } else {
      System.err.println( "Sending command: " );
      System.err.println( serialized );
    }
  } catch ( IOException ex ) {
    throw new KettleException( ex );
  }
}
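
A hypothetical call site for the helper above, written as if it sat in the same package as ServerUtils (the method is protected static). The command keys mirror the ones used by the executeUserScript example further down; the helper name and channel name are invented for illustration.

// Hypothetical helper in the same package as ServerUtils.
static void debugExecuteScriptCommand( LogChannelInterface log ) throws KettleException {
  Map<String, Object> command = new HashMap<String, Object>();
  command.put( "command", "execute_script" );
  command.put( "debug", true );

  // Serializes the map to JSON and logs it at DEBUG level, or prints it to
  // System.err when no log channel is supplied.
  outputCommandDebug( command, log );
}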
 
Example 2
Source File: UserDefinedJavaClassMeta.java    From pentaho-kettle with Apache License 2.0
private boolean checkClassCookings( LogChannelInterface logChannel ) {
  boolean ok = cookedTransformClass != null && cookErrors.size() == 0;
  if ( changed ) {
    cookClasses();
    if ( cookedTransformClass == null ) {
      if ( cookErrors.size() > 0 ) {
        logChannel.logDebug( BaseMessages.getString(
          PKG, "UserDefinedJavaClass.Exception.CookingError", cookErrors.get( 0 ) ) );
      }
      ok = false;
    } else {
      ok = true;
    }
  }
  return ok;
}
 
Example 3
Source File: MonetDBBulkLoader.java    From pentaho-kettle with Apache License 2.0
protected static MapiSocket getMonetDBConnection( String host, int port,
    String user, String password, String db, LogChannelInterface log ) throws Exception {
  MapiSocket mserver = new MapiSocket();
  mserver.setDatabase( db );
  mserver.setLanguage( "sql" );

  List<?> warnings = mserver.connect( host, port, user, password );
  if ( warnings != null ) {
    for ( Object warning : warnings ) {
      if ( log != null ) {
        log.logBasic( "MonetDB connection warning: " + warning );
      }
    }
  } else {
    if ( log != null ) {
      log.logDebug( "Successful MapiSocket connection to MonetDB established." );
    }
  }
  return mserver;
}
 
Example 4
Source File: TransWebSocketEngineAdapter.java    From pentaho-kettle with Apache License 2.0
private void logToChannel( LogChannelInterface logChannel, LogEntry data ) {
  LogLevel logLogLevel = data.getLogLogLevel();
  switch ( logLogLevel ) {
    case ERROR:
      if ( data.getThrowable() != null ) {
        logChannel.logError( data.getMessage(), data.getThrowable() );
      } else {
        logChannel.logError( data.getMessage() );
      }
      break;
    case MINIMAL:
      logChannel.logMinimal( data.getMessage() );
      break;
    case BASIC:
      logChannel.logBasic( data.getMessage() );
      break;
    case DETAILED:
      logChannel.logDetailed( data.getMessage() );
      break;
    case DEBUG:
      logChannel.logDebug( data.getMessage() );
      break;
    case TRACE:
      logChannel.logRowlevel( data.getMessage() );
      break;
  }
}
 
Example 5
Source File: FieldHelper.java    From pentaho-kettle with Apache License 2.0
public static String getNativeDataTypeSimpleName( ValueMetaInterface v ) {
  try {
    return v.getType() != ValueMetaInterface.TYPE_BINARY ? v.getNativeDataTypeClass().getSimpleName() : "Binary";
  } catch ( KettleValueException e ) {
    LogChannelInterface log = new LogChannel( v );
    log.logDebug( BaseMessages.getString( PKG, "FieldHelper.Log.UnknownNativeDataTypeSimpleName" ) );
    return "Object";
  }
}
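
A short usage fragment for the helper above. The field name is invented, and the import path of FieldHelper (the user-defined Java class step package) is an assumption; ValueMetaString comes from org.pentaho.di.core.row.value.

ValueMetaInterface nameMeta = new ValueMetaString( "customerName" );

// For a string field the native class should resolve to java.lang.String, so "String" is returned;
// if the native class cannot be determined, the helper logs at DEBUG and falls back to "Object".
String simpleName = FieldHelper.getNativeDataTypeSimpleName( nameMeta );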
 
Example 6
Source File: SalesforceUtils.java    From pentaho-kettle with Apache License 2.0
/**
 * Extract and return the correct name for the field that should be processed as NULL
 *
 * @param log
 *          the logging object
 * @param field
 *          the field that should be processed as NULL
 * @param isUseExtId
 *          the flag that indicates if the field is external id or not
 * @return the correct name for the field that should be processed as NULL
 */
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
  String fieldToNullName = field;
  if ( isUseExtId ) {
    // verify if the field has correct syntax
    if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field,
            fieldToNullName ) );
      }
      return fieldToNullName;
    }

    String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
    // working with custom objects and relationship
    // cut off _r and then add _c in the end of the name
    if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
      fieldToNullName =
          lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() )
              + CUSTOM_OBJECT_SUFFIX;
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
      }
      return fieldToNullName;
    }

    fieldToNullName = lookupField + "Id";
  }

  if ( log.isDebug() ) {
    log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
  }

  return fieldToNullName;
}
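
A brief usage fragment: with isUseExtId set to false the method simply returns the field name (tracing it at DEBUG when debug logging is enabled); with true it applies the external-id rewriting shown above. The channel name and field name are invented.

LogChannelInterface log = new LogChannel( "SalesforceUtilsDemo" );

// Not an external-id lookup field, so the original name is returned unchanged.
String fieldToNull = SalesforceUtils.getFieldToNullName( log, "AccountNumber", false );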
 
Example 7
Source File: ServerUtils.java    From pentaho-cpython-plugin with Apache License 2.0
/**
 * Execute a script on the server
 *
 * @param script       the script to execute
 * @param outputStream the output stream to write data to the server
 * @param inputStream  the input stream to read responses from
 * @param log          optional log to write to
 * @return a two element list that contains the sys out and sys error from the
 * script execution
 * @throws KettleException if a problem occurs
 */
@SuppressWarnings( "unchecked" ) protected static List<String> executeUserScript( String script,
    OutputStream outputStream, InputStream inputStream, LogChannelInterface log ) throws KettleException {
  if ( !script.endsWith( "\n" ) ) {
    script += "\n";
  }
  List<String> outAndErr = new ArrayList<String>();

  ObjectMapper mapper = new ObjectMapper();
  boolean debug = log == null || log.isDebug();
  Map<String, Object> command = new HashMap<String, Object>();
  command.put( "command", "execute_script" );
  command.put( "script", script );
  command.put( "debug", debug );
  if ( inputStream != null && outputStream != null ) {
    try {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      mapper.writeValue( bos, command );
      byte[] bytes = bos.toByteArray();

      if ( debug ) {
        outputCommandDebug( command, log );
      }
      writeDelimitedToOutputStream( bytes, outputStream );

      // get the result of execution
      bytes = readDelimitedFromInputStream( inputStream );

      Map<String, Object> ack = mapper.readValue( bytes, Map.class );
      if ( !ack.get( RESPONSE_KEY ).toString().equals( OK_KEY ) ) {
        // fatal error
        throw new KettleException( ack.get( ERROR_MESSAGE_KEY ).toString() );
      }
      // get the script out and err
      outAndErr.add( ack.get( SCRIPT_OUT_KEY ).toString() );
      outAndErr.add( ack.get( SCRIPT_ERROR_KEY ).toString() );
      if ( debug ) {
        if ( log != null ) {
          log.logDebug(
              BaseMessages.getString( PKG, "ServerUtils.Message.ScriptOutput" ) + "\n" + outAndErr.get( 0 ) );
          log.logDebug(
              BaseMessages.getString( PKG, "ServerUtils.Message.ScriptError" ) + "\n" + outAndErr.get( 1 ) );
        } else {
          System.err.println( "Script output:\n" + outAndErr.get( 0 ) );
          System.err.println( "\nScript error:\n" + outAndErr.get( 1 ) );
        }
      }

      if ( outAndErr.get( 1 ).contains( "Warning:" ) ) {
        // clear warnings - we really just want to know if there
        // are major errors
        outAndErr.set( 1, "" );
      }
    } catch ( IOException ex ) {
      throw new KettleException( ex );
    }
  } else if ( debug ) {
    outputCommandDebug( command, log );
  }

  return outAndErr;
}
 
Example 8
Source File: ServerUtils.java    From pentaho-cpython-plugin with Apache License 2.0
/**
 * Send rows to python to be converted to a pandas data frame
 *
 * @param log          the log channel to use
 * @param inputStream  the input stream to read a response from
 * @param outputStream the output stream to talk to the server on
 * @throws KettleException if a problem occurs
 */
protected static void sendRowsToPandasDataFrame( LogChannelInterface log, RowMetaInterface meta, List<Object[]> rows,
    String frameName, OutputStream outputStream, InputStream inputStream ) throws KettleException {
  ObjectMapper mapper = new ObjectMapper();

  boolean debug = log == null || log.isDebug();
  Map<String, Object> metaData = createMetadataMessage( frameName, meta );
  Map<String, Object> command = new HashMap<String, Object>();
  command.put( COMMAND_KEY, ACCEPT_ROWS_COMMAND );
  command.put( NUM_ROWS_KEY, rows.size() );
  command.put( ROW_META_KEY, metaData );
  command.put( DEBUG_KEY, debug );

  boolean needsBase64 = (boolean) metaData.get( BASE64_ENCODING_KEY );
  command.put( BASE64_ENCODING_KEY, needsBase64 );
  metaData.remove( BASE64_ENCODING_KEY );

  if ( inputStream != null && outputStream != null ) {
    try {
      List<Object> rowsInfo = rowsToCSVNew( meta, rows );

      // unfortunately we'll incur the base 64 transcoding overhead even if it
      // is only the header row that needs it.
      if ( !needsBase64 ) {
        command.put( BASE64_ENCODING_KEY, (boolean) rowsInfo.get( 1 ) );
      }

      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      mapper.writeValue( bos, command );
      byte[] bytes = bos.toByteArray();

      if ( debug ) {
        outputCommandDebug( command, log );
      }

      // write the command
      writeDelimitedToOutputStream( bytes, outputStream );

      // now write the CSV data
      if ( rows.size() > 0 ) {
        if ( log != null && debug ) {
          log.logDebug( "Sending CSV data..." );
        }

        writeDelimitedToOutputStream( (byte[]) rowsInfo.get( 0 ), outputStream );

        /* // bos = new ByteArrayOutputStream();
        // BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( bos ) );
        StringBuilder csv = rowsToCSV( meta, rows );
        Charset utf8 = Charset.forName( "UTF-8" );
        ByteBuffer
            bb =
            utf8.newEncoder().onUnmappableCharacter( CodingErrorAction.IGNORE )
                .onMalformedInput( CodingErrorAction.IGNORE ).encode( CharBuffer.wrap( csv.toString() ) );
        // byte[] ptext = csv.toString().getBytes( Charset.forName( "UTF-8" ) );
        System.out.println( csv.toString() );
        System.out.println( "-----------------" );
        // bw.write( csv.toString() );
        // bw.flush();
        // bw.close();
        // bytes = bos.toByteArray();
        // writeDelimitedToOutputStream( bytes, outputStream );
        writeDelimitedToOutputStream( bb.array(), outputStream ); */
      }

      String serverAck = receiveServerAck( inputStream );
      if ( serverAck != null ) {
        throw new KettleException(
            BaseMessages.getString( PKG, "ServerUtils.Error.TransferOfRowsFailed" ) + serverAck );
      }
    } catch ( IOException ex ) {
      throw new KettleException( ex );
    }
  } else if ( debug ) {
    outputCommandDebug( command, log );
  }
}
 
Example 9
Source File: ServerUtils.java    From pentaho-cpython-plugin with Apache License 2.0
/**
 * Receive rows taken from a python pandas data frame
 *
 * @param log             the log channel to use
 * @param frameName       the name of the pandas frame to get
 * @param includeRowIndex true to include the frame row index as a field
 * @param inputStream     the input stream to read a response from
 * @param outputStream    the output stream to talk to the server on
 * @return the data frame converted to rows along with its associated row metadata
 * @throws KettleException if a problem occurs
 */
@SuppressWarnings( "unchecked" ) protected static RowMetaAndRows receiveRowsFromPandasDataFrame(
    LogChannelInterface log, String frameName, boolean includeRowIndex, OutputStream outputStream,
    InputStream inputStream ) throws KettleException {

  boolean debug = log == null || log.isDebug();
  ObjectMapper mapper = new ObjectMapper();
  Map<String, Object> command = new HashMap<String, Object>();
  command.put( COMMAND_KEY, GET_FRAME_COMMAND );
  command.put( FRAME_NAME_KEY, frameName );
  command.put( FRAME_INCLUDE_ROW_INDEX, includeRowIndex );
  command.put( DEBUG_KEY, debug );

  RowMetaAndRows result = null;
  if ( inputStream != null && outputStream != null ) {
    try {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      mapper.writeValue( bos, command );
      byte[] bytes = bos.toByteArray();

      if ( debug ) {
        outputCommandDebug( command, log );
      }

      writeDelimitedToOutputStream( bytes, outputStream );
      String serverAck = receiveServerAck( inputStream );
      if ( serverAck != null ) {
        throw new KettleException( serverAck );
      }

      // read the header
      bytes = readDelimitedFromInputStream( inputStream );
      Map<String, Object> headerResponse = mapper.readValue( bytes, Map.class );
      if ( headerResponse == null ) {
        throw new KettleException( BaseMessages.getString( PKG, "ServerUtils.Error.HeaderMetadataMapIsNull" ) );
      }
      if ( headerResponse.get( RESPONSE_KEY ).toString().equals( ROW_META_KEY ) ) {
        if ( log != null ) {
          log.logDebug( BaseMessages
              .getString( PKG, "ServerUtils.Message.ReceivedMetadataResponse", headerResponse.get( NUM_ROWS_KEY ) ) );
        } else {
          System.err.println( BaseMessages
              .getString( PKG, "ServerUtils.Message.ReceivedMetadataResponse", headerResponse.get( NUM_ROWS_KEY ) ) );
        }
      } else {
        throw new KettleException( BaseMessages.getString( PKG, "ServerUtils.Error.UnknownResponseType" ) );
      }

      RowMetaInterface convertedMeta = jsonRowMetadataToRowMeta( frameName, headerResponse );
      int numRows = (Integer) headerResponse.get( NUM_ROWS_KEY );

      bytes = readDelimitedFromInputStream( inputStream );
      String csv = new String( bytes, Charset.forName( "UTF-8" ) );
      result = csvToRows( csv, convertedMeta, numRows );
    } catch ( IOException ex ) {
      throw new KettleException( ex );
    }
  } else {
    outputCommandDebug( command, log );
  }

  return result;
}
 
Example 10
Source File: JobEntryWriteToLog.java    From pentaho-kettle with Apache License 2.0
/**
 * Output message to job log.
 */
public boolean evaluate( Result result ) {
  LogChannelInterface logChannel = createLogChannel();
  String message = getRealLogMessage();

  // Filter out empty messages and those that are not visible with the job's log level
  if ( Utils.isEmpty( message ) || !getEntryLogLevel().isVisible( logChannel.getLogLevel() ) ) {
    return true;
  }

  try {
    switch ( getEntryLogLevel() ) {
      case ERROR:
        logChannel.logError( message + Const.CR );
        break;
      case MINIMAL:
        logChannel.logMinimal( message + Const.CR );
        break;
      case BASIC:
        logChannel.logBasic( message + Const.CR );
        break;
      case DETAILED:
        logChannel.logDetailed( message + Const.CR );
        break;
      case DEBUG:
        logChannel.logDebug( message + Const.CR );
        break;
      case ROWLEVEL:
        logChannel.logRowlevel( message + Const.CR );
        break;
      default: // NOTHING
        break;
    }

    return true;
  } catch ( Exception e ) {
    result.setNrErrors( 1 );
    log.logError( BaseMessages.getString( PKG, "WriteToLog.Error.Label" ), BaseMessages.getString(
      PKG, "WriteToLog.Error.Description" )
      + " : " + e.toString() );
    return false;
  }

}
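
As a follow-up to the last example, the same visibility check can also be used directly when deciding whether to emit a single debug message, which is equivalent to calling isDebug() on the channel. A minimal fragment with an invented channel name:

LogChannelInterface log = new LogChannel( "WriteToLogDemo" );

// True only when the channel's level is DEBUG or ROWLEVEL, i.e. the same condition as log.isDebug().
if ( LogLevel.DEBUG.isVisible( log.getLogLevel() ) ) {
  log.logDebug( "Detailed diagnostic output" + Const.CR );
}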