org.antlr.v4.runtime.CommonToken Java Examples

The following examples show how to use org.antlr.v4.runtime.CommonToken. They are extracted from open source projects; the originating project and source file are noted above each example.
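Before the project examples, here is a minimal, self-contained sketch of two CommonToken idioms that recur below: building a detached token from just a type and a text, and copying an existing token so its channel (or type) can be changed while its position information is kept. The type constants and strings used here are placeholder values chosen for illustration, not taken from any of the projects.

import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Token;

public class CommonTokenSketch {
    public static void main(String[] args) {
        // A detached token carrying only a type and text; no input stream is attached.
        CommonToken ident = new CommonToken(Token.INVALID_TYPE, "someIdentifier");
        ident.setLine(1);
        ident.setCharPositionInLine(0);

        // Copy an existing token and move the copy to the hidden channel;
        // the copy keeps the original's text, type, line and index information.
        CommonToken hidden = new CommonToken(ident);
        hidden.setChannel(Token.HIDDEN_CHANNEL);

        System.out.println(ident);
        System.out.println(hidden);
    }
}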
Example #1
Source File: SqlParser.java    From rainbow with Apache License 2.0
@Override
public void exitNonReserved(SqlBaseParser.NonReservedContext context)
{
    // we can't modify the tree during rule enter/exit event handling unless we're dealing with a terminal.
    // Otherwise, ANTLR gets confused and fires spurious notifications.
    if (!(context.getChild(0) instanceof TerminalNode)) {
        int rule = ((ParserRuleContext) context.getChild(0)).getRuleIndex();
        throw new AssertionError("nonReserved can only contain tokens. Found nested rule: " + ruleNames.get(rule));
    }

    // replace nonReserved words with IDENT tokens
    context.getParent().removeLastChild();

    Token token = (Token) context.getChild(0).getPayload();
    context.getParent().addChild(new CommonToken(
            new Pair<>(token.getTokenSource(), token.getInputStream()),
            SqlBaseLexer.IDENTIFIER,
            token.getChannel(),
            token.getStartIndex(),
            token.getStopIndex()));
}
 
Example #2
Source File: Formatter.java    From codebuff with BSD 2-Clause "Simplified" License
public static void wipeCharPositionInfoAndWhitespaceTokens(CodeBuffTokenStream tokens) {
	tokens.fill();
	CommonToken dummy = new CommonToken(Token.INVALID_TYPE, "");
	dummy.setChannel(Token.HIDDEN_CHANNEL);
	Token firstRealToken = tokens.getNextRealToken(-1);
	for (int i = 0; i<tokens.size(); i++) {
		if ( i==firstRealToken.getTokenIndex() ) continue; // don't whack the first token
		CommonToken t = (CommonToken)tokens.get(i);
		if ( t.getText().matches("\\s+") ) {
			tokens.getTokens().set(i, dummy); // whack whitespace token so we can't use it during prediction
		}
		else {
			t.setLine(0);
			t.setCharPositionInLine(-1);
		}
	}
}
 
Example #3
Source File: SqlParser.java    From macrobase with Apache License 2.0
@Override
public void exitNonReserved(SqlBaseParser.NonReservedContext context) {
    // we can't modify the tree during rule enter/exit event handling unless we're dealing with a terminal.
    // Otherwise, ANTLR gets confused and fires spurious notifications.
    if (!(context.getChild(0) instanceof TerminalNode)) {
        int rule = ((ParserRuleContext) context.getChild(0)).getRuleIndex();
        throw new AssertionError(
            "nonReserved can only contain tokens. Found nested rule: " + ruleNames
                .get(rule));
    }

    // replace nonReserved words with IDENT tokens
    context.getParent().removeLastChild();

    Token token = (Token) context.getChild(0).getPayload();
    context.getParent().addChild(new CommonToken(
        new Pair<>(token.getTokenSource(), token.getInputStream()),
        SqlBaseLexer.IDENTIFIER,
        token.getChannel(),
        token.getStartIndex(),
        token.getStopIndex()));
}
 
Example #4
Source File: Pql2Compiler.java    From incubator-pinot with Apache License 2.0
public static AstNode buildAst(String expression) {
  CharStream charStream = new ANTLRInputStream(expression);
  PQL2Lexer lexer = new PQL2Lexer(charStream);
  lexer.setTokenFactory(new CommonTokenFactory(true));
  lexer.removeErrorListeners();
  lexer.addErrorListener(ERROR_LISTENER);
  TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer);
  PQL2Parser parser = new PQL2Parser(tokenStream);
  parser.setErrorHandler(new BailErrorStrategy());
  parser.removeErrorListeners();
  parser.addErrorListener(ERROR_LISTENER);

  // Parse
  ParseTree parseTree = parser.root();

  ParseTreeWalker walker = new ParseTreeWalker();
  Pql2AstListener listener = new Pql2AstListener(expression);
  walker.walk(listener, parseTree);

  AstNode rootNode = listener.getRootNode();
  return rootNode;
}
 
Example #5
Source File: ContextDependentFEELLexerTest.java    From jdmn with Apache License 2.0
@Test
public void testSequencesWithEmptyContext() {
    LexicalContext lexicalContext = new LexicalContext();

    check(lexicalContext, "a + b", Arrays.asList(
            new CommonToken(NAME, "a + b"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "// line comment \n123", Arrays.asList(
            new CommonToken(NUMBER, "123"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "// line comment \n123", Arrays.asList(
            new CommonToken(NUMBER, "123"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "for i in ", Arrays.asList(
            new CommonToken(FOR, "for"),
            new CommonToken(NAME, "i in"),
            new CommonToken(EOF, "")
    ));
}
 
Example #6
Source File: TokenStreamSubset.java    From intellij-plugin-v4 with BSD 3-Clause "New" or "Revised" License
public void setIndexOfLastToken(int indexOfLastToken) {
		System.out.println("setIndexOfLastToken("+indexOfLastToken+")");
		if ( indexOfLastToken<0 ) {
			System.out.println("replacing "+saveToken.getTokenIndex()+" with "+saveToken);
			tokens.set(saveToken.getTokenIndex(), saveToken);
//			this.indexOfLastToken = indexOfLastToken;
			return;
		}
		int i = indexOfLastToken + 1; // we want to keep token at indexOfLastToken
		sync(i);
		saveToken = tokens.get(i);
		System.out.println("saving "+saveToken);
		CommonToken stopToken = new CommonToken(saveToken);
		stopToken.setType(STOP_TOKEN_TYPE);
		System.out.println("setting "+i+" to "+stopToken);
		tokens.set(i, stopToken);
//		this.indexOfLastToken = indexOfLastToken;
	}
 
Example #7
Source File: SqlParser.java    From crate with Apache License 2.0
@Override
public void exitNonReserved(SqlBaseParser.NonReservedContext context) {
    // replace nonReserved words with IDENT tokens
    context.getParent().removeLastChild();

    Token token = (Token) context.getChild(0).getPayload();
    context.getParent().addChild(new TerminalNodeImpl(
        new CommonToken(
            new Pair<>(token.getTokenSource(), token.getInputStream()),
            SqlBaseLexer.IDENTIFIER,
            token.getChannel(),
            token.getStartIndex(),
            token.getStopIndex())
        )
    );
}
 
Example #8
Source File: SqlParser.java    From presto with Apache License 2.0
@Override
public void exitNonReserved(SqlBaseParser.NonReservedContext context)
{
    // we can't modify the tree during rule enter/exit event handling unless we're dealing with a terminal.
    // Otherwise, ANTLR gets confused and fires spurious notifications.
    if (!(context.getChild(0) instanceof TerminalNode)) {
        int rule = ((ParserRuleContext) context.getChild(0)).getRuleIndex();
        throw new AssertionError("nonReserved can only contain tokens. Found nested rule: " + ruleNames.get(rule));
    }

    // replace nonReserved words with IDENT tokens
    context.getParent().removeLastChild();

    Token token = (Token) context.getChild(0).getPayload();
    Token newToken = new CommonToken(
            new Pair<>(token.getTokenSource(), token.getInputStream()),
            SqlBaseLexer.IDENTIFIER,
            token.getChannel(),
            token.getStartIndex(),
            token.getStopIndex());

    context.getParent().addChild(parser.createTerminalNode(context.getParent(), newToken));
}
 
Example #9
Source File: Pql2Compiler.java    From incubator-pinot with Apache License 2.0
public AstNode parseToAstNode(String expression) {
  CharStream charStream = new ANTLRInputStream(expression);
  PQL2Lexer lexer = new PQL2Lexer(charStream);
  lexer.setTokenFactory(new CommonTokenFactory(true));
  TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer);
  PQL2Parser parser = new PQL2Parser(tokenStream);
  parser.setErrorHandler(new BailErrorStrategy());

  // Parse
  Pql2AstListener listener = new Pql2AstListener(expression);
  new ParseTreeWalker().walk(listener, parser.expression());
  return listener.getRootNode();
}
 
Example #10
Source File: StaticScope.java    From trygve with GNU General Public License v2.0
private static void reinitializeObject(final Type objectType, final StaticScope objectsScope) {
	final CommonToken objectToken = new CommonToken(0);
	objectToken.setLine(157239);
	final ClassDeclaration objectClass = new ClassDeclaration("Object", objectsScope, null, objectToken);
	globalScope_.declareClass(objectClass);
	objectClass.setType(objectType);
	objectsScope.setDeclaration(objectClass);
	
	typeDeclarationList_.add(objectClass);
}
 
Example #11
Source File: GlobalVariableDefinition.java    From ballerina-integrator with Apache License 2.0
@Override
public List<CompletionItem> getCompletions(LSContext ctx) {

    List<CompletionItem> completionItems = super.getCompletions(ctx);
    List<CommonToken> lhsDefaultTokens = ctx.get(CompletionKeys.LHS_DEFAULT_TOKENS_KEY);

    if (lhsDefaultTokens.size() <= 2) {
        completionItems.addAll(this.getAllTopLevelItems(ctx));
    }
    return completionItems;
}
 
Example #12
Source File: CQL.java    From chronix.server with Apache License 2.0
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) {

    if (offendingSymbol instanceof CommonToken) {
        CommonToken offendingToken = (CommonToken) offendingSymbol;
        StringBuilder annotatedCQL = new StringBuilder(cql);
        annotatedCQL.insert(offendingToken.getStopIndex() + 1, "'<-this-'");
        cql = annotatedCQL.toString();
    }


    throw new CQLException("Syntax error: " + msg + " at line: '" + line + "', position: '" + charPositionInLine + "', query '" + cql + "'");
}
 
Example #13
Source File: QueryListener.java    From heroic with Apache License 2.0
@Override
public void exitString(HeroicQueryParser.StringContext ctx) {
    final ParseTree child = ctx.getChild(0);
    final CommonToken token = (CommonToken) child.getPayload();
    final Context c = context(ctx);

    if (token.getType() == HeroicQueryLexer.SimpleString ||
        token.getType() == HeroicQueryLexer.Identifier) {
        push(new StringExpression(c, child.getText()));
        return;
    }

    push(new StringExpression(c, parseQuotedString(c, child.getText())));
}
 
Example #14
Source File: ErrorStrategyAdaptor.java    From antlr4-intellij-adaptor with BSD 2-Clause "Simplified" License
/** By default ANTLR makes the start/stop -1/-1 for invalid tokens
 *  which is reasonable but here we want to highlight the
 *  current position, indicating that this is where we lack a token.
 *  If no input, highlight at position 0.
 */
protected Token getMissingSymbol(Parser recognizer) {
	Token missingSymbol = super.getMissingSymbol(recognizer);
	// alter the default missing symbol.
	if ( missingSymbol instanceof CommonToken) {
		int start, stop;
		Token current = recognizer.getCurrentToken();
		start = current.getStartIndex();
		stop = current.getStopIndex();
		((CommonToken) missingSymbol).setStartIndex(start);
		((CommonToken) missingSymbol).setStopIndex(stop);
	}
	return missingSymbol;
}
 
Example #15
Source File: IndexRQL.java    From indexr with Apache License 2.0
private static ParseTree parseSQL(String sql) {
    CharStream charStream = new ANTLRInputStream(sql);
    RQLLexer lexer = new RQLLexer(charStream);
    lexer.setTokenFactory(new CommonTokenFactory(true));
    TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer);
    RQLParser parser = new RQLParser(tokenStream);
    parser.setErrorHandler(new BailErrorStrategy());
    return parser.root();
}
 
Example #16
Source File: Select_or_valuesGenerator.java    From antsdb with GNU Lesser General Public License v3.0
private static RuleContext createColumnName_(Expr_functionContext rule, OutputField field) {
    Column_name_Context column_name_ = new Column_name_Context(rule.getParent(), rule.invokingState);
    Column_nameContext column_name = new Column_nameContext(column_name_.getParent(), rule.invokingState);
    IdentifierContext identifier = new IdentifierContext(column_name, rule.invokingState);
    CommonToken token = CommonTokenFactory.DEFAULT.create(
            MysqlParser.BACKTICK_QUOTED_IDENTIFIER, 
            '`' + field.name + '`' );
    TerminalNode term = new TerminalNodeImpl(token);
    identifier.addChild(term);
    column_name.addChild(identifier);
    column_name_.addChild(column_name);
    return column_name_;
}
 
Example #17
Source File: CodeBuffTokenStream.java    From codebuff with BSD 2-Clause "Simplified" License
public CodeBuffTokenStream(CommonTokenStream stream) {
	super(stream.getTokenSource());
	this.fetchedEOF = false;
	for (Token t : stream.getTokens()) {
		tokens.add(new CommonToken(t));
	}
	reset();
}
 
Example #18
Source File: TemplateCustomProperties.java    From M2Doc with Eclipse Public License 1.0
/**
 * Parses while matching an AQL type literal.
 * 
 * @param queryEnvironment
 *            the {@link IReadOnlyQueryEnvironment}
 * @param type
 *            the type to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlTypeLiteral(IReadOnlyQueryEnvironment queryEnvironment, String type) {
    final IQueryBuilderEngine.AstResult result;

    if (type != null && type.length() > 0) {
        AstBuilderListener astBuilder = AQL56Compatibility
                .createAstBuilderListener((IQueryEnvironment) queryEnvironment);
        CharStream input = new UnbufferedCharStream(new StringReader(type), type.length());
        QueryLexer lexer = new QueryLexer(input);
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
        QueryParser parser = new QueryParser(tokens);
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.typeLiteral();
        result = astBuilder.getAstResult();
    } else {
        ErrorTypeLiteral errorTypeLiteral = (ErrorTypeLiteral) EcoreUtil
                .create(AstPackage.eINSTANCE.getErrorTypeLiteral());
        List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<>(1);
        errors.add(errorTypeLiteral);
        final Map<Object, Integer> positions = new HashMap<>();
        if (type != null) {
            positions.put(errorTypeLiteral, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0, "null or empty type.",
                new Object[] {errorTypeLiteral }));
        result = new AstResult(errorTypeLiteral, positions, positions, errors, diagnostic);
    }

    return result;
}
 
Example #19
Source File: M2DocParser.java    From M2Doc with Eclipse Public License 1.0
/**
 * Parses while matching an AQL type literal.
 * 
 * @param expression
 *            the expression to parse
 * @return the corresponding {@link AstResult}
 */
protected AstResult parseWhileAqlTypeLiteral(String expression) {
    final IQueryBuilderEngine.AstResult result;

    if (expression != null && expression.length() > 0) {
        AstBuilderListener astBuilder = AQL56Compatibility.createAstBuilderListener(queryEnvironment);
        CharStream input = new UnbufferedCharStream(new StringReader(expression), expression.length());
        QueryLexer lexer = new QueryLexer(input);
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
        QueryParser parser = new QueryParser(tokens);
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.typeLiteral();
        result = astBuilder.getAstResult();
    } else {
        ErrorTypeLiteral errorTypeLiteral = (ErrorTypeLiteral) EcoreUtil
                .create(AstPackage.eINSTANCE.getErrorTypeLiteral());
        List<org.eclipse.acceleo.query.ast.Error> errs = new ArrayList<>(1);
        errs.add(errorTypeLiteral);
        final Map<Object, Integer> positions = new HashMap<>();
        if (expression != null) {
            positions.put(errorTypeLiteral, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0,
                "missing type literal", new Object[] {errorTypeLiteral }));
        result = new AstResult(errorTypeLiteral, positions, positions, errs, diagnostic);
    }

    return result;
}
 
Example #20
Source File: M2DocParser.java    From M2Doc with Eclipse Public License 1.0
/**
 * Parses while matching an AQL expression.
 * 
 * @param expression
 *            the expression to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlExpression(String expression) {
    final IQueryBuilderEngine.AstResult result;

    if (expression != null && expression.length() > 0) {
        AstBuilderListener astBuilder = AQL56Compatibility.createAstBuilderListener(queryEnvironment);
        CharStream input = new UnbufferedCharStream(new StringReader(expression), expression.length());
        QueryLexer lexer = new QueryLexer(input);
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
        QueryParser parser = new QueryParser(tokens);
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.expression();
        result = astBuilder.getAstResult();
    } else {
        ErrorExpression errorExpression = (ErrorExpression) EcoreUtil
                .create(AstPackage.eINSTANCE.getErrorExpression());
        List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<>(1);
        errors.add(errorExpression);
        final Map<Object, Integer> positions = new HashMap<>();
        if (expression != null) {
            positions.put(errorExpression, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0,
                "null or empty string.", new Object[] {errorExpression }));
        result = new AstResult(errorExpression, positions, positions, errors, diagnostic);
    }

    return result;
}
 
Example #21
Source File: TestAntlrUtils.java    From yauaa with Apache License 2.0
@Test
public void testEdges() {
    ParserRuleContext context = new ParserRuleContext(null, 42);

    ParseTree content = new RuleContext() {
        @Override
        public String getText() {
            return "Content";
        }
    };
    context.children = Collections.singletonList(content);

    Token start = new CommonToken(1, "Start");
    Token stop = new CommonToken(2, "Stop");

    context.start = null;
    context.stop = null;
    assertEquals("Content", AntlrUtils.getSourceText(context));

    context.start = start;
    context.stop = null;
    assertEquals("Content", AntlrUtils.getSourceText(context));

    context.start = null;
    context.stop = stop;
    assertEquals("Content", AntlrUtils.getSourceText(context));
}
 
Example #22
Source File: MatcherAction.java    From yauaa with Apache License 2.0
private void unQuoteToken(Token token) {
    if (token instanceof CommonToken) {
        CommonToken commonToken = (CommonToken) token;
        commonToken.setStartIndex(commonToken.getStartIndex() + 1);
        commonToken.setStopIndex(commonToken.getStopIndex() - 1);
    }
}
 
Example #23
Source File: ContextDependentFEELLexerTest.java    From jdmn with Apache License 2.0
@Test
public void testComments() {
    LexicalContext lexicalContext = new LexicalContext();

    check(lexicalContext, "// line comment \n123", Arrays.asList(
            new CommonToken(NUMBER, "123"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "/* block comment */ 123", Arrays.asList(
            new CommonToken(NUMBER, "123"),
            new CommonToken(EOF, "")
    ));
}
 
Example #24
Source File: ContextDependentFEELLexerTest.java    From jdmn with Apache License 2.0
@Test
public void testSequencesWithContext() {
    LexicalContext lexicalContext = new LexicalContext("a + b", "a", "b");

    check(lexicalContext, "a + b", Arrays.asList(
            new CommonToken(NAME, "a + b"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "a * b", Arrays.asList(
            new CommonToken(NAME, "a"), new CommonToken(STAR, "*"), new CommonToken(NAME, "b"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "(a) * b", Arrays.asList(
            new CommonToken(PAREN_OPEN, "("), new CommonToken(NAME, "a"), new CommonToken(PAREN_CLOSE, ")"), new CommonToken(STAR, "*"), new CommonToken(NAME, "b"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "for i in x ", Arrays.asList(
            new CommonToken(FOR, "for"), new CommonToken(NAME, "i in x"),
            new CommonToken(EOF, "")
    ));

    check(lexicalContext, "for (i) in x ", Arrays.asList(
            new CommonToken(FOR, "for"), new CommonToken(PAREN_OPEN, "("), new CommonToken(NAME, "i"), new CommonToken(PAREN_CLOSE, ")"), new CommonToken(IN, "in"), new CommonToken(NAME, "x"),
            new CommonToken(EOF, "")
    ));
}
 
Example #25
Source File: LocationRepointCommonTokenFactory.java    From Concurnas with MIT License
@Override
public CommonToken create(Pair<TokenSource, CharStream> source, int type, String text,
						  int channel, int start, int stop,
						  int line, int charPositionInLine)
{
	return super.create( source,  type,  text, channel,  start,  stop, /*line +*/ lineoffset,  charPositionInLine + coloffset);
}
 
Example #26
Source File: PythonLexerBase.java    From depends with MIT License
private void emit(int tokenType, int channel, String text) {
    int charIndex = getCharIndex();
    CommonToken token = new CommonToken(_tokenFactorySourcePair, tokenType, channel, charIndex - text.length(), charIndex);
    token.setLine(getLine());
    token.setCharPositionInLine(getCharPositionInLine());
    token.setText(text);

    emit(token);
}
 
Example #27
Source File: Formatter.java    From codebuff with BSD 2-Clause "Simplified" License
public TokenPositionAnalysis getTokenAnalysis(int[] features, int[] featuresForAlign,
                                              int tokenIndexInStream,
                                              int injectNL_WS, int alignOrIndent,
                                              boolean collectAnalysis)
{
	CommonToken curToken = (CommonToken)originalDoc.tokens.get(tokenIndexInStream);
	TerminalNode nodeWithOriginalToken = originalTokenToNodeMap.get(curToken);

	int actualWS = Trainer.getInjectWSCategory(originalTokens, tokenIndexInStream);
	String actualWSNL = getWSCategoryStr(actualWS);
	actualWSNL = actualWSNL!=null ? actualWSNL : String.format("%8s","none");

	String wsDisplay = getWSCategoryStr(injectNL_WS);
	if ( wsDisplay==null ) wsDisplay = String.format("%8s","none");
	String alignDisplay = getHPosCategoryStr(alignOrIndent);
	if ( alignDisplay==null ) alignDisplay = String.format("%8s","none");
	String newlinePredictionString =
		String.format("### line %d: predicted %s actual %s",
		              curToken.getLine(), wsDisplay, actualWSNL);

	int actualAlignCategory = Trainer.getAlignmentCategory(originalDoc, nodeWithOriginalToken, indentSize);
	String actualAlignDisplay = getHPosCategoryStr(actualAlignCategory);
	actualAlignDisplay = actualAlignDisplay!=null ? actualAlignDisplay : String.format("%8s","none");

	String alignPredictionString =
		String.format("### line %d: predicted %s actual %s",
		              curToken.getLine(),
		              alignDisplay,
		              actualAlignDisplay);

	String newlineAnalysis = "";
	String alignAnalysis = "";
	if ( collectAnalysis ) { // this can be slow
		newlineAnalysis = newlinePredictionString+"\n"+
			wsClassifier.getPredictionAnalysis(testDoc, k, features, corpus.injectWhitespace,
			                                   MAX_WS_CONTEXT_DIFF_THRESHOLD);
		if ( (injectNL_WS&0xFF)==CAT_INJECT_NL ) {
			alignAnalysis =
				alignPredictionString+"\n"+
					hposClassifier.getPredictionAnalysis(testDoc, k, featuresForAlign, corpus.hpos,
					                                     MAX_ALIGN_CONTEXT_DIFF_THRESHOLD);
		}
	}
	TokenPositionAnalysis a = new TokenPositionAnalysis(curToken, injectNL_WS, newlineAnalysis, alignOrIndent, alignAnalysis);
	a.actualWS = Trainer.getInjectWSCategory(originalTokens, tokenIndexInStream);
	a.actualAlign = actualAlignCategory;
	return a;
}
 
Example #28
Source File: Formatter.java    From codebuff with BSD 2-Clause "Simplified" License
public void processToken(int indexIntoRealTokens, int tokenIndexInStream, boolean collectAnalysis) {
	CommonToken curToken = (CommonToken)testDoc.tokens.get(tokenIndexInStream);
	String tokText = curToken.getText();
	TerminalNode node = tokenToNodeMap.get(curToken);

	int[] features = getFeatures(testDoc, tokenIndexInStream);
	int[] featuresForAlign = new int[features.length];
	System.arraycopy(features, 0, featuresForAlign, 0, features.length);

	int injectNL_WS = wsClassifier.classify(k, features, Trainer.MAX_WS_CONTEXT_DIFF_THRESHOLD);

	injectNL_WS = emitCommentsToTheLeft(tokenIndexInStream, injectNL_WS);

	int newlines = 0;
	int ws = 0;
	if ( (injectNL_WS&0xFF)==CAT_INJECT_NL ) {
		newlines = Trainer.unnlcat(injectNL_WS);
	}
	else if ( (injectNL_WS&0xFF)==CAT_INJECT_WS ) {
		ws = Trainer.unwscat(injectNL_WS);
	}

	if ( newlines==0 && ws==0 && cannotJoin(realTokens.get(indexIntoRealTokens-1), curToken) ) { // failsafe!
		ws = 1;
	}

	int alignOrIndent = CAT_ALIGN;

	if ( newlines>0 ) {
		output.append(Tool.newlines(newlines));
		line+=newlines;
		charPosInLine = 0;

		// getFeatures() doesn't know what line curToken is on. If \n, we need to find exemplars that start a line
		featuresForAlign[INDEX_FIRST_ON_LINE] = 1; // use \n prediction to match exemplars for alignment

		alignOrIndent = hposClassifier.classify(k, featuresForAlign, MAX_ALIGN_CONTEXT_DIFF_THRESHOLD);

		if ( (alignOrIndent&0xFF)==CAT_ALIGN_WITH_ANCESTOR_CHILD ) {
			align(alignOrIndent, node);
		}
		else if ( (alignOrIndent&0xFF)==CAT_INDENT_FROM_ANCESTOR_CHILD ) {
			indent(alignOrIndent, node);
		}
		else if ( (alignOrIndent&0xFF)==CAT_ALIGN ) {
			List<Token> tokensOnPreviousLine = getTokensOnPreviousLine(testDoc.tokens, tokenIndexInStream, line);
			if ( tokensOnPreviousLine.size()>0 ) {
				Token firstTokenOnPrevLine = tokensOnPreviousLine.get(0);
				int indentCol = firstTokenOnPrevLine.getCharPositionInLine();
				charPosInLine = indentCol;
				output.append(Tool.spaces(indentCol));
			}
		}
		else if ( (alignOrIndent&0xFF)==CAT_INDENT ) {
			indent(alignOrIndent, node);
		}
	}
	else {
		// inject whitespace instead of \n?
		output.append(Tool.spaces(ws));
		charPosInLine += ws;
	}

	// update Token object with position information now that we are about
	// to emit it.
	curToken.setLine(line);
	curToken.setCharPositionInLine(charPosInLine);

	TokenPositionAnalysis tokenPositionAnalysis =
		getTokenAnalysis(features, featuresForAlign, tokenIndexInStream, injectNL_WS, alignOrIndent, collectAnalysis);

	analysis.set(tokenIndexInStream, tokenPositionAnalysis);

	int n = tokText.length();
	tokenPositionAnalysis.charIndexStart = output.length();
	tokenPositionAnalysis.charIndexStop = tokenPositionAnalysis.charIndexStart + n - 1;

	// emit
	output.append(tokText);
	charPosInLine += n;
}
 
Example #29
Source File: Dbg.java    From codebuff with BSD 2-Clause "Simplified" License
public static void printOriginalFilePiece(InputDocument doc, CommonToken originalCurToken) {
	System.out.println(doc.getLine(originalCurToken.getLine()-1));
	System.out.println(doc.getLine(originalCurToken.getLine()));
	System.out.print(Tool.spaces(originalCurToken.getCharPositionInLine()));
	System.out.println("^");
}
 
Example #30
Source File: ContextDependentFEELLexerTest.java    From jdmn with Apache License 2.0
private void check(LexicalContext lexicalContext, String input, Integer code, String lexeme) {
    check(lexicalContext, input, new CommonToken(code, lexeme));
}