Java Code Examples for edu.cornell.cs.nlp.utils.composites.Pair#second()

The following examples show how to use edu.cornell.cs.nlp.utils.composites.Pair#second(). Each example is taken from an open-source project; the source file, project, and license are noted in the heading above the code.
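Pair is a simple two-element composite: Pair.of(a, b) builds one, and first() and second() read the components back. As a minimal sketch of that pattern (assuming only the Pair.of factory and the two accessors visible in the examples below; the wrapper class and values are hypothetical):

import edu.cornell.cs.nlp.utils.composites.Pair;

public class PairSketch {
	public static void main(String[] args) {
		// Build a pair with the static factory and read back both components.
		final Pair<String, Integer> pair = Pair.of("answer", 42);
		System.out.println(pair.first() + " = " + pair.second()); // prints: answer = 42
	}
}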
Example 1
Source File: FactoringServices.java    From spf with GNU General Public License v2.0
@Override
public void visit(Lambda lambda) {
	// Not visiting the argument, since we are only abstracting constants.
	lambda.getBody().accept(this);
	final ListIterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = tempReturn
			.listIterator();
	while (iterator.hasNext()) {
		final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
				.next();
		if (pair.second() != null) {
			final LogicalExpression newBody = pair.second();
			if (newBody == lambda.getBody()) {
				iterator.set(Pair.of(pair.first(), lambda));
			} else {
				iterator.set(Pair.of(pair.first(),
						new Lambda(lambda.getArgument(), newBody)));
			}

		}
	}
}
 
Example 2
Source File: MultiCKYParser.java    From spf with GNU General Public License v2.0
@Override
public void loggedRun() {
	LOG.debug("%s Lexical job started", split.span);

	final Pair<Collection<Cell<MR>>, Boolean> processingPair = generateLexicalCells(
			split.start, split.end, chart, lexicon, model,
			pruningFilter);

	// Add all the valid cells under a span lock.
	lock.lock(split.start, split.end);
	for (final Cell<MR> newCell : processingPair.first()) {
		chart.add(newCell);
	}
	if (processingPair.second()) {
		chart.externalPruning(split.start, split.end);
	}
	lock.unlock(split.start, split.end);

	LOG.debug("%s: Lexical job completed, tried to add %d entries",
			split.span, processingPair.first().size());

	// Signal the job is complete.
	listener.jobComplete(this);
}
 
Example 3
Source File: GetMaxEvaluations.java    From amr with GNU General Public License v2.0
@Override
public void visit(LiteralNode node) {
	node.getPredicate().accept(this);
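	// Capture the predicate's max assignments before visiting the arguments,
	// which overwrite 'maxes'.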
	final List<Pair<LogicalExpression, Map<INode, LogicalExpression>>> predicateMaxes = maxes;
	int numAssignments = predicateMaxes.size();
	final List<List<Pair<LogicalExpression, Map<INode, LogicalExpression>>>> argMaxes = new ArrayList<>(
			node.getArgs().size());
	for (final IBaseNode argNode : node.getArgs()) {
		argNode.accept(this);
		numAssignments *= maxes.size();
		if (numAssignments == 0 || numAssignments > limit) {
			LOG.debug("Too many evaluation for: %s", node);
			maxes = Collections.emptyList();
			return;
		}
		argMaxes.add(maxes);
	}

	// Create the max literal assignments.
	maxes = new LinkedList<>();
	for (final Pair<LogicalExpression, Map<INode, LogicalExpression>> predicate : predicateMaxes) {
		for (final List<Pair<LogicalExpression, Map<INode, LogicalExpression>>> argPairs : CollectionUtils
				.cartesianProduct(argMaxes)) {
			final Map<INode, LogicalExpression> mapping = new HashMap<>(
					predicate.second());
			final LogicalExpression[] args = new LogicalExpression[argPairs
					.size()];
			for (int i = 0; i < args.length; ++i) {
				final Pair<LogicalExpression, Map<INode, LogicalExpression>> pair = argPairs
						.get(i);
				mapping.putAll(pair.second());
				args[i] = pair.first();
			}
			maxes.add(Pair.of(new Literal(predicate.first(), args), mapping));
		}
	}
}
 
Example 4
Source File: FactoringServices.java    From spf with GNU General Public License v2.0
public static List<FactoredLexicalEntry> factor(
		final LexicalEntry<LogicalExpression> entry, boolean doMaximal,
		boolean doPartial, int maxConstantsInPartial) {

	// Abstract syntactic attributes.
	final Pair<List<String>, Syntax> syntaxAbstractionPair = abstractSyntaxAttributes(
			entry.getCategory().getSyntax());
	final List<String> indexedAttributes = syntaxAbstractionPair.first();
	final Category<LogicalExpression> category;
	if (syntaxAbstractionPair.second() == entry.getCategory().getSyntax()) {
		category = INSTANCE.preprocessor.apply(entry.getCategory());
	} else {
		category = INSTANCE.preprocessor
				.apply(Category.create(syntaxAbstractionPair.second(),
						entry.getCategory().getSemantics()));
	}

	final List<Pair<List<LogicalConstant>, LexicalTemplate>> factoring = FactoringServices
			.doFactoring(category, doMaximal, doPartial,
					maxConstantsInPartial, entry.getProperties(),
					indexedAttributes.size());

	return ListUtils.map(factoring,
			obj -> new FactoredLexicalEntry(entry.getTokens(),
					entry.getCategory(),
					new Lexeme(entry.getTokens(), obj.first(),
							indexedAttributes, entry.getProperties()),
			obj.second(), entry.isDynamic(), entry.getProperties()));
}
 
Example 5
Source File: LogicalExpressionCategoryServices.java    From spf with GNU General Public License v2.0
@Override
public LogicalExpression readSemantics(String string, boolean checkType) {
	final LogicalExpression exp = LogicalExpression.read(string);
	if (checkType) {

		final Pair<Boolean, String> typeChecking = IsTypeConsistent
				.ofVerbose(exp);
		if (!typeChecking.first()) {
			throw new IllegalStateException("Semantics not well typed ["
					+ typeChecking.second() + "]: " + string);
		}
	}
	return Simplify.of(exp);
}
 
Example 6
Source File: TypeRepository.java    From spf with GNU General Public License v2.0
private ComplexType createComplexTypeFromString(String string) {
	// Case complex functional type
	final String innerString = string.substring(1, string.length() - 1)
			.trim();
	int i = 0;
	final StringBuilder domainStringBuilder = new StringBuilder();
	char c;
	int parenthesisCounter = 0;
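	// Scan to the top-level type separator, using the parenthesis counter to
	// skip separators inside nested types.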
	while (i < innerString.length()
			&& !((c = innerString.charAt(i)) == ComplexType.COMPLEX_TYPE_SEP && parenthesisCounter == 0)) {
		++i;
		domainStringBuilder.append(c);
		if (c == ComplexType.COMPLEX_TYPE_OPEN_PAREN) {
			++parenthesisCounter;
		} else if (c == ComplexType.COMPLEX_TYPE_CLOSE_PAREN) {
			--parenthesisCounter;
		}
	}
	++i;
	final String rangeString = innerString.substring(i).trim();
	final String domainString = domainStringBuilder.toString().trim();

	// Check if the domain indicates a RecursiveComplexType; if so, trim the
	// indicator so the domain can be parsed, and capture the parsed option.
	final Pair<String, RecursiveComplexType.Option> prefixOption = RecursiveComplexType.Option
			.parse(domainString);
	final RecursiveComplexType.Option option = prefixOption.second();
	final String domainStringTrimmed = prefixOption.first();

	final Type domain = getTypeCreateIfNeeded(domainStringTrimmed);
	final Type range = getTypeCreateIfNeeded(rangeString);

	return ComplexType.create(string, domain, range, option);
}
 
Example 7
Source File: AToExists.java    From spf with GNU General Public License v2.0
private LogicalExpression noUpperLevelWrapIfPossible(LogicalExpression exp,
		Stack<Pair<Variable, ? extends LogicalExpression>> stack) {
	if (stack.size() == 1
			&& exp.equals(stack.peek().first())
			&& LogicLanguageServices.getTypeRepository()
					.getTruthValueType()
					.equals(stack.peek().second().getType())) {
		final Pair<Variable, ? extends LogicalExpression> pop = stack.pop();
		final LogicalExpression[] args = new LogicalExpression[1];
		args[0] = new Lambda(pop.first(), pop.second());
		return new Literal(existsPredicate, args);
	} else {
		return exp;
	}
}
 
Example 8
Source File: Variable.java    From spf with GNU General Public License v2.0
@Override
public Variable read(String string,
		ScopeMapping<String, LogicalExpression> mapping,
		TypeRepository typeRepository, ITypeComparator typeComparator,
		LogicalExpressionReader reader) {

	try {
		final Pair<String, Variable> defintion = readVariableDefintion(
				string, typeRepository);
		if (defintion != null && !mapping.containsKey(string)) {
			mapping.push(defintion.first(), defintion.second());
			return defintion.second();
		} else if (defintion != null) {
			throw new LogicalExpressionRuntimeException(
					"Re-define a global variable: " + string);
		} else {
			// Case variable reference.
			if (mapping.containsKey(string)) {
				return (Variable) mapping.peek(string);
			} else {
				throw new LogicalExpressionRuntimeException(
						"Undefined variable reference: " + string);
			}
		}
	} catch (final RuntimeException e) {
		LOG.error("Variable error: %s", string);
		throw e;
	}

}
 
Example 9
Source File: ScopeMappingOverlay.java    From spf with GNU General Public License v2.0
public void applyToBase() {
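	// Replay every scoped value from this overlay onto the base mapping, then
	// clear the overlay's own state.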
	for (final Pair<K, Iterator<V>> entry : map) {
		final Iterator<V> stackIterator = entry.second();
		final K key = entry.first();
		while (stackIterator.hasNext()) {
			base.push(key, stackIterator.next());
		}
	}
	map.clear();
	reverseMapping.clear();
}
 
Example 10
Source File: MultiCKYParser.java    From spf with GNU General Public License v2.0
@Override
public void loggedRun() {
	LOG.debug("%s: Unary span job started", split.span);

	final Pair<List<Cell<MR>>, Boolean> processingPair = preChartPruning
			? unaryProcessSpanAndPrune(split.start, split.end,
					sentenceLength, chart, cellFactory, pruningFilter,
					chart.getBeamSize(), model)
			: unaryProcessSpan(split.start, split.end, sentenceLength,
					chart, cellFactory, pruningFilter, model);

	final List<Cell<MR>> newCells = processingPair.first();

	LOG.debug("%s: %d new cells", split, newCells.size());

	// Add all the valid cells under a span lock.
	lock.lock(split.start, split.end);
	for (final Cell<MR> newCell : newCells) {
		chart.add(newCell);
	}
	if (processingPair.second()) {
		chart.externalPruning(split.start, split.end);
	}
	lock.unlock(split.start, split.end);

	LOG.debug("%s: Unary span job completed", split);

	// Signal the job is complete
	listener.jobComplete(this);
}
 
Example 11
Source File: MultiCKYParser.java    From spf with GNU General Public License v2.0
@Override
public void loggedRun() {
	LOG.debug("%s: Split job started", split);

	final Pair<List<Cell<MR>>, Boolean> processingPair = preChartPruning
			? processSplitAndPrune(split.start, split.end, split.split,
					sentenceLength, chart, cellFactory, pruningFilter,
					chart.getBeamSize(), model)
			: processSplit(split.start, split.end, split.split,
					sentenceLength, chart, cellFactory, pruningFilter,
					model);

	final List<Cell<MR>> newCells = processingPair.first();

	LOG.debug("%s: %d new cells", split, newCells.size());

	// Add all the valid cells under a span lock
	lock.lock(split.start, split.end);
	for (final Cell<MR> newCell : newCells) {
		chart.add(newCell);
	}
	if (processingPair.second()) {
		chart.externalPruning(split.start, split.end);
	}
	lock.unlock(split.start, split.end);

	LOG.debug("%s: Split job completed", split);

	// Signal the job is complete
	listener.jobComplete(this);
}
 
Example 12
Source File: NamedEntitiesCandidates.java    From amr with GNU General Public License v2.0
public static void main(String[] args) {
	try {
		// //////////////////////////////////////////
		// Init logging
		// //////////////////////////////////////////

		Logger.DEFAULT_LOG = new Log(System.err);
		Logger.setSkipPrefix(true);
		LogLevel.INFO.set();

		// //////////////////////////////////////////
		// Init AMR.
		// //////////////////////////////////////////

		Init.init(new File(args[0]), false);

		final LogicalConstant namePredicate = LogicalConstant
				.read("c_name:<e,<txt,t>>");

		final AMROntology ontology = AMROntology.read(new File(args[1]));

		for (final SingleSentence sentence : SingleSentenceCollection.read(
				new File(args[2]), new Tokenizer())) {
			System.out.println(sentence.getSample());
			for (final Pair<String, String> entityType : GetNamedEntities
					.of(sentence.getLabel(), namePredicate)) {
				if (entityType.second() == null
						|| !ontology.isType(entityType.second())) {
					System.out.println(entityType.first());
				} else {
					System.out.println(String.format("%s\t%s",
							entityType.first(), entityType.second()));
				}
			}
			System.out.println();
		}
	} catch (final IOException e) {
		throw new IllegalStateException(e);
	}
}
 
Example 13
Source File: TreeTransformerMain.java    From UDepLambda with Apache License 2.0
@Override
public void processSentence(JsonObject sent) {
  Sentence sentence = new Sentence(sent);
  // TreeTransformationRules for modifying the structure of a tree.
  TreeTransformer.applyRuleGroupsOnTree(treeTransformationRules,
      sentence.getRootNode());

  // Assign lambdas.
  TreeTransformer.applyRuleGroupsOnTree(lambdaAssignmentRules,
      sentence.getRootNode());

  // Compose lambdas.
  Pair<String, List<LogicalExpression>> sentenceSemantics =
      TreeTransformer.composeSemantics(sentence.getRootNode(),
          relationPriorityRules.getRelationPriority(), logger, false);
  sent.addProperty(SentenceKeys.DEPLAMBDA_OBLIQUE_TREE,
      sentenceSemantics.first());
  if (sentenceSemantics.second().size() > 0) {
    sent.addProperty(SentenceKeys.DEPLAMBDA_EXPRESSION,
        sentenceSemantics.second().get(0).toString());
  }

  // Post-process lambdas.
  JsonArray jsonParses = new JsonArray();
  for (LogicalExpression parse : sentenceSemantics.second()) {
    List<String> cleaned =
        Lists.newArrayList(PostProcessLogicalForm.process(sentence, parse,
            lexicalizePredicates));

    // TODO: Better sorting function is required.
    Collections.sort(cleaned);
    jsonParses.add(jsonParser.parse(gson.toJson(cleaned)));
  }
  sent.add(SentenceKeys.DEPENDENCY_LAMBDA, jsonParses);
}
 
Example 14
Source File: JointDerivation.java    From spf with GNU General Public License v2.0
public JointDerivation<MR, ERESULT> build() {
	final Pair<List<InferencePair<MR, ERESULT, IDerivation<MR>>>, Double> maxPair = createMaxPairs();
	return new JointDerivation<MR, ERESULT>(maxPair.first(),
			inferencePairs, result, maxPair.second());
}
 
Example 15
Source File: Lambda.java    From spf with GNU General Public License v2.0
@Override
public Lambda read(String string,
		ScopeMapping<String, LogicalExpression> mapping,
		TypeRepository typeRepository, ITypeComparator typeComparator,
		LogicalExpressionReader reader) {

	try {
		final LispReader lispReader = new LispReader(new StringReader(
				string));

		// The first argument is the 'lambda' keyword. We just ignore
		// it.
		lispReader.next();

		// The second argument is the variable definition.
		final Pair<String, Variable> variableDef = Variable
				.readVariableDefintion(lispReader.next(),
						typeRepository);

		if (variableDef == null) {
			throw new LogicalExpressionRuntimeException(
					"Invalid lambda argument: " + string);
		}

		// Update the scope mapping.
		mapping.push(variableDef.first(), variableDef.second());

		// The next argument is the body expression.
		final LogicalExpression lambdaBody = reader.read(
				lispReader.next(), mapping, typeRepository,
				typeComparator);

		// Verify that we don't have any more elements.
		if (lispReader.hasNext()) {
			throw new LogicalExpressionRuntimeException(String.format(
					"Invalid lambda expression: %s", string));
		}

		// Remove the variable from the mapping.
		if (mapping.pop(variableDef.first()) == null) {
			throw new LogicalExpressionRuntimeException(
					"Failed to remove variable from mapping. Something werid is happening: "
							+ string);
		}

		return new Lambda(variableDef.second(), lambdaBody);
	} catch (final RuntimeException e) {
		LOG.error("Lambda syntax error: %s", string);
		throw e;
	}

}
 
Example 16
Source File: AbstractAmrParserOutput.java    From amr with GNU General Public License v2.0
protected List<InferencePair<LogicalExpression, LogicalExpression, BASEDERIV>> getInferencePairs(
		LogicalExpression result) {
	// Get the base parse that gives the underspecified ID-less result and
	// the factor graph created from it.
	final Category<LogicalExpression> baseCategory = Category.create(
			AMRServices.getCompleteSentenceSyntax(),
			AMRServices.underspecifyAndStrip(result));
	final List<InferencePair<LogicalExpression, LogicalExpression, BASEDERIV>> pairs = new LinkedList<>();
	for (final Pair<BASEDERIV, FactorGraph> derivationPair : derviationPairs) {
		if (StripOverload.of(derivationPair.first().getCategory())
				.equals(baseCategory)) {
			final FactorGraph graph = derivationPair.second();

			// Get the mapping from the factor graph that will give the
			// result, if such exists.
			final Map<INode, LogicalExpression> mapping = GetMapping
					.of(graph, result);

			if (mapping != null) {

				// Collect the features using the mapping. Aggregate the
				// score as well. If the graph has no marginals, the
				// aggregated belief will reflect the viterbi score.
				// Otherwise, it is just a meaningless sum of beliefs and we
				// need theta to compute this.
				final IHashVector features = HashVectorFactory.create();
				double beliefSum = 0.0;
				for (final IFactor factor : GetFactors.of(graph)) {
					final FactorTable table = factor.getTable();
					table.getFeatures(mapping).addTimesInto(1.0, features);
					beliefSum += table.get(mapping);
				}

				// Create the evaluation object and the joint derivation.
				final EvaluationResult evaluation = graph.hasMarginals()
						? new ProbEvaluationResult(
								theta.dotProduct(features),
								MarginalsProduct.of(graph, mapping),
								features, result)
						: new EvaluationResult(beliefSum, features, result);

				pairs.add(new InferencePair<>(derivationPair.first(),
						evaluation));
			}

		}
	}

	return pairs;
}
 
Example 17
Source File: GraphAmrParserOutput.java    From amr with GNU General Public License v2.0
private IHashVector logExpectedFeaturesExpectation(
		LogicalExpression result) {
	final IHashVector logExpectedFeatures = HashVectorFactory.create();

	// Locate the appropriate factor graph. Get the mapping that leads to
	// result and iterate the factors to extract the weighted features.
	final Category<LogicalExpression> baseCategory = Category.create(
			AMRServices.getCompleteSentenceSyntax(),
			AMRServices.underspecifyAndStrip(result));

	for (final Pair<IGraphDerivation<LogicalExpression>, FactorGraph> pair : derviationPairs) {
		if (StripOverload.of(pair.first().getCategory())
				.equals(baseCategory)) {
			final Map<INode, LogicalExpression> mapping = GetMapping
					.of(pair.second(), result);
			if (mapping != null) {
				final double logInsideScore = pair.first()
						.getLogInsideScore();
				final FactorGraph graph = pair.second();
				for (final IFactor factor : GetFactors.of(graph)) {
					final FactorTable belief = factor.getBelief();

					// A quick sanity check.
					if (!belief.isMappingComplete(mapping)) {
						throw new IllegalStateException(
								"Incomplete mapping -- should never happen");
					}

					// The weight is a combination of the inside score of
					// the base parse and the belief of this factor, given
					// the mapping (which does the conditioning).
					HashVectorUtils.logSumExpAdd(
							belief.get(mapping) + logInsideScore,
							belief.getFeatures(mapping),
							logExpectedFeatures);
				}
			}
		}
	}

	// TODO FIX: this is incorrect; the factor graph should re-weight the
	// expected features.

	// Add the expected features from the base output. This will consider
	// only the relevant base parses.
	HashVectorUtils.sumExpLogged(
			baseOutput.logExpectedFeatures(
					(IFilter<Category<LogicalExpression>>) e -> baseCategory
							.equals(StripOverload.of(e))),
			logExpectedFeatures);

	// See above TODO. Uncomment the final return statement once fixed.
	throw new RuntimeException(
			"This method is not computing the correct expected feature weigths, see TODO in code");

	// return logExpectedFeatures;
}