edu.cornell.cs.nlp.utils.composites.Pair Java Examples

The following examples show how to use edu.cornell.cs.nlp.utils.composites.Pair. Each example is drawn from an open-source project (amr, spf, or UDepLambda) and is headed by its source file and license.
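
Before the project excerpts, here is a minimal, self-contained sketch of the Pair API as it appears in them: Pair.of(...) constructs a pair and first()/second() read its two elements. The PairBasics class and the values are hypothetical; only the Pair calls are taken from the examples below.

import java.util.Arrays;
import java.util.List;

import edu.cornell.cs.nlp.utils.composites.Pair;

public class PairBasics {
	public static void main(String[] args) {
		// Construct a two-element tuple and read its components.
		final Pair<String, Integer> entry = Pair.of("answer", 42);
		System.out.println(entry.first() + " = " + entry.second());

		// Pairs are frequently collected and iterated, as in the excerpts below.
		final List<Pair<String, Integer>> entries = Arrays.asList(
				Pair.of("a", 1), Pair.of("b", 2));
		for (final Pair<String, Integer> pair : entries) {
			System.out.println(pair.first() + " -> " + pair.second());
		}
	}
}
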
Example #1
Source File: SentenceWithDummy.java    From amr with GNU General Public License v2.0
@Override
public ParseRuleResult<LogicalExpression> apply(
		Category<LogicalExpression> category, SentenceSpan span) {
	if (isValidArgument(category, span)) {
		final Pair<Lambda, LogicalConstant> pair = DummyEntityServices
				.stripDummy((Lambda) category.getSemantics());
		if (pair != null) {
			final LogicalConstant inversedRelation = AMRServices
					.makeRelationPassive(pair.second(), true);
			if (inversedRelation != null) {
				return new ParseRuleResult<>(name, Category.create(
						targetSyntax, categoryServices.apply(
								categoryServices.apply(helperCategory,
										inversedRelation), pair.first())));
			}
		}
	}
	return null;
}
 
Example #2
Source File: FactoringServices.java    From spf with GNU General Public License v2.0
@Override
public void visit(Lambda lambda) {
	// not visiting argument, since we are only abstracting constants.
	lambda.getBody().accept(this);
	final ListIterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = tempReturn
			.listIterator();
	while (iterator.hasNext()) {
		final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
				.next();
		if (pair.second() != null) {
			final LogicalExpression newBody = pair.second();
			if (newBody == lambda.getBody()) {
				iterator.set(Pair.of(pair.first(), lambda));
			} else {
				iterator.set(Pair.of(pair.first(),
						new Lambda(lambda.getArgument(), newBody)));
			}

		}
	}
}
 
Example #3
Source File: NounPhraseWithDummy.java    From amr with GNU General Public License v2.0
@Override
public ParseRuleResult<LogicalExpression> apply(
		Category<LogicalExpression> category, SentenceSpan span) {
	if (isValidArgument(category, span)) {
		final Pair<Literal, LogicalConstant> pair = DummyEntityServices
				.stripDummy((Literal) category.getSemantics());
		if (pair != null) {
			final LogicalConstant inversedRelation = AMRServices
					.makeRelationPassive(pair.second(), true);
			if (inversedRelation != null) {
				return new ParseRuleResult<>(name, Category.create(
						targetSyntax, categoryServices.apply(
								categoryServices.apply(helperCategory,
										inversedRelation), pair.first())));
			}
		}
	}
	return null;
}
 
Example #4
Source File: Evaluation.java    From spf with GNU General Public License v2.0
/**
 * Decomposes a logical expression as a SELECT query.
 *
 * @param exp Logical expression to decompose.
 * @return Pair of queried variables and SELECT body. If not a SELECT query,
 *         returns null.
 */
private static Pair<List<Variable>, LogicalExpression> decomposeLogicalExpressionAsSelect(
		LogicalExpression exp) {
	LogicalExpression currentBody = exp;
	final List<Variable> queryVariables = new LinkedList<Variable>();
	while (currentBody instanceof Lambda) {
		final Lambda lambda = (Lambda) currentBody;
		if (lambda.getArgument().getType().isComplex()) {
			// Case argument is complex
			return null;
		} else {
			queryVariables.add(lambda.getArgument());
			currentBody = lambda.getBody();
		}
	}

	if (currentBody.getType().isComplex()) {
		return null;
	} else {
		return Pair.of(queryVariables, currentBody);
	}
}
 
Example #5
Source File: FactoringServices.java    From spf with GNU General Public License v2.0
public static Set<Pair<Placeholders, ? extends LogicalExpression>> of(
		LogicalExpression exp, boolean getMaximal, boolean getPartial,
		int partialMaxConstants) {
	final AbstractConstants visitor = new AbstractConstants(getMaximal,
			getPartial, partialMaxConstants);
	visitor.visit(exp);

	// Remove any empty factoring, unless it's a maximal one
	final Iterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = visitor.tempReturn
			.iterator();
	while (iterator.hasNext()) {
		final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
				.next();
		if (!pair.first().isMaximal() && pair.first().size() == 0) {
			iterator.remove();
		}
	}

	return new HashSet<Pair<Placeholders, ? extends LogicalExpression>>(
			visitor.tempReturn);
}
 
Example #6
Source File: RecursiveComplexType.java    From spf with GNU General Public License v2.0
public static Pair<String, Option> parse(String string) {
	final Matcher m = STRING_PATTERN.matcher(string);
	if (m.matches()) {
		final String type = m.group("type");
		if (m.isCaptured("order")) {
			final boolean isOrderSensitive = m.group("order").equals(
					"+");
			int minArgs;
			if (m.isCaptured("minargs")) {
				minArgs = Integer.valueOf(m.group("minargs"));
			} else {
				minArgs = 2;
			}
			return Pair.of(type, new Option(isOrderSensitive, minArgs));
		} else {
			return Pair.of(type, null);
		}
	} else {
		throw new IllegalArgumentException("Invalid type string");
	}
}
 
Example #7
Source File: AbstractAmrParser.java    From amr with GNU General Public License v2.0
/**
 * LBP inference for the second stage.
 *
 * @return Triplets, each holding the base derivation, the list of
 *         {@link EvaluationResult}s, and the inference exactness flag (if
 *         the flag is 'false', there were too many max-scoring evaluations
 *         and none were returned).
 */
protected List<Triplet<DERIV, List<EvaluationResult>, Boolean>> loopyBPInference(
		List<Pair<DERIV, FactorGraph>> graphPairs,
		IJointDataItemModel<LogicalExpression, LogicalExpression> model,
		boolean sloppyInference) {
	final List<Triplet<DERIV, List<EvaluationResult>, Boolean>> inferenceTriplets = new LinkedList<>();

	// Run LBP inference on each derivation's factor graph and collect the
	// results as triplets.
	for (final Pair<DERIV, FactorGraph> resultPair : graphPairs) {
		final Pair<List<EvaluationResult>, Boolean> inferencePair = doLoopyBPInference(
				resultPair.second(), model, sloppyInference);
		inferenceTriplets.add(Triplet.of(resultPair.first(),
				inferencePair.first(), inferencePair.second()));
	}

	return inferenceTriplets;
}
 
Example #8
Source File: JointDerivation.java    From spf with GNU General Public License v2.0
protected Pair<List<InferencePair<MR, ERESULT, IDerivation<MR>>>, Double> createMaxPairs() {
	double maxScore = -Double.MAX_VALUE;
	final List<InferencePair<MR, ERESULT, IDerivation<MR>>> maxPairs = new LinkedList<>();
	for (final InferencePair<MR, ERESULT, IDerivation<MR>> pair : inferencePairs) {
		// The Viterbi score under a linearly-weighted model is the base
		// derivation score plus the evaluation score.
		final double score = pair.getBaseDerivation().getScore()
				+ pair.getEvaluationResult().getScore();
		if (score > maxScore) {
			maxScore = score;
			maxPairs.clear();
			maxPairs.add(pair);
		} else if (score == maxScore) {
			maxPairs.add(pair);
		}
	}
	return Pair.of(maxPairs, maxScore);
}
 
Example #9
Source File: FactoringServices.java    From spf with GNU General Public License v2.0
private static List<Pair<List<LogicalConstant>, LexicalTemplate>> doFactoring(
		final Category<LogicalExpression> inputCategory, boolean doMaximal,
		boolean doPartial, int maxConstantsInPartial,
		final Map<String, String> properties, final int numAttributes) {
	if (inputCategory.getSemantics() == null) {
		return ListUtils.createSingletonList(
				Pair.of(Collections.<LogicalConstant> emptyList(),
						new LexicalTemplate(
								Collections.<LogicalConstant> emptyList(),
								numAttributes, inputCategory, properties)));
	}

	final Set<Pair<AbstractConstants.Placeholders, ? extends LogicalExpression>> factoring = AbstractConstants
			.of(inputCategory.getSemantics(), doMaximal, doPartial,
					maxConstantsInPartial);
	return factoring.stream()
			.map(obj -> Pair
					.of(Collections.unmodifiableList(obj.first().originals),
							new LexicalTemplate(
									Collections.unmodifiableList(
											obj.first().placeholders),
							numAttributes,
							inputCategory.cloneWithNewSemantics(
									obj.second()), properties)))
			.collect(Collectors.toList());
}
 
Example #10
Source File: FactoringServices.java    From spf with GNU General Public License v2.0
private static Pair<Placeholders, ? extends LogicalExpression> getAndRemoveMaximal(
		List<Pair<Placeholders, ? extends LogicalExpression>> pairs) {
	Pair<Placeholders, ? extends LogicalExpression> maximal = null;
	final Iterator<Pair<Placeholders, ? extends LogicalExpression>> iterator = pairs
			.iterator();
	while (iterator.hasNext()) {
		final Pair<Placeholders, ? extends LogicalExpression> pair = iterator
				.next();
		if (pair.first().isMaximal()) {
			if (maximal == null) {
				maximal = pair;
				iterator.remove();
			} else {
				throw new IllegalStateException(
						"found more than one maximal");
			}
		}
	}

	if (maximal == null) {
		throw new IllegalStateException(
				"expected a maximal pair, not found");
	}

	return maximal;
}
 
Example #11
Source File: MultiCKYParser.java    From spf with GNU General Public License v2.0
@Override
public void loggedRun() {
	LOG.debug("%s Lexical job started", split.span);

	final Pair<Collection<Cell<MR>>, Boolean> processingPair = generateLexicalCells(
			split.start, split.end, chart, lexicon, model,
			pruningFilter);

	// Add all the valid cells under a span lock.
	lock.lock(split.start, split.end);
	for (final Cell<MR> newCell : processingPair.first()) {
		chart.add(newCell);
	}
	if (processingPair.second()) {
		chart.externalPruning(split.start, split.end);
	}
	lock.unlock(split.start, split.end);

	LOG.debug("%s: Lexical job completed, tried to add %d entries",
			split.span, processingPair.first().size());

	// Signal the job is complete.
	listener.jobComplete(this);
}
 
Example #12
Source File: AToExists.java    From spf with GNU General Public License v2.0
private LogicalExpression noUpperLevelWrapIfPossible(LogicalExpression exp,
		Stack<Pair<Variable, ? extends LogicalExpression>> stack) {
	if (stack.size() == 1
			&& exp.equals(stack.peek().first())
			&& LogicLanguageServices.getTypeRepository()
					.getTruthValueType()
					.equals(stack.peek().second().getType())) {
		final Pair<Variable, ? extends LogicalExpression> pop = stack.pop();
		final LogicalExpression[] args = new LogicalExpression[1];
		args[0] = new Lambda(pop.first(), pop.second());
		return new Literal(existsPredicate, args);
	} else {
		return exp;
	}
}
 
Example #13
Source File: LabeledAmrSentence.java    From amr with GNU General Public License v2.0
public LabeledAmrSentence(SituatedSentence<AMRMeta> sentence,
		LogicalExpression label, Map<String, String> properties,
		Map<Pair<Integer, Integer>, Set<LogicalExpression>> alignments,
		List<Set<Syntax>> superTags, Map<TokenSeq, Set<Syntax>> spans) {
	super(sentence, label, properties);
	this.alignments = alignments;
	this.superTags = superTags;
	this.spans = spans;
}
 
Example #14
Source File: AToExists.java    From spf with GNU General Public License v2.0
@Override
public void visit(Lambda lambda) {
	lambda.getBody().accept(this);
	if (result.first() != lambda.getBody()) {
		// Case body changed
		result = Pair.of(new Lambda(lambda.getArgument(), result.first()),
				result.second());
	} else {
		result = Pair.of(lambda, result.second());
	}
}
 
Example #15
Source File: Literal.java    From spf with GNU General Public License v2.0
public static Pair<Type, Type[]> computeLiteralTyping(
		ComplexType predicateType, Type[] argTypes,
		ITypeComparator typeComparator, TypeRepository typeRepository) {
	final Type[] impliedSignatureTypes = new Type[argTypes.length];
	final Type computedType = computeLiteralTyping(predicateType, argTypes,
			typeComparator, typeRepository, impliedSignatureTypes);
	if (computedType == null) {
		return null;
	} else {
		return Pair.of(computedType, impliedSignatureTypes);
	}
}
 
Example #16
Source File: Variable.java    From spf with GNU General Public License v2.0
@Override
public Variable read(String string,
		ScopeMapping<String, LogicalExpression> mapping,
		TypeRepository typeRepository, ITypeComparator typeComparator,
		LogicalExpressionReader reader) {

	try {
		final Pair<String, Variable> defintion = readVariableDefintion(
				string, typeRepository);
		if (defintion != null && !mapping.containsKey(string)) {
			mapping.push(defintion.first(), defintion.second());
			return defintion.second();
		} else if (defintion != null) {
			throw new LogicalExpressionRuntimeException(
					"Re-define a global variable: " + string);
		} else {
			// Case variable reference.
			if (mapping.containsKey(string)) {
				return (Variable) mapping.peek(string);
			} else {
				throw new LogicalExpressionRuntimeException(
						"Undefined variable reference: " + string);
			}
		}
	} catch (final RuntimeException e) {
		LOG.error("Variable error: %s", string);
		throw e;
	}

}
 
Example #17
Source File: TreeHashVectorTest.java    From spf with GNU General Public License v2.0
@Test
public void test() {
	final TreeHashVector vector = new TreeHashVector();

	vector.set("p1", 1.0);
	vector.set("p1", "p2", "p3", "p4", 2.0);

	assertTrue(vector.get("p1") == 1.0);
	assertTrue(vector.get("p1", "p2", "p3", "p4") == 2.0);
	assertTrue(vector.size() == 2);

	vector.set("p2", 3.0);

	final IHashVector p1 = vector.getAll("p1");
	assertTrue(p1.get("p1") == 1.0);
	assertTrue(p1.get("p1", "p2", "p3", "p4") == 2.0);
	assertTrue(p1.size() == 2);

	vector.set("p3", -2.5);
	Assert.assertEquals(2.5 + 3.0 + 1.0 + 2.0, vector.l1Norm(), 0.0);
	final TreeHashVector pairwise = vector.pairWiseProduct(vector);
	for (final Pair<KeyArgs, Double> entry : pairwise) {
		Assert.assertEquals(entry.second(),
				Math.pow(vector.get(entry.first()), 2), 0.0);
	}

}
 
Example #18
Source File: ScopeMapping.java    From spf with GNU General Public License v2.0
@Override
public String toString() {
	return new StringBuilder("{")
			.append(ScopeMapping.class.getSimpleName())
			.append(" ")
			.append(ListUtils.join(ListUtils.map(map,
					new ListUtils.Mapper<Pair<K, Iterator<V>>, String>() {

						@Override
						public String process(Pair<K, Iterator<V>> obj) {
							final StringBuilder entryString = new StringBuilder();
							entryString
									.append(obj.first())
									.append("(")
									.append(System.identityHashCode(obj
											.first())).append(")")
									.append("=>[");
							final Iterator<V> valueIterator = obj.second();
							while (valueIterator.hasNext()) {
								final V value = valueIterator.next();
								entryString.append(new StringBuilder(value
										.toString())
										.append("(")
										.append(System
												.identityHashCode(value))
										.append(")").toString());
								if (valueIterator.hasNext()) {
									entryString.append(", ");
								}
							}
							return entryString.append("]").toString();
						}
					}), ", ")).append("}").toString();
}
 
Example #19
Source File: Chart.java    From spf with GNU General Public License v2.0
@Override
public Pair<Double, Double> minQeueuScore() {
	if (queue.isEmpty()) {
		return null;
	} else {
		final Cell<MR> peek = queue.peek();
		return Pair.of(peek.getPruneScore(),
				peek.getSecondPruneScore());
	}
}
 
Example #20
Source File: TreeHashVectorWithInitTest.java    From spf with GNU General Public License v2.0
@Test
public void test5() {
	final TreeHashVectorWithInit vector = new TreeHashVectorWithInit(
			new TreeHashVectorWithInit.HashInitFunction(0.1));

	vector.set("p1", 1.0);
	vector.set("p1", "p2", "p3", "p4", 2.0);

	assertTrue(vector.get("p1") == 1.0);
	assertTrue(vector.get("p1", "p2", "p3", "p4") == 2.0);
	assertTrue(vector.size() == 2);

	vector.set("p2", 3.0);

	final IHashVector p1 = vector.getAll("p1");
	assertTrue(p1.get("p1") == 1.0);
	assertTrue(p1.get("p1", "p2", "p3", "p4") == 2.0);
	assertTrue(p1.size() == 2);

	vector.set("p3", -2.5);
	final TreeHashVectorWithInit pairwise = vector
			.pairWiseProduct(new TreeHashVector(vector));
	for (final Pair<KeyArgs, Double> entry : pairwise) {
		Assert.assertEquals(entry.second(),
				Math.pow(vector.get(entry.first()), 2), 0.0);
	}

	Assert.assertNotEquals(0.0, vector.get("sdasd"));
	Assert.assertNotEquals(vector.get("sdasdd"), vector.get("sdasd"));

}
 
Example #21
Source File: MultiCKYParser.java    From spf with GNU General Public License v2.0
@Override
public void loggedRun() {
	LOG.debug("%s: Split job started", split);

	final Pair<List<Cell<MR>>, Boolean> processingPair = preChartPruning
			? processSplitAndPrune(split.start, split.end, split.split,
					sentenceLength, chart, cellFactory, pruningFilter,
					chart.getBeamSize(), model)
			: processSplit(split.start, split.end, split.split,
					sentenceLength, chart, cellFactory, pruningFilter,
					model);

	final List<Cell<MR>> newCells = processingPair.first();

	LOG.debug("%s: %d new cells", split, newCells.size());

	// Add all the valid cells under a span lock
	lock.lock(split.start, split.end);
	for (final Cell<MR> newCell : newCells) {
		chart.add(newCell);
	}
	if (processingPair.second()) {
		chart.externalPruning(split.start, split.end);
	}
	lock.unlock(split.start, split.end);

	LOG.debug("%s: Split job completed", split);

	// Signal the job is complete
	listener.jobComplete(this);
}
 
Example #22
Source File: TreeTransformerMain.java    From UDepLambda with Apache License 2.0
@Override
public void processSentence(JsonObject sent) {
  Sentence sentence = new Sentence(sent);
  // TreeTransformationRules for modifying the structure of a tree.
  TreeTransformer.applyRuleGroupsOnTree(treeTransformationRules,
      sentence.getRootNode());

  // Assign lambdas.
  TreeTransformer.applyRuleGroupsOnTree(lambdaAssignmentRules,
      sentence.getRootNode());

  // Composing lambda.
  Pair<String, List<LogicalExpression>> sentenceSemantics =
      TreeTransformer.composeSemantics(sentence.getRootNode(),
          relationPriorityRules.getRelationPriority(), logger, false);
  sent.addProperty(SentenceKeys.DEPLAMBDA_OBLIQUE_TREE,
      sentenceSemantics.first());
  if (sentenceSemantics.second().size() > 0) {
    sent.addProperty(SentenceKeys.DEPLAMBDA_EXPRESSION,
        sentenceSemantics.second().get(0).toString());
  }

  // Post processing lambdas.
  JsonArray jsonParses = new JsonArray();
  for (LogicalExpression parse : sentenceSemantics.second()) {
    List<String> cleaned =
        Lists.newArrayList(PostProcessLogicalForm.process(sentence, parse,
            lexicalizePredicates));

    // TODO: Better sorting function is required.
    Collections.sort(cleaned);
    jsonParses.add(jsonParser.parse(gson.toJson(cleaned)));
  }
  sent.add(SentenceKeys.DEPENDENCY_LAMBDA, jsonParses);
}
 
Example #23
Source File: ReliableManager.java    From spf with GNU General Public License v2.0
private static List<Pair<String, String>> readConfigurationCommands(
		File file) throws FileNotFoundException, IOException {
	final List<Pair<String, String>> configurationCommands = new LinkedList<Pair<String, String>>();
	try (BufferedReader r = new BufferedReader(
			new InputStreamReader(new FileInputStream(file)))) {
		String line;
		while ((line = r.readLine()) != null) {
			final String[] split = line.split("\t", 2);
			configurationCommands.add(Pair.of(split[0], split[1]));
		}
	}
	return configurationCommands;
}
 
Example #24
Source File: RefControlFeatureSetTest.java    From amr with GNU General Public License v2.0
@Test
public void test() {
	final LogicalExpression exp = TestServices.getCategoryServices()
			.readSemantics(
					"(a:<id,<<e,t>,e>> !1 (lambda $0:e (and:<t*,t> (want-01:<e,t> $0) "
							+ "(c_ARG0:<e,<e,t>> $0 (a:<id,<<e,t>,e>> !2 (lambda $0:e (i:<e,t> $0)))) "
							+ "(c_ARG1:<e,<e,t>> $0 (a:<id,<<e,t>,e>> !3 (lambda $0:e (and:<t*,t> (buy-01:<e,t> $0) "
							+ "(c_ARG0:<e,<e,t>> $0 (ref:<id,e> na:id)) "
							+ "(c_ARG1:<e,<e,t>> $0 (a:<id,<<e,t>,e>> !4 (lambda $0:e (ticket:<e,t> $0)))))))))))");

	final AssignmentGeneratorFactory factory = new AssignmentGeneratorFactory();
	final JointModel<SituatedSentence<AMRMeta>, LogicalExpression, LogicalExpression> model = new JointModel.Builder<SituatedSentence<AMRMeta>, LogicalExpression, LogicalExpression>()
			.build();
	model.getTheta().set("REFCTRL", "want-01", "c_ARG0", "c_ARG1", "c_ARG0",
			1.0);

	final Sentence sentence = new Sentence("I want to buy a ticket");
	final AMRMeta meta = new AMRMeta(sentence);
	final IJointDataItemModel<LogicalExpression, LogicalExpression> dim = model
			.createJointDataItemModel(
					new SituatedSentence<AMRMeta>(sentence, meta));

	final FactorGraph graph = CreateFactorGraph.of(exp, factory.create(exp),
			false);

	final List<Runnable> jobs = new RefControlFeatureSet()
			.createFactorJobs(graph, meta, dim);
	jobs.stream().forEach(r -> r.run());

	final Pair<List<EvaluationResult>, Boolean> results = BeamSearch
			.of(graph, 100);
	Assert.assertEquals(4, results.first().size());
	final EvaluationResult max = results.first().stream()
			.max((r1, r2) -> Double.compare(r1.getScore(), r2.getScore()))
			.get();
	final LogicalExpression expected = TestServices.getCategoryServices()
			.readSemantics(
					"(a:<id,<<e,t>,e>> !1 (lambda $0:e (and:<t*,t> (want-01:<e,t> $0) (c_ARG0:<e,<e,t>> $0 (a:<id,<<e,t>,e>> !2 (lambda $1:e (i:<e,t> $1)))) (c_ARG1:<e,<e,t>> $0 (a:<id,<<e,t>,e>> !3 (lambda $2:e (and:<t*,t> (buy-01:<e,t> $2) (c_ARG0:<e,<e,t>> $2 (ref:<id,e> !2)) (c_ARG1:<e,<e,t>> $2 (a:<id,<<e,t>,e>> !4 (lambda $3:e (ticket:<e,t> $3)))))))))))");
	Assert.assertEquals(expected, max.getResult());
}
 
Example #25
Source File: NamedEntitiesCandidates.java    From amr with GNU General Public License v2.0
public static void main(String[] args) {
	try {
		// //////////////////////////////////////////
		// Init logging
		// //////////////////////////////////////////

		Logger.DEFAULT_LOG = new Log(System.err);
		Logger.setSkipPrefix(true);
		LogLevel.INFO.set();

		// //////////////////////////////////////////
		// Init AMR.
		// //////////////////////////////////////////

		Init.init(new File(args[0]), false);

		final LogicalConstant namePredicate = LogicalConstant
				.read("c_name:<e,<txt,t>>");

		final AMROntology ontology = AMROntology.read(new File(args[1]));

		for (final SingleSentence sentence : SingleSentenceCollection.read(
				new File(args[2]), new Tokenizer())) {
			System.out.println(sentence.getSample());
			for (final Pair<String, String> entityType : GetNamedEntities
					.of(sentence.getLabel(), namePredicate)) {
				if (entityType.second() == null
						|| !ontology.isType(entityType.second())) {
					System.out.println(entityType.first());
				} else {
					System.out.println(String.format("%s\t%s",
							entityType.first(), entityType.second()));
				}
			}
			System.out.println();
		}
	} catch (final IOException e) {
		throw new IllegalStateException(e);
	}
}
 
Example #26
Source File: AbstractAmrParser.java    From amr with GNU General Public License v2.0
private static Pair<List<EvaluationResult>, Boolean> doFactorGraphDummyInference(
		FactorGraph graph) {
	// Get the logical expression at the base of the graph as
	// the evaluation result. This will include the Skolem IDs
	// and any closure that was applied.
	return Pair.of(ListUtils.createSingletonList(new EvaluationResult(0.0,
			HashVectorFactory.create(), graph.getRoot().getExpression())),
			true);
}
 
Example #27
Source File: AbstractAmrParserOutput.java    From amr with GNU General Public License v2.0
public AbstractAmrParserOutput(
		List<Pair<BASEDERIV, FactorGraph>> derviationPairs,
		InferenceMethod inferenceMethod, long inferenceTime,
		List<DERIV> jointDerivations, boolean outputExact,
		IHashVectorImmutable theta) {
	assert InferenceMethod.LBP != inferenceMethod
			|| theta != null : "If using LBP for inference, theta must be provided to compute viterbi score for new derivation pairs";
	this.derviationPairs = derviationPairs;
	this.inferenceMethod = inferenceMethod;
	this.inferenceTime = inferenceTime;
	this.jointDerivations = jointDerivations;
	this.outputExact = outputExact;
	this.theta = theta;

	// Collect max scoring derivations. The score considered here is the
	// viterbi score of the derivation, as defined in AMRDerivation.
	final List<DERIV> maxScoringDerivations = new LinkedList<>();
	double max = -Double.MAX_VALUE;
	for (final DERIV derivation : jointDerivations) {
		if (derivation.getScore() == max) {
			maxScoringDerivations.add(derivation);
		} else if (derivation.getScore() > max) {
			maxScoringDerivations.clear();
			maxScoringDerivations.add(derivation);
			max = derivation.getScore();
		}
	}
	this.maxDerivations = Collections
			.unmodifiableList(maxScoringDerivations);

}
 
Example #28
Source File: FastTreeHashVector.java    From spf with GNU General Public License v2.0
FastTreeHashVector(IHashVectorImmutable other) {
	if (other instanceof FastTreeHashVector) {
		this.values = new Object2DoubleAVLTreeMap<KeyArgs>(
				((FastTreeHashVector) other).values);
	} else {
		this.values = new Object2DoubleAVLTreeMap<KeyArgs>();
		for (final Pair<KeyArgs, Double> o : other) {
			values.put(o.first(), o.second());
		}
	}
	this.values.defaultReturnValue(ZERO_VALUE);
}
 
Example #29
Source File: AbstractAmrParser.java    From amr with GNU General Public License v2.0
/**
 * Do LBP inference for the factor graph and extract the
 * {@link EvaluationResult}s.
 *
 * @return Pair with the list of {@link EvaluationResult}s and an inference
 *         exactness flag (if the flag is 'false', there were too many
 *         max-scoring evaluations and none were returned).
 */
private Pair<List<EvaluationResult>, Boolean> doLoopyBPInference(
		FactorGraph graph,
		IJointDataItemModel<LogicalExpression, LogicalExpression> model,
		boolean sloppyInference) {
	// Loopy BP inference
	LoopyBP.of(graph, bpConvergenceThreshold, bpMaxIterations, bpMaxTime);

	// Get the max configurations.
	final List<EvaluationResult> argmax = new LinkedList<>(
			GetMaxEvaluations.of(graph, maxLimit, model, sloppyInference));
	return Pair.of(argmax, !argmax.isEmpty());
}
 
Example #30
Source File: TypeRepository.java    From spf with GNU General Public License v2.0
private ComplexType createComplexTypeFromString(String string) {
	// Case complex functional type
	final String innerString = string.substring(1, string.length() - 1)
			.trim();
	int i = 0;
	final StringBuilder domainStringBuilder = new StringBuilder();
	char c;
	int parenthesisCounter = 0;
	while (i < innerString.length()
			&& !((c = innerString.charAt(i)) == ComplexType.COMPLEX_TYPE_SEP && parenthesisCounter == 0)) {
		++i;
		domainStringBuilder.append(c);
		if (c == ComplexType.COMPLEX_TYPE_OPEN_PAREN) {
			++parenthesisCounter;
		} else if (c == ComplexType.COMPLEX_TYPE_CLOSE_PAREN) {
			--parenthesisCounter;
		}
	}
	++i;
	final String rangeString = innerString.substring(i).trim();
	final String domainString = domainStringBuilder.toString().trim();

	// Check whether the domain string indicates a RecursiveComplexType; if
	// so, strip the indication and parse it into an Option.
	final Pair<String, RecursiveComplexType.Option> prefixOption = RecursiveComplexType.Option
			.parse(domainString);
	final RecursiveComplexType.Option option = prefixOption.second();
	final String domainStringTrimmed = prefixOption.first();

	final Type domain = getTypeCreateIfNeeded(domainStringTrimmed);
	final Type range = getTypeCreateIfNeeded(rangeString);

	return ComplexType.create(string, domain, range, option);
}