Java Code Examples for org.joda.time.Interval#parse()

The following examples show how to use org.joda.time.Interval#parse(). Each example is extracted from an open-source project; the source file, project, and license are noted above each snippet.
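For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the ISO-8601 interval strings that Interval.parse() accepts and the exception it throws on bad input:

import org.joda.time.Interval;

public class IntervalParseDemo {
    public static void main(String[] args) {
        // Interval.parse() accepts an ISO-8601 interval string in the forms
        // start/end, start/period or period/end.
        Interval fromInstants = Interval.parse("2013-01-01T00:00:00+01:00/2013-01-31T23:59:00+01:00");
        Interval fromPeriod = Interval.parse("2013-01-01T00:00:00Z/P1M");

        System.out.println(fromInstants.getStart() + " -> " + fromInstants.getEnd());
        System.out.println(fromPeriod.toDuration());

        // An unparsable string raises IllegalArgumentException, which is why
        // Example 5 below wraps the call in a try/catch.
        try {
            Interval.parse("not-an-interval");
        } catch (IllegalArgumentException e) {
            System.err.println("invalid interval: " + e.getMessage());
        }
    }
}
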
Example 1
Source File: HistoDbCountAttributesTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    HistoDbHorizon horizon = HistoDbHorizon.SN;
    if (line.hasOption("horizon")) {
        horizon = HistoDbHorizon.valueOf(line.getOptionValue("horizon"));
    }
    OfflineConfig config = OfflineConfig.load();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create(true)) {
        Set<HistoDbAttributeId> attributeIds = new LinkedHashSet<>(histoDbClient.listAttributes());
        HistoDbStats stats = histoDbClient.queryStats(attributeIds, interval, horizon, true);
        for (HistoDbAttributeId attributeId : attributeIds) {
            context.getOutputStream().println(attributeId + ";" + (int) stats.getValue(HistoDbStatsType.COUNT, attributeId, -1));
        }
    }
}
 
Example 2
Source File: HistoDbPrintForecastDiffTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create()) {
        Interval interval = Interval.parse(line.getOptionValue("interval"));
        try (Reader reader = new InputStreamReader(histoDbClient.queryCsv(
                HistoQueryType.forecastDiff,
                EnumSet.allOf(Country.class),
                EnumSet.of(HistoDbEquip.loads, HistoDbEquip.gen),
                EnumSet.of(HistoDbAttr.P),
                interval,
                HistoDbHorizon.DACF,
                false,
                false))) {
            CharStreams.copy(reader, context.getOutputStream());
        }
    }
}
 
Example 3
Source File: ForecastErrorsAnalysisParameters.java    From ipst with Mozilla Public License 2.0
public static ForecastErrorsAnalysisParameters load() {
    ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("fea-parameters");

    DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate"));
    Interval histoInterval = Interval.parse(config.getStringProperty("histoInterval"));
    String feAnalysisId = config.getStringProperty("feAnalysisId");
    double ir = config.getDoubleProperty("ir");
    Integer flagPQ = config.getIntProperty("flagPQ");
    Integer method = config.getIntProperty("method");
    Integer nClusters = config.getIntProperty("nClusters");
    double percentileHistorical = config.getDoubleProperty("percentileHistorical");
    Integer modalityGaussian = config.getOptionalIntegerProperty("modalityGaussian").orElse(null);
    Integer outliers = config.getOptionalIntegerProperty("outliers").orElse(null);
    Integer conditionalSampling = config.getOptionalIntegerProperty("conditionalSampling").orElse(null);
    Integer nSamples = config.getIntProperty("nSamples");
    Set<Country> countries = config.getEnumSetProperty("countries", Country.class, DEFAULT_COUNTRIES);
    CaseType caseType = config.getEnumProperty("caseType", CaseType.class, DEFAULT_CASE_TYPE);
    boolean allInjections = config.getBooleanProperty("all-injections", DEFAULT_ALL_INJECTIONS);

    return new ForecastErrorsAnalysisParameters(baseCaseDate, histoInterval, feAnalysisId, ir, flagPQ, method, nClusters, percentileHistorical,
                                                modalityGaussian, outliers, conditionalSampling, nSamples, countries, caseType, allInjections);
}
 
Example 4
Source File: CreateFeaMat.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Path outputFolder = Paths.get(line.getOptionValue("output-folder"));
    Interval histoInterval = Interval.parse(line.getOptionValue("histo-interval"));

    if (Files.isRegularFile(caseFile)) {
        System.out.println("loading case " + caseFile);
        // load the network
        Network network = Importers.loadNetwork(caseFile);
        if (network == null) {
            throw new RuntimeException("Case '" + caseFile + "' not found");
        }
        network.getStateManager().allowStateMultiThreadAccess(true);
        createMat(network, histoInterval, outputFolder);
    } else {
        throw new RuntimeException("Case '" + caseFile + "' is not a valid basecase file");
    }
}
 
Example 5
Source File: ParseInterval.java    From super-csv with Apache License 2.0
/**
 * {@inheritDoc}
 * 
 * @throws SuperCsvCellProcessorException
 *             if value is null or is not a String
 */
public Object execute(final Object value, final CsvContext context) {
	validateInputNotNull(value, context);
	if (!(value instanceof String)) {
		throw new SuperCsvCellProcessorException(String.class, value,
				context, this);
	}
	final Interval result;
	try {
		result = Interval.parse((String) value);
	} catch (IllegalArgumentException e) {
		throw new SuperCsvCellProcessorException(
				"Failed to parse value as an Interval", context, this, e);
	}
	return next.execute(result, context);
}
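Note: ParseInterval above is the Joda cell processor from the Super CSV project; below is a hedged usage sketch. The CSV file name and the single "interval" column are hypothetical, while the reader setup follows the standard Super CSV API:

import java.io.FileReader;
import java.util.Map;

import org.joda.time.Interval;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.cellprocessor.joda.ParseInterval;
import org.supercsv.io.CsvMapReader;
import org.supercsv.io.ICsvMapReader;
import org.supercsv.prefs.CsvPreference;

public class ParseIntervalUsage {
    public static void main(String[] args) throws Exception {
        try (ICsvMapReader reader = new CsvMapReader(new FileReader("data.csv"), CsvPreference.STANDARD_PREFERENCE)) {
            String[] header = reader.getHeader(true);
            // ParseInterval converts the raw String cell into an org.joda.time.Interval
            CellProcessor[] processors = {new ParseInterval()};
            Map<String, Object> row;
            while ((row = reader.read(header, processors)) != null) {
                Interval interval = (Interval) row.get("interval");
                System.out.println(interval.getStart() + " -> " + interval.getEnd());
            }
        }
    }
}
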
 
Example 6
Source File: WCAHistoLimitsTest.java    From ipst with Mozilla Public License 2.0
@Test
public void testWrite() throws IOException, InterruptedException {
    Interval histoInterval = Interval.parse("2013-01-01T00:00:00+01:00/2013-01-31T23:59:00+01:00");

    Network network = NetworkTest1Factory.create();

    HistoDbClient histoDbClient = Mockito.mock(HistoDbClient.class);
    HistoDbStats histoDbStats = new HistoDbStats();
    histoDbStats.setValue(HistoDbStatsType.MIN, new HistoDbNetworkAttributeId(network.getLoads().iterator().next().getId(), HistoDbAttr.P), 0f);
    histoDbStats.setValue(HistoDbStatsType.MAX, new HistoDbNetworkAttributeId(network.getLoads().iterator().next().getId(), HistoDbAttr.P), 20f);
    histoDbStats.setValue(HistoDbStatsType.MIN, new HistoDbNetworkAttributeId(network.getGenerators().iterator().next().getId(), HistoDbAttr.P), 200f);
    histoDbStats.setValue(HistoDbStatsType.MAX, new HistoDbNetworkAttributeId(network.getGenerators().iterator().next().getId(), HistoDbAttr.P), 900f);
    Mockito.when(histoDbClient.queryStats(Matchers.anySet(), Matchers.eq(histoInterval), Matchers.eq(HistoDbHorizon.SN), Matchers.eq(true)))
           .thenReturn(histoDbStats);

    MemDataSource dataSource = new MemDataSource();

    StringToIntMapper<AmplSubset> mapper = new StringToIntMapper<>(AmplSubset.class);
    AmplUtil.fillMapper(mapper, network);

    WCAHistoLimits histoLimits = new WCAHistoLimits(histoInterval);
    histoLimits.load(network, histoDbClient);
    histoLimits.write(dataSource, mapper);

    String fileContent = String.join(System.lineSeparator(),
                                     "#loads historical data " + histoInterval,
                                     "#\"num\" \"min p (MW)\" \"max p (MW)\" \"id\"",
                                     "1 0.00000 20.0000 \""+ network.getLoads().iterator().next().getId() + "\"");
    assertEquals(fileContent, new String(dataSource.getData(WCAConstants.HISTO_LOADS_FILE_SUFFIX, WCAConstants.TXT_EXT), StandardCharsets.UTF_8).trim());

    fileContent = String.join(System.lineSeparator(),
                              "#generators historical data " + histoInterval,
                              "#\"num\" \"min p (MW)\" \"max p (MW)\" \"id\"",
                              "1 200.000 900.000 \""+ network.getGenerators().iterator().next().getId() + "\"");
    assertEquals(fileContent, new String(dataSource.getData(WCAConstants.HISTO_GENERATORS_FILE_SUFFIX, WCAConstants.TXT_EXT), StandardCharsets.UTF_8).trim());
}
 
Example 7
Source File: CheckSubstationUniqueToposTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    Path dictFile = null;
    if (line.hasOption("use-short-ids-dict")) {
        dictFile = Paths.get(line.getOptionValue("use-short-ids-dict"));
    }
    double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold"));
    double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold"));

    Network network = Importers.loadNetwork(caseFile);
    if (network == null) {
        throw new RuntimeException("Case '" + caseFile + "' not found");
    }
    network.getStateManager().allowStateMultiThreadAccess(true);

    OfflineConfig config = OfflineConfig.load();
    try (TopologyMiner topologyMiner = config.getTopologyMinerFactoryClass().newInstance().create()) {
        Path topoCacheDir = TopologyContext.createTopoCacheDir(network, interval, correlationThreshold, probabilityThreshold);
        TopologyContext topologyContext = topologyMiner.loadContext(topoCacheDir, interval, correlationThreshold, probabilityThreshold);
        if (topologyContext == null) {
            throw new RuntimeException("Topology context not found");
        }
        ShortIdDictionary dict = null;
        if (dictFile != null) {
            dict = new ShortIdDictionary(dictFile);
        }
        new UniqueTopologyBuilder(topologyContext.getTopologyHistory(), dict)
                .build(network);
    }
}
 
Example 8
Source File: PrintSubstationUniqueTopoTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    String substationId = line.getOptionValue("substation-id");
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    Path dictFile = null;
    if (line.hasOption("use-short-ids-dict")) {
        dictFile = Paths.get(line.getOptionValue("use-short-ids-dict"));
    }
    double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold"));
    double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold"));

    Network network = Importers.loadNetwork(caseFile);
    if (network == null) {
        throw new RuntimeException("Case '" + caseFile + "' not found");
    }
    network.getStateManager().allowStateMultiThreadAccess(true);

    OfflineConfig config = OfflineConfig.load();
    try (TopologyMiner topologyMiner = config.getTopologyMinerFactoryClass().newInstance().create()) {
        Path topoCacheDir = TopologyContext.createTopoCacheDir(network, interval, correlationThreshold, probabilityThreshold);
        TopologyContext topologyContext = topologyMiner.loadContext(topoCacheDir, interval, correlationThreshold, probabilityThreshold);
        Map<String, UniqueTopology> uniqueTopologies = new UniqueTopologyBuilder(topologyContext.getTopologyHistory()).build();
        UniqueTopology uniqueTopology = uniqueTopologies.get(substationId);
        if (uniqueTopology == null) {
            throw new RuntimeException("Unique topology not found for substation " + substationId);
        }
        ShortIdDictionary dict = null;
        if (dictFile != null) {
            dict = new ShortIdDictionary(dictFile);
        }
        uniqueTopology.print(context.getOutputStream(), dict);
    }
}
 
Example 9
Source File: DataService.java    From SensorWebClient with GNU General Public License v2.0
protected DesignOptions createDesignOptions(ParameterSet parameterSet,
                                            ArrayList<TimeseriesProperties> props,
                                            boolean renderGrid) {
    Interval timespan = Interval.parse(parameterSet.getTimespan());
    long begin = timespan.getStartMillis();
    long end = timespan.getEndMillis();
    return new DesignOptions(props, begin, end, renderGrid);
}
 
Example 10
Source File: HistoDbPrintAttributesTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create()) {
        boolean statistics = line.hasOption("statistics");
        Set<HistoDbAttributeId> attrs = new LinkedHashSet<>();
        if (!statistics && line.hasOption("add-datetime")) {
            attrs.add(HistoDbMetaAttributeId.datetime);
        }
        for (String str : line.getOptionValue("attributes").split(",")) {
            attrs.add(HistoDbAttributeIdParser.parse(str));
        }
        Interval interval = Interval.parse(line.getOptionValue("interval"));
        boolean format = line.hasOption("format");
        HistoDbHorizon horizon = HistoDbHorizon.SN;
        if (line.hasOption("horizon")) {
            horizon = HistoDbHorizon.valueOf(line.getOptionValue("horizon"));
        }
        boolean async = false;
        boolean zipped = false;
        InputStream is = histoDbClient.queryCsv(statistics ? HistoQueryType.stats : HistoQueryType.data, attrs, interval, horizon, zipped, async);
        if (format) {
            format(is, zipped, context.getOutputStream());
        } else {
            try (Reader reader = createReader(is, zipped)) {
                CharStreams.copy(reader, context.getOutputStream());
            }
        }
    }
}
 
Example 11
Source File: RuleDbDummyClientTest.java    From ipst with Mozilla Public License 2.0
@Test
public void testmethods() throws IOException, InterruptedException {
    Interval interval = Interval.parse("2013-01-14T00:00:00+01:00/2013-01-14T01:00:00+01:00");
    String rulesDbName = "DUMMYRULEDBNAMW";
    String wfId = "DUMMYWORKFLOWID";
    String contingencyId = "DUMMYCONTINGENCYID";

    RulesDbClient ruleDbClient = new RulesDbClientTestFactoryImpl().create(rulesDbName);
    assertEquals(ruleDbClient.listWorkflows(), Collections.emptyList());
    assertEquals(ruleDbClient.getRules(wfId, RuleAttributeSet.WORST_CASE, contingencyId, SecurityIndexType.TSO_OVERLOAD), Collections.emptyList());
    assertEquals(ruleDbClient.listRules(wfId, RuleAttributeSet.WORST_CASE), Collections.emptyList());
}
 
Example 12
Source File: Adapters.java    From activitystreams with Apache License 2.0
public Interval apply(String v) {
  return Interval.parse(v);
}
 
Example 13
Source File: IntervalAdapter.java    From powsybl-core with Mozilla Public License 2.0
@Override
public Interval unmarshal(String v) throws Exception {
    return Interval.parse(v);
}
 
Example 14
Source File: PrintOnlineWorkflowSummaryTable.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    OnlineConfig config = OnlineConfig.load();
    try (OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create()) {
        List<String> workflowsIds = new ArrayList<String>();
        if (line.hasOption("workflow")) {
            workflowsIds.add(line.getOptionValue("workflow"));
        } else if (line.hasOption("workflows")) {
            workflowsIds = Arrays.asList(line.getOptionValue("workflows").split(","));
        } else if (line.hasOption("basecase")) {
            DateTime basecaseDate = DateTime.parse(line.getOptionValue("basecase"));
            workflowsIds = onlinedb.listWorkflows(basecaseDate).stream().map(OnlineWorkflowDetails::getWorkflowId).collect(Collectors.toList());
        } else if (line.hasOption("basecases-interval")) {
            Interval basecasesInterval = Interval.parse(line.getOptionValue("basecases-interval"));
            workflowsIds = onlinedb.listWorkflows(basecasesInterval).stream().map(OnlineWorkflowDetails::getWorkflowId).collect(Collectors.toList());
        } else {
            context.getErrorStream().println("You must specify workflow(s) or basecase(s)");
            return;
        }
        TableFormatterConfig tableFormatterConfig = TableFormatterConfig.load();
        try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                (line.hasOption("output-format")) ? line.getOptionValue("output-format") : "ascii",
                (line.hasOption("output-file")) ? Paths.get(line.getOptionValue("output-file")) : null,
                TABLE_TITLE,
                new Column("WorkflowId"),
                new Column("Basecase"),
                new Column("Contingency"),
                new Column("State"),
                new Column("FailureStep"),
                new Column("FailureDescription"),
                new Column("ViolationType"),
                new Column("Violation"),
                new Column("ViolationStep"),
                new Column("Equipment"),
                new Column("Value"),
                new Column("Limit"))) {

            workflowsIds.sort((o1, o2) -> o1.compareTo(o2));
            workflowsIds.forEach(workflowId -> {
                Network basecase = onlinedb.getState(workflowId, 0);
                String basecaseId = basecase.getId();
                printPrecontingencyViolations(workflowId, basecaseId, onlinedb, formatter);
                printContingenciesViolations(workflowId, basecaseId, onlinedb, formatter);
            });
        }
    }
}
 
Example 15
Source File: IntervalConstant.java    From FROST-Server with GNU Lesser General Public License v3.0
public IntervalConstant(String value) {
    super(Interval.parse(value));
}
 
Example 16
Source File: RunForecastErrorsAnalysisMpiTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {

    OnlineWorkflowStartParameters startconfig = OnlineWorkflowStartParameters.loadDefault();

    String host = line.getOptionValue(OnlineWorkflowCommand.HOST);
    String port = line.getOptionValue(OnlineWorkflowCommand.PORT);
    String threads = line.getOptionValue(OnlineWorkflowCommand.THREADS);
    if (host != null) {
        startconfig.setJmxHost(host);
    }
    if (port != null) {
        startconfig.setJmxPort(Integer.valueOf(port));
    }
    if (threads != null) {
        startconfig.setThreads(Integer.valueOf(threads));
    }

    String analysisId = line.getOptionValue("analysis");
    DateTime baseCaseDate = line.hasOption("base-case-date")
            ? DateTime.parse(line.getOptionValue("base-case-date"))
            : getDefaultParameters().getBaseCaseDate();
    Interval histoInterval = line.hasOption("history-interval")
            ? Interval.parse(line.getOptionValue("history-interval"))
            : getDefaultParameters().getHistoInterval();
    double ir = line.hasOption("ir")
            ? Double.parseDouble(line.getOptionValue("ir"))
            : getDefaultParameters().getIr();
    int flagPQ = line.hasOption("flagPQ")
            ? Integer.parseInt(line.getOptionValue("flagPQ"))
            : getDefaultParameters().getFlagPQ();
    int method = line.hasOption("method")
            ? Integer.parseInt(line.getOptionValue("method"))
            : getDefaultParameters().getMethod();
    Integer nClusters = line.hasOption("nClusters")
            ? Integer.parseInt(line.getOptionValue("nClusters"))
            : getDefaultParameters().getnClusters();
    double percentileHistorical = line.hasOption("percentileHistorical")
            ? Double.parseDouble(line.getOptionValue("percentileHistorical"))
            : getDefaultParameters().getPercentileHistorical();
    Integer modalityGaussian = line.hasOption("modalityGaussian")
            ? Integer.parseInt(line.getOptionValue("modalityGaussian"))
            : getDefaultParameters().getModalityGaussian();
    Integer outliers = line.hasOption("outliers")
            ? Integer.parseInt(line.getOptionValue("outliers"))
            : getDefaultParameters().getOutliers();
    Integer conditionalSampling = line.hasOption("conditionalSampling")
            ? Integer.parseInt(line.getOptionValue("conditionalSampling"))
            : getDefaultParameters().getConditionalSampling();
    Integer nSamples = line.hasOption("nSamples")
            ? Integer.parseInt(line.getOptionValue("nSamples"))
            : getDefaultParameters().getnSamples();
    Set<Country> countries = line.hasOption("countries")
            ? Arrays.stream(line.getOptionValue("countries").split(",")).map(Country::valueOf).collect(Collectors.toSet())
            : getDefaultParameters().getCountries();
    CaseType caseType = line.hasOption("case-type")
            ? CaseType.valueOf(line.getOptionValue("case-type"))
            : getDefaultParameters().getCaseType();
    boolean allInjections = line.hasOption("all-injections")
            ? true
            : getDefaultParameters().isAllInjections();

    ForecastErrorsAnalysisParameters parameters = new ForecastErrorsAnalysisParameters(baseCaseDate, histoInterval, analysisId, ir, flagPQ, method, nClusters,
                                                                                       percentileHistorical, modalityGaussian, outliers, conditionalSampling,
                                                                                       nSamples, countries, caseType, allInjections);


    String urlString = "service:jmx:rmi:///jndi/rmi://" + startconfig.getJmxHost() + ":" + startconfig.getJmxPort() + "/jmxrmi";

    JMXServiceURL serviceURL = new JMXServiceURL(urlString);
    Map<String, String> jmxEnv = new HashMap<>();
    JMXConnector connector = JMXConnectorFactory.connect(serviceURL, jmxEnv);
    MBeanServerConnection mbsc = connector.getMBeanServerConnection();

    ObjectName name = new ObjectName(LocalOnlineApplicationMBean.BEAN_NAME);
    LocalOnlineApplicationMBean application = MBeanServerInvocationHandler.newProxyInstance(mbsc, name, LocalOnlineApplicationMBean.class, false);
    String timeHorizonS = "";
    if (line.hasOption("time-horizon")) {
        timeHorizonS = line.getOptionValue("time-horizon");
    }
    application.runFeaAnalysis(startconfig, parameters, timeHorizonS);

}
 
Example 17
Source File: OnlineDbMVStore.java    From ipst with Mozilla Public License 2.0
@Override
public OnlineWorkflowParameters getWorkflowParameters(String workflowId) {
    Objects.requireNonNull(workflowId, "workflow id is null");
    LOGGER.info("Getting configuration parameters of wf {}", workflowId);
    if (isWorkflowStored(workflowId)) {
        MVStore wfMVStore = null;
        try {
            wfMVStore = isStoreOpen(workflowId) ? getStore(workflowId) : openStore(workflowId);
            if (wfMVStore.hasMap(STORED_PARAMETERS_MAP_NAME)) {
                MVMap<String, String> storedParametersMap = wfMVStore.openMap(STORED_PARAMETERS_MAP_NAME, mapBuilder);
                DateTime baseCaseDate = DateTime.parse(storedParametersMap.get(STORED_PARAMETERS_BASECASE_KEY));
                int states = Integer.parseInt(storedParametersMap.get(STORED_PARAMETERS_STATE_NUMBER_KEY));
                String offlineWorkflowId = storedParametersMap.get(STORED_PARAMETERS_OFFLINE_WF_ID_KEY);
                TimeHorizon timeHorizon = TimeHorizon.fromName(storedParametersMap.get(STORED_RESULTS_TIMEHORIZON_KEY));
                Interval histoInterval = Interval.parse(storedParametersMap.get(STORED_PARAMETERS_HISTO_INTERVAL_KEY));
                String feAnalysisId = storedParametersMap.get(STORED_PARAMETERS_FEA_ID_KEY);
                double rulesPurityThreshold = Double.parseDouble((storedParametersMap.get(STORED_PARAMETERS_RULES_PURITY_KEY) == null) ? "1" : storedParametersMap.get(STORED_PARAMETERS_RULES_PURITY_KEY));
                boolean storeStates = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_STORE_STATES_KEY));
                boolean analyseBasecase = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_ANALYSE_BASECASE_KEY));
                boolean validation = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_VALIDATION_KEY));
                Set<SecurityIndexType> securityIndexes = null;
                if (storedParametersMap.containsKey(STORED_PARAMETERS_SECURITY_INDEXES_KEY)) {
                    securityIndexes = OnlineDbMVStoreUtils.jsonToIndexesTypes(storedParametersMap.get(STORED_PARAMETERS_SECURITY_INDEXES_KEY));
                }
                CaseType caseType = CaseType.valueOf(storedParametersMap.get(STORED_PARAMETERS_CASE_TYPE_KEY));
                Set<Country> countries = OnlineDbMVStoreUtils.jsonToCountries(storedParametersMap.get(STORED_PARAMETERS_COUNTRIES_KEY));
                boolean mergeOptimized = OnlineWorkflowParameters.DEFAULT_MERGE_OPTIMIZED;
                if (storedParametersMap.containsKey(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY)) {
                    mergeOptimized = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY));
                }
                float limitReduction = OnlineWorkflowParameters.DEFAULT_LIMIT_REDUCTION;
                if (storedParametersMap.containsKey(STORED_PARAMETERS_LIMIT_REDUCTION_KEY)) {
                    limitReduction = Float.parseFloat(storedParametersMap.get(STORED_PARAMETERS_LIMIT_REDUCTION_KEY));
                }
                boolean handleViolations = OnlineWorkflowParameters.DEFAULT_HANDLE_VIOLATIONS_IN_N;
                if (storedParametersMap.containsKey(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY)) {
                    handleViolations = Boolean.parseBoolean(storedParametersMap.get(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY));
                }
                float constraintMargin = OnlineWorkflowParameters.DEFAULT_CONSTRAINT_MARGIN;
                if (storedParametersMap.containsKey(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY)) {
                    constraintMargin = Float.parseFloat(storedParametersMap.get(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY));
                }
                OnlineWorkflowParameters onlineWfPars = new OnlineWorkflowParameters(baseCaseDate,
                        states,
                        histoInterval,
                        offlineWorkflowId,
                        timeHorizon,
                        feAnalysisId,
                        rulesPurityThreshold,
                        storeStates,
                        analyseBasecase,
                        validation,
                        securityIndexes,
                        caseType,
                        countries,
                        mergeOptimized,
                        limitReduction,
                        handleViolations,
                        constraintMargin);
                if (storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) {
                    onlineWfPars.setCaseFile(storedParametersMap.get(STORED_PARAMETERS_CASE_FILE_KEY));
                }
                return onlineWfPars;
            } else {
                LOGGER.warn("No configuration parameters of wf {} stored in online db", workflowId);
                return null;
            }
        } finally {
            if (wfMVStore != null && !isStoreOpen(workflowId)) {
                wfMVStore.close();
            }
        }
    } else {
        LOGGER.warn("No data about wf {}", workflowId);
        return null;
    }
}
 
Example 18
Source File: UncertaintiesAnalysisTool.java    From ipst with Mozilla Public License 2.0
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));

    try (ComputationManager computationManager = new LocalComputationManager()) {

        System.out.println("loading case file: " + caseFile);

        // load the network

        if (Files.isRegularFile(caseFile)) {
            // load the network
            Network network = Importers.loadNetwork(caseFile);
            if (network == null) {
                throw new RuntimeException("Case '" + caseFile + "' not found");
            }

            try (HistoDbClient histoDbClient = new HistoDbClientImpl(HistoDbConfig.load(), new HistoDbCacheImpl())) {

                System.out.println("compute uncertainties...");

                Uncertainties uncertainties = new UncertaintiesAnalyserImpl(network, histoDbClient, computationManager)
                        .analyse(histoInterval).join();

                StringToIntMapper<AmplSubset> mapper = AmplUtil.createMapper(network);
                DataSource ds = new FileDataSource(outputDir, caseFile.getFileName().toString(), new DefaultDataSourceObserver() {
                    @Override
                    public void opened(String streamName) {
                        System.out.println("writing " + streamName);
                    }
                });

                new UncertaintiesAmplWriter(uncertainties, ds, mapper).write();
            }
        } else {
            throw new RuntimeException("Not a regular file '" + caseFile + "'");
        }
    }
}
 
Example 19
Source File: FEAHistoDBFacadeTest.java    From ipst with Mozilla Public License 2.0
@Test
public void testHistoricalDataToCsvFile() throws Exception {
    List<String> generatorsIds = Arrays.asList("generator1", "generator2");
    List<String> loadsIds = Arrays.asList("load1", "load2", "load3");
    Interval histoInterval = Interval.parse("2013-01-01T00:00:00+01:00/2013-01-31T23:59:00+01:00");
    
    String csvContent = String.join(System.lineSeparator(),
                                    String.join(",", 
                                                "datetime",
                                                "horizon",
                                                "forecastTime",
                                                generatorsIds.stream().map(generatorId -> String.join(",", generatorId + "_P", generatorId + "_Q")).collect(Collectors.joining(",")),
                                                loadsIds.stream().map(loadId -> String.join(",", loadId + "_P", loadId + "_Q")).collect(Collectors.joining(","))),
                                    String.join(",", 
                                                "Fri 01 Jan 2013 00:00:00 GMT","720","DACF",
                                                "0.1","-0.1","0.2","-0.2",
                                                "0.1","-0.1","0.2","-0.2","0.3","-0.3"),
                                    String.join(",", 
                                                "Fri 01 Jan 2013 00:00:00 GMT","0","SN",
                                                "0.11","-0.11","0.21","-0.21",
                                                "0.11","-0.11","0.21","-0.21","0.31","-0.31"));

    HistoDbClient histoDbClient = Mockito.mock(HistoDbClient.class);
    Mockito.when(histoDbClient.queryCsv(Matchers.eq(HistoQueryType.forecastDiff), 
                                        Matchers.any(), 
                                        Matchers.eq(histoInterval), 
                                        Matchers.eq(HistoDbHorizon.DACF), 
                                        Matchers.eq(false),
                                        Matchers.eq(false)))
           .thenReturn(new ByteArrayInputStream(csvContent.getBytes()));
    
    
    String feaCsvFileName = "forecasterrors_historicaldata.csv";
    Path workingDir = Files.createDirectory(fileSystem.getPath("/working-dir"));
    
    Path historicalDataCsvFile = workingDir.resolve(feaCsvFileName);
    FEAHistoDBFacade.historicalDataToCsvFile(histoDbClient,
                                             generatorsIds,
                                             loadsIds,
                                             histoInterval,
                                             historicalDataCsvFile);
    
    assertTrue(Files.exists(historicalDataCsvFile));
    try (InputStream expectedStream = new ByteArrayInputStream(csvContent.getBytes());
         InputStream actualStream = Files.newInputStream(historicalDataCsvFile)) {
        assertTrue(IOUtils.contentEquals(expectedStream, actualStream));
    }
    
}
 
Example 20
Source File: OnlineWorkflowParameters.java    From ipst with Mozilla Public License 2.0
public static OnlineWorkflowParameters loadDefault() {
    ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("online-default-parameters");

    int states = config.getIntProperty("states");
    String offlineWorkflowId = config.getStringProperty("offlineWorkflowId", null);
    TimeHorizon timeHorizon = TimeHorizon.fromName(config.getStringProperty("timeHorizon").trim());
    Interval histoInterval = Interval.parse(config.getStringProperty("histoInterval"));
    String feAnalysisId = config.getStringProperty("feAnalysisId");
    double rulesPurityThreshold = Double.parseDouble(config.getStringProperty("rulesPurityThreshold"));
    boolean storeStates = config.getBooleanProperty("storeStates", false);
    boolean analyseBasecase = config.getBooleanProperty("analyseBasecase", true);
    boolean validation = config.getBooleanProperty("validation", false);
    Set<SecurityIndexType> securityIndexes = config.getEnumSetProperty("securityIndexes", SecurityIndexType.class, null);
    boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DEFAULT_MERGE_OPTIMIZED);
    float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION);
    boolean handleViolationsInN = config.getBooleanProperty("handleViolationsInN", DEFAULT_HANDLE_VIOLATIONS_IN_N);
    float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN);

    String caseFile = config.getStringProperty("caseFile", null);
    if (caseFile != null) {
        if ((config.getStringProperty("baseCaseDate", null) != null)
                || (config.getStringProperty("caseType", null) != null)
                || (config.getStringProperty("countries", null) != null)) {
            throw new RuntimeException("caseFile and ( baseCaseDate, caseType, countries ) are mutually exclusive options");
        }
        return new OnlineWorkflowParameters(states,
                histoInterval,
                offlineWorkflowId,
                timeHorizon,
                feAnalysisId,
                rulesPurityThreshold,
                storeStates,
                analyseBasecase,
                validation,
                securityIndexes,
                mergeOptimized,
                limitReduction,
                handleViolationsInN,
                constraintMargin,
                caseFile);
    }
    DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate"));
    CaseType caseType = config.getEnumProperty("caseType", CaseType.class);
    Set<Country> countries = config.getEnumSetProperty("countries", Country.class);
    return new OnlineWorkflowParameters(baseCaseDate,
            states,
            histoInterval,
            offlineWorkflowId,
            timeHorizon,
            feAnalysisId,
            rulesPurityThreshold,
            storeStates,
            analyseBasecase,
            validation,
            securityIndexes,
            caseType,
            countries,
            mergeOptimized,
            limitReduction,
            handleViolationsInN,
            constraintMargin);
}