Java Code Examples for org.apache.jena.atlas.logging.Log#warn()

The following examples show how to use org.apache.jena.atlas.logging.Log#warn() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: DataSourceDescription.java    From rdf-delta with Apache License 2.0 6 votes vote down vote up
/**
 * Build a {@code DataSourceDescription} from its JSON form.
 * Requires "id" and "name" fields; "uri" is optional. The legacy field
 * "base" is accepted in place of "name" (with a deprecation warning).
 *
 * @throws DeltaException if "id" or "name" is absent.
 */
public static DataSourceDescription fromJson(JsonObject obj) {
    // "id" is mandatory.
    String idStr = JSONX.getStrOrNull(obj, F_ID);
    if ( idStr == null )
        throw new DeltaException("Missing \"id:\" in DataSourceDescription JSON");

    // "name" is mandatory, but fall back to the deprecated "base" field.
    String name = JSONX.getStrOrNull(obj, F_NAME);
    if ( name == null ) {
        @SuppressWarnings("deprecation")
        String legacyName = JSONX.getStrOrNull(obj, F_BASE);
        // Compatibility.
        Log.warn(DataSourceDescription.class, "Deprecated: Use of field name \"base\" - change to \"name\"");
        name = legacyName;
    }
    if ( name == null )
        throw new DeltaException("Missing \"name:\" in DataSourceDescription JSON");

    // "uri" is optional; null is acceptable here.
    String uri = JSONX.getStrOrNull(obj, F_URI);
    return new DataSourceDescription(Id.fromString(idStr), name, uri);
}
 
Example 2
Source File: DeltaAssembler.java    From rdf-delta with Apache License 2.0 6 votes vote down vote up
// NOTE: not called from anywhere yet ("UNUSED_CURRENTLY"). The intent is to
// start a background task that polls a change server and syncs the dataset
// on a fixed schedule.
private static void UNUSED_CURRENTLY_forkUpdateFetcher(String source, DatasetGraph dsg) {
        if ( true ) {
            // Deliberately disabled: the setup below is incomplete (the
            // exception text lists the missing pieces), so warn and bail out.
            Log.warn(DeltaAssembler.class, "forkUpdateFetcher not set up");
            if ( true ) return;
            throw new NotImplemented("NEED THE STATE AREA; NEED THE DATASOURCE ID; NEED THE CLIENT ID");
        }
        // Everything below is unreachable while the guard above is in place.
        DeltaLink dc = DeltaLinkHTTP.connect(source) ;
        DeltaConnection client = null;  // NOTE(review): never assigned - sync() below would NPE if this ever ran.
        Runnable r = ()->{
            try { client.sync(); }
            catch (Exception ex) {
                // Best-effort: log and let the scheduler retry on the next tick.
                Delta.DELTA_LOG.warn("Failed to sync with the change server: "+ex.getMessage()) ;
//                // Delay this task ; extra 3s + the time interval of 2s.
//                Does not work as expected.
//                Lib.sleep(5*1000);
            }
        } ;
        // Re-sync every 2 seconds (first run after an initial 2 second delay).
        // NOTE(review): the executor is never shut down - needs a lifecycle hook
        // before this code is enabled.
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(1) ;
        executor.scheduleWithFixedDelay(r, 2, 2, TimeUnit.SECONDS) ;
    }
 
Example 3
Source File: PatchStorageS3.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/**
 * Convert an S3 object summary to a patch {@code Id}.
 * The object key must start with the configured prefix; the remainder of
 * the key is parsed as the textual form of an Id.
 *
 * @return the patch Id, or null (with a warning) if the key is not valid.
 */
private Id keyToId(S3ObjectSummary summary) {
    final String key = summary.getKey();
    // Patch objects live under a fixed key prefix; anything else is unexpected.
    if ( !key.startsWith(prefix) ) {
        Log.warn(this, "Not a good object name: "+key);
        return null;
    }
    final String idPart = key.substring(prefix.length());
    try {
        return Id.fromString(idPart);
    } catch ( IllegalArgumentException ex ) {
        Log.warn(this, "Not a S3 key for a patch id: "+key);
        return null;
    }
}
 
Example 4
Source File: PatchStoreMgr.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/**
 * Unregister by provider name.
 * Logs a warning (but does not fail) if the provider was never registered;
 * in that case the remove below is a harmless no-op.
 */
public static void unregister(Provider provider) {
    FmtLog.info(LOG, "Unregister patch store: %s", provider);
    if ( ! providers.containsKey(provider) )
        Log.warn(PatchStore.class, "Not registered: "+provider);
    // Fix: the original stored the removed PatchStoreProvider in an unused
    // local variable; the return value is not needed.
    providers.remove(provider);
}
 
Example 5
Source File: PatchLogIndexZk.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/**
 * Parse a zNode name as a patch log version number.
 *
 * @return the parsed version, or -1 (with a warning) if the name is not a
 *         well-formed long.
 */
private long versionFromName(String name) {
    long version = -1;
    try {
        version = Long.parseLong(name);
    } catch (NumberFormatException ex) {
        Log.warn(this, "Attempt to extract the version from '"+name+"'");
    }
    return version;
}
 
Example 6
Source File: RDFChangesManagedOutput.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/**
 * Begin writing a new patch: open the managed output stream, install a text
 * token writer, and emit the "id" header plus - when a previous patch was
 * written - a "prev" header chaining back to it.
 * A warning is logged and nothing happens if output is already in progress.
 */
private void startOutput() {
    if ( currentStream != null ) {
        Log.warn(this, "Already writing");
        return;
    }
    currentStream = managedOutput.output();
    super.tok = new TokenWriterText(currentStream);
    // Every patch gets a fresh id; remember it so the next patch can link back.
    Node patchId = N.unique();
    header(RDFPatchConst.ID, patchId);
    if ( previous != null )
        header(RDFPatchConst.PREV, previous);
    previous = patchId;
}
 
Example 7
Source File: SPARQLExtFunctionRegistry.java    From sparql-generate with Apache License 2.0 5 votes vote down vote up
/**
 * Insert a class that is the function implementation.
 * Classes that do not implement {@code Function} are rejected with a
 * warning rather than an exception.
 *
 * @param uri String URI
 * @param funcClass Class for the function (a new instance is created per use).
 */
@Override
public void put(String uri, Class<?> funcClass) {
    boolean isFunction = Function.class.isAssignableFrom(funcClass);
    if (!isFunction) {
        Log.warn(this, "Class " + funcClass.getName() + " is not a Function");
        return;
    }
    // Wrap in a factory that instantiates the class when the function is used.
    put(uri, new FunctionFactoryAuto(funcClass));
}
 
Example 8
Source File: PatchStoreZk.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
// Create (or attach to) the patch log described by dsd, returning the local
// PatchLog object. Called both when the application asks for a new log and
// when the store watcher sees a new log appear elsewhere in the cluster.
private PatchLog create$(DataSourceDescription dsd) {
    // This is called to create a log when the application asks but also when the
    // store watcher sees a new log elsewhere appearing.
    //
    // If the log does not exist in Zookeeper, it is created (formatted) there.
    //
    // It implements "new DataSourceDescription"

    // XXX Split the roles, or add a flag.

    // Already known locally - nothing to do.
    if ( patchLogs.containsKey(dsd.getName()) ) {
        FmtLog.debug(LOGZK, "[%d] Found to exist", instance);
        return patchLogs.get(dsd.getName());
    }

    String dsName = dsd.getName();
    if ( ! validateName(dsName) ) {
        String msg = String.format("Log name '%s' does not match regex '%s'", dsName, LogNameRegex);
        Log.warn(LOGZK, msg);
        throw new DeltaBadRequestException(msg);
    }
    // format

    String logPath = zkPath(ZkConst.pLogs, dsName);
    if ( ! zkExists(client, logPath) ) {
        FmtLog.debug(LOGZK, "[%d] Does not exist: format", instance);
        // Take the cluster-wide lock so only one store instance formats the log.
        clusterLock(()->{

            // Someone else got in between our check above and acquiring the lock.
            if ( zkExists(client, logPath) ) {
                FmtLog.debug(LOGZK, "[%d] exists: skip format", instance);
                return;
            }
            formatPatchLog(client, dsd, logPath);

            // The watcher watch path ZkConst.pActiveLogs so adding a zNode
            // signals the log exists and is ready. It will trigger the watchers.
            // We are still inside the clusterLock and also the storeLock.
            // Our own watcher will wait until we leave storeLock, and then find
            // that "patchLogs.containsKey(dsd.getName())"
            String zkActiveLog = zkPath(ZkConst.pActiveLogs, dsName);
            JsonObject dsdJson = dsd.asJson();
            zkCreateSetJson(client, zkActiveLog, dsdJson);
            //FmtLog.debug(LOGZK, "[%d] format done", instance);
        }, null);
    }
    // Local storeLock still held.
    // Create the local object and register it before returning.
    PatchLog patchLog = newPatchLogFromIndexAndStorage(dsd);
    patchLogs.put(dsName, patchLog);
    return patchLog;
}
 
Example 9
Source File: labelSearch.java    From xcurator with Apache License 2.0 4 votes vote down vote up
// Property function execution: for each input binding, find subjects whose
// rdfs:label matches the given pattern as a case-insensitive regex.
// The method builds a small SPARQL algebra expression and evaluates it,
// keeping two alternative (disabled) evaluation strategies for illustration.
@Override
public QueryIterator exec(QueryIterator input, PropFuncArg argSubject, Node predicate, PropFuncArg argObject, ExecutionContext execCxt)
{
    // No real need to check the pattern arguments because
    // the replacement triple pattern and regex will cope
    // but we illustrate testing here.

    Node nodeVar = argSubject.getArg() ;
    String pattern = NodeUtils.stringLiteral(argObject.getArg()) ;
    if ( pattern == null )
    {
        Log.warn(this, "Pattern must be a plain literal or xsd:string: "+argObject.getArg()) ;
        return QueryIterNullIterator.create(execCxt) ;  // No results.
    }

    if ( false )
        // Old (ARQ 1) way - not recommended.
        return buildSyntax(input, nodeVar, pattern, execCxt) ;
    
    // Better 
    // Build a SPARQL algebra expression equivalent to:
    //   { ?nodeVar rdfs:label ?hidden . FILTER regex(?hidden, pattern, "i") }
    Var var2 = createNewVar() ;                     // Hidden variable
    
    BasicPattern bp = new BasicPattern() ;
    Triple t = new Triple(nodeVar, RDFS.label.asNode(), var2) ;
    bp.add(t) ;
    OpBGP op = new OpBGP(bp) ;
    
    // "i" = case-insensitive match.
    Expr regex = new E_Regex(new ExprVar(var2.getName()), pattern, "i") ;
    Op filter = OpFilter.filter(regex, op) ;

    // ---- Evaluation
    if ( true )
    {
        // Use the reference query engine
        // Create a table for the input stream (so it uses working memory at this point, 
        // which is why this is not the preferred way).  
        // Then join to expression for this stage.
        Table table = TableFactory.create(input) ;
        Op op2 = OpJoin.create(OpTable.create(table), filter) ;
        return Algebra.exec(op2, execCxt.getDataset()) ;
    }        
    
    // Use the default, optimizing query engine.
    return QC.execute(filter, input, execCxt) ;
}