Package com.ikanow.infinit.e.data_model.api

Examples of com.ikanow.infinit.e.data_model.api.ResponsePojo
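All of the snippets below follow the same basic pattern: build a ResponsePojo, attach a ResponseObject describing the action, its success flag and a message, optionally attach data, and serialize with toApi(). The minimal sketch below condenses that pattern using only the calls that appear in the snippets; the wrapper class name and the import of ResponseObject as a nested class of ResponsePojo are assumptions for illustration.

  import com.ikanow.infinit.e.data_model.api.ResponsePojo;
  import com.ikanow.infinit.e.data_model.api.ResponsePojo.ResponseObject; // assumed nesting of ResponseObject

  public class ResponsePojoSketch
  {
    // Build a success response carrying only a status message and serialize it to the API (JSON) form
    public static String success(String action, String message)
    {
      ResponsePojo rp = new ResponsePojo();
      rp.setResponse(new ResponseObject(action, true, message));
      return rp.toApi(); // JSON string, as handed to the REST layer in the snippets below
    }

    // Failure responses are often built with the single-argument constructor used in several snippets
    public static ResponsePojo failure(String action, String message)
    {
      return new ResponsePojo(new ResponseObject(action, false, message));
    }
  }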


  public Representation get( )
  {
 
     String data = "";
     MediaType mediaType = MediaType.APPLICATION_JSON;
     ResponsePojo rp = new ResponsePojo();

     cookieLookup = RESTTools.cookieLookup(cookie);
     if ( cookieLookup == null )
     {
       rp = new ResponsePojo();
       rp.setResponse(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
     }
     else {
       boolean validGroups = SocialUtils.validateCommunityIds(cookieLookup, communityIdStrList); //every call needs communityid so check now
       if ( validGroups == false )
       {
         rp = new ResponsePojo();
         rp.setResponse(new ResponseObject("Verifying Communities",false,"Community Ids are not valid for this user"));
         RESTTools.logRequest(this);
       }
       else
       {
         if ( action.equals("suggest"))
         {
           rp = this.search.getSuggestions(cookieLookup, term, communityIdStrList, wantGeo, wantLinkdata, wantNoAlias);
         }
         else if ( action.equals("suggestassoc"))
         {
           rp = this.search.getAssociationSuggestions(cookieLookup, ent1, verb, ent2, field, communityIdStrList, wantNoAlias);          
         }
         else if ( action.equals("suggestgeo"))
         {
           rp = this.search.getSuggestionsGeo(cookieLookup, term, communityIdStrList);          
         }
         else if ( action.equals("alias"))
         {
           //(OBSOLETE)
           rp = this.search.getAliasSuggestions(cookieLookup, term, field, communityIdStrList);
         }
       }
     } // (end if login valid)
     data = rp.toApi();

     return new StringRepresentation(data, mediaType);
  }


   
    // (NOTE CAN'T ACCESS "query" UNTIL AFTER 0.1 BECAUSE THAT CAN CHANGE IT)
   
    long nSysTime = (_nNow = System.currentTimeMillis());   
   
    ResponsePojo rp = new ResponsePojo();
   
    // communityIdList is CSV
    String[] communityIdStrs = SocialUtils.getCommunityIds(userIdStr, communityIdStrList);
   
    //(timing)
    long nQuerySetupTime = System.currentTimeMillis();
 
    ElasticSearchManager indexMgr = getIndexManager(communityIdStrs);
    SearchRequestBuilder searchSettings = indexMgr.getSearchOptions();

    StringBuffer querySummary = new StringBuffer();
    BaseQueryBuilder queryObj = null;
    InternalTempFilterInfo tempFilterInfo = null;
    try {
      queryObj = getBaseQuery(query, communityIdStrs, communityIdStrList, userIdStr, querySummary);
      if (null == queryObj) { // only occurs if has 1 element with ftext starting $cache:
        return getSavedQueryInstead(query.qt.get(0).ftext.substring(7), communityIdStrs, query); // (step over cache preamble)
      }
      tempFilterInfo = getBaseFilter(query, communityIdStrs);
    }
    catch (Exception e) {
      Globals.populateStackTrace(errorString, e);
      if (null != e.getCause()) {
        errorString.append("[CAUSE=").append(e.getCause().getMessage()).append("]");
        Globals.populateStackTrace(errorString, e.getCause());       
      }
      errorString.append(": " + e.getMessage());
      return null;
    }
   
    //DEBUG
    //querySummary.append(new Gson().toJson(query, AdvancedQueryPojo.class));
   
  // 0.4] Pre-Lucene Scoring
   
    // 0.4.1] General
   
    // Different options:
    //   a] Get the most recent N documents matching the query, score post-query
    //   b] Get the N highest (Lucene) scoring documents, incorporate significance post-query if desired
    // In both cases, N depends on whether significance calculation is taking place (and on the "skip" param)
   
    int nRecordsToOutput = query.output.docs.numReturn;
    int nRecordsToSkip = query.output.docs.skip;
    int nRecordsToGet = query.score.numAnalyze;

    final int nMAXRECORDSTOOUTPUT = 10000;
    final int nMAXRECORDSTOGET = 20000;
   
    // Some sanity checking on doc numbers:
    if (nRecordsToOutput > nMAXRECORDSTOOUTPUT) { // Upper limit...
      errorString.append(": Max # docs to return is 10000.");
      return null;
    }
    if (nRecordsToGet < nRecordsToOutput) {
      nRecordsToGet = nRecordsToOutput;
    }
    else if (nRecordsToGet > nMAXRECORDSTOGET) { // Upper limit...
      nRecordsToGet = nMAXRECORDSTOGET; // (we can do something sensible with this so carry on regardless)
    }
   
    boolean bUseSignificance = (query.score.sigWeight > 0.0);
    boolean bNeedExtraResultsForEnts =
      ((query.output.aggregation != null) && (query.output.aggregation.entsNumReturn != null) && (query.output.aggregation.entsNumReturn > 0))
        ||
      (query.output.docs.enable && (query.output.docs.numReturn > 0) && (query.output.docs.ents) && (query.score.scoreEnts));
   
    if (bUseSignificance || bNeedExtraResultsForEnts) {
     
      // Some logic taken from the original "knowledge/search"
      while ( (nRecordsToSkip + nRecordsToOutput > nRecordsToGet) && (nRecordsToGet <= nMAXRECORDSTOGET) )
      {
        nRecordsToGet += nRecordsToGet;
      }
      if (nRecordsToGet > nMAXRECORDSTOGET) {
        errorString.append(": Can only skip through to 20000 documents.");       
        return null;
      }
      searchSettings.setSize(nRecordsToGet);
     
      //TESTED
    }
    else if (query.output.docs.enable) { // In this case we just need the minimum number of records
      // (whether searching by date or by relevance)
      searchSettings.setFrom(nRecordsToSkip);
      nRecordsToSkip = 0; // (so it isn't double counted in the processing module)
      nRecordsToGet = nRecordsToOutput;
      searchSettings.setSize(nRecordsToGet);
      //TESTED
    }
     else { // In this case we're just outputting aggregations, and not even ones that come from the docs
       nRecordsToGet = 0; // (use this variable everywhere we care about bringing docs back, either to output or for suitable aggregation)
      searchSettings.setSize(0);
    }
   
    // Sort on score if relevance is being used   
   
    if (nRecordsToGet > 0) {
      if (query.score.relWeight > 0.0) { // (b) above
        // Using score is default, nothing to do
      }
      else { // (a) above
        // Debug code, if rel weight negative then use date to check Lucene score is better...
        if (query.score.relWeight < 0.0) {
          query.score.relWeight = -query.score.relWeight;
        }
        // Set Lucene to order:
        searchSettings.addSort(DocumentPojo.publishedDate_, SortOrder.DESC);
      }//TOTEST
    }//(if docs aren't enabled, don't need to worry about sorting)
   
    // 0.4.2] Prox scoring (needs to happen after [0.3])

    // Add proximity scoring:
    boolean bLowAccuracyDecay = false;
    if ((nRecordsToGet > 0) || (null == _scoringParams.adjustAggregateSig) || _scoringParams.adjustAggregateSig) {
      // (ie if we're getting docs or applying scores to entities)
     
      if (!_aggregationAccuracy.equals("full")) {
        bLowAccuracyDecay = true;
      }
      queryObj = addProximityBasedScoring(queryObj, searchSettings, query.score, tempFilterInfo.parentFilterObj, bLowAccuracyDecay);
     
      if (null == _scoringParams.adjustAggregateSig) { // auto-decide .. if ftext is set and is non-trivial
        if ((null != query.score.timeProx) || (null != query.score.geoProx)) {
            // (These are set to null above if badly formed)
          _scoringParams.adjustAggregateSig = true;         
        }
      }
     
    }// (else not worth the effort) 
   
    // 0.4.3] Source weightings (if any)
   
    queryObj = applyManualWeights(queryObj, query.score);
   
  // 0.5] Pre-lucene output options
   
    // only return the id field and score
    // (Both _id and score come back as default options, SearchHit:: getId and getScore, don't need anything else)

    // Facets
   
    // (These are needed for the case where we need to perform aggregations manually)
    Integer manualEntsNumReturn = null;
    Integer manualEventsNumReturn = null;
    Integer manualFactsNumReturn = null;
    Integer manualGeoNumReturn = null;
   
    //DEBUG
    //System.out.println(new Gson().toJson(query.output.aggregation));
   
    if ((null != query.output.aggregation) && (null != query.output.aggregation.raw)) { // Like query, specify raw aggregation (Facets)
      // Gross raw handling for facets
      if ((null != query.raw) && (null != query.raw.query)) {
        // Don't currently support raw query and raw facets because I can't work out how to apply
        // the override on group/source!
        errorString.append(": Not currently allowed raw query and raw facets");
        return null;
      }
      else { // Normal code
        searchSettings.setFacets(query.output.aggregation.raw.getBytes());
      }
    }
    else { // Apply various aggregation (=="facet") outputs to searchSettings
      boolean bSpecialCase = (null != query.raw) && (null != query.raw.query);
     
      if (!_aggregationAccuracy.equals("full")) {
        if (null != query.output.aggregation) {
          if (_aggregationAccuracy.equals("low")) {
            manualEntsNumReturn = query.output.aggregation.entsNumReturn;
            manualEventsNumReturn = query.output.aggregation.eventsNumReturn;
            manualFactsNumReturn = query.output.aggregation.factsNumReturn;
            manualGeoNumReturn = query.output.aggregation.geoNumReturn;
          }                   
          query.output.aggregation.entsNumReturn = null;
          query.output.aggregation.eventsNumReturn = null;
          query.output.aggregation.factsNumReturn = null;
          query.output.aggregation.geoNumReturn = null;
          // (allow time aggregation)
          // (allow source aggregation)
        }
      }
      AggregationUtils.parseOutputAggregation(query.output.aggregation, _aliasLookup,
                            tempFilterInfo.entityTypeFilterStrings, tempFilterInfo.assocVerbFilterStrings,
                              searchSettings, bSpecialCase?tempFilterInfo.parentFilterObj:null);

      // In partial accuracy case, restore aggregation
      if (null != manualEntsNumReturn) {
        query.output.aggregation.entsNumReturn = manualEntsNumReturn;
      }
      if (null != manualEventsNumReturn) {
        query.output.aggregation.eventsNumReturn = manualEventsNumReturn;
      }
      if (null != manualFactsNumReturn) {
        query.output.aggregation.factsNumReturn = manualFactsNumReturn;
      }
      if (null != manualGeoNumReturn) {
        query.output.aggregation.geoNumReturn = manualGeoNumReturn;
      }
      //TESTED
    }
    //TESTED x2     
   
    //(timing)
    nQuerySetupTime = System.currentTimeMillis() - nQuerySetupTime;
   
  // 0.6] Perform Lucene query
   
    // 0.6.1: query extensions: pre-query hook
    ArrayList<IQueryExtension> queryExtensions = null;
    if (null != _queryExtensions) {
      queryId = new ObjectId();
      queryExtensions = new ArrayList<IQueryExtension>(_queryExtensions.size());
      for (Class<IQueryExtension> queryExtensionClass: _queryExtensions) {
        // Don't catch any exceptions thrown here - let it bubble upwards
        IQueryExtension queryExtension = queryExtensionClass.newInstance();
        queryExtension.preQueryActivities(queryId, query, communityIdStrs);
        queryExtensions.add(queryExtension);
      }
    }//TESTED (see test.QueryExtensionsTestCode)
   
    // Built-in federated query engine ...
    if (null != _federatedQueryCache) {
      // 2 modes:
      // 1) If srcInclude is true(default) then check each source vs the table
      // 2) If srcInclude is false, or no sources specified, then check each community vs the table
     
      // 1:
      if ((null != query.input) && (null != query.input.sources) && ((null == query.input.srcInclude) || query.input.srcInclude))
      {
        for (String srcKey: query.input.sources) {
          FederatedQueryInMemoryCache fedQueryCacheEl = _federatedQueryCache.get(srcKey);
          if (null != fedQueryCacheEl) {
            if (null == this._builtInFederatedQueryEngine) {
              _builtInFederatedQueryEngine = new SimpleFederatedQueryEngine();
            }
            _builtInFederatedQueryEngine.addEndpoint(fedQueryCacheEl.source);
          }
        }
       }//TESTED (http://localhost:8184/knowledge/document/query/53ab42a2e4b04bcfe2de4387?qt[0].entity=%22garyhart.com/externaldomain%22&output.docs.numReturn=10&input.sources=inf...federated.externaldomain.&input.srcInclude=true)
     
      // 2:
      else { //Get federated queries from communities
        HashSet<String> excludeSrcs = null;
        for (String commIdStr: communityIdStrs) {
          FederatedQueryInMemoryCache fedQueryCacheEl = _federatedQueryCache.get(commIdStr);
          if (null != fedQueryCacheEl) {
           
            if ((null != query.input) && (null != query.input.sources)) { // (there are exclude sources)
              if (null == excludeSrcs) {
                excludeSrcs = new HashSet<String>(query.input.sources);
              }
            }//TESTED (http://localhost:8184/knowledge/document/query/53ab42a2e4b04bcfe2de4387?qt[0].entity=%22garyhart.com/externaldomain%22&output.docs.numReturn=10&input.sources=inf...federated.externaldomain.&input.srcInclude=false)
           
            for (Map.Entry<String, SourceFederatedQueryConfigPojo> fedQueryKV: fedQueryCacheEl.sources.entrySet()) {
              if ((null == excludeSrcs) || !excludeSrcs.contains(fedQueryKV.getKey())) {
                if (null == this._builtInFederatedQueryEngine) {
                  _builtInFederatedQueryEngine = new SimpleFederatedQueryEngine();
                }
                _builtInFederatedQueryEngine.addEndpoint(fedQueryKV.getValue());
              }
            }
          }
        }//TESTED (by hand)
      }
      if (null != _builtInFederatedQueryEngine) {
        _builtInFederatedQueryEngine.preQueryActivities(queryId, query, communityIdStrs);
      }
    }   
   
    // 0.6.2: the main query
    if ((null != query.explain) && query.explain) { // (for diagnostic - will return lucene explanation)
      searchSettings.setExplain(true);
    }
   
    SearchResponse queryResults = null;

    // (_source can now be enabled, so this is necessary to avoid returning it)
    searchSettings.addFields();
    if ((null != query.raw) && (null != query.raw.query))
    {
      // (Can bypass all other settings)       
      searchSettings.setQuery(query.raw.query);
      queryResults = indexMgr.doQuery(null, tempFilterInfo.parentFilterObj, searchSettings);
    }//TESTED '{ "raw": { "match_all": {} } }'
    else
    {
      // Where I can, use the source filter as part of the query so that
      // facets will apply to query+filter, not just filter
      queryObj = QueryBuilders.boolQuery().must(queryObj).must(QueryBuilders.constantScoreQuery(tempFilterInfo.parentFilterObj).boost(0.0F));
     
      queryResults = indexMgr.doQuery(queryObj, null, searchSettings);
    }//TESTED '{}' etc
   
    long nLuceneTime = queryResults.getTookInMillis();

  // 0.7] Lucene scores 
   
    long nProcTime = 0;
    long nProcTime_tmp = System.currentTimeMillis();
   
    StatisticsPojo stats = new StatisticsPojo();     
    stats.found = queryResults.getHits().getTotalHits();
     stats.start = (long)nRecordsToSkip;
       
    if (nRecordsToGet > 0) {
      stats.setScore(queryResults.getHits(), (null != query.score.geoProx)||(null != query.score.timeProx), (null != query.explain) && query.explain);
    }

    //DEBUG
    //System.out.println(new Gson().toJson(queryResults));
   
    nProcTime += (System.currentTimeMillis() - nProcTime_tmp);
   
  // 0.8] Get data from Mongo + handle scoring

    //(timing)
    long nMongoTime = System.currentTimeMillis();
    List<BasicDBObject> docs = null;
   
    //(aggregation)
    LinkedList<BasicDBObject> lowAccuracyAggregatedEntities = null; // (always low accuracy)
    LinkedList<BasicDBObject> standaloneEvents = null;
    LinkedList<BasicDBObject> lowAccuracyAggregatedEvents = null;
    LinkedList<BasicDBObject> lowAccuracyAggregatedFacts = null;
    AggregationUtils.GeoContainer lowAccuracyAggregatedGeo = null;
    AggregationUtils.GeoContainer extraAliasAggregatedGeo = null;
   
    ScoringUtils scoreStats = null;
    if (null != stats.getIds()) {

      DBCursor docs0 = this.getDocIds(DbManager.getDocument().getMetadata(), stats.getIds(), nRecordsToGet, query.output, query.score);
      nMongoTime = System.currentTimeMillis() - nMongoTime;
             
      nProcTime_tmp = System.currentTimeMillis();
     
      // Entity aggregation (CURRENTLY ALWAYS LOW AGGREGATION):
      if ((null != query.output.aggregation) && (null != query.output.aggregation.entsNumReturn) && (query.output.aggregation.entsNumReturn > 0)) {         
        lowAccuracyAggregatedEntities = new LinkedList<BasicDBObject>();
      }
     
      // Standalone events:
      if ((query.output.docs != null) && (query.output.docs.eventsTimeline != null) && query.output.docs.eventsTimeline) {
        standaloneEvents = new LinkedList<BasicDBObject>();
      }       
     
      // Low accuracy aggregations:
     
      if ((null != manualEventsNumReturn) && (manualEventsNumReturn > 0)) {
        lowAccuracyAggregatedEvents = new LinkedList<BasicDBObject>();
      }
      if ((null != manualFactsNumReturn) && (manualFactsNumReturn > 0)) {
        lowAccuracyAggregatedFacts = new LinkedList<BasicDBObject>();       
      }
     
      if ((null != manualGeoNumReturn) && (manualGeoNumReturn > 0)) {
        lowAccuracyAggregatedGeo = new AggregationUtils.GeoContainer();               
      }
      else if ((null != query.output.aggregation) && (null != query.output.aggregation.geoNumReturn) && (query.output.aggregation.geoNumReturn > 0))
      {
        // (only if not using low accuracy aggregation ... otherwise it all gets dumped in lowAccuracyAggregatedGeo)
        extraAliasAggregatedGeo = new AggregationUtils.GeoContainer();       
      }
     
      scoreStats = new ScoringUtils();
      try {
        boolean lockAcquired = true;
        try {
          lockAcquired = this.acquireConcurrentAccessLock();
         
        } catch (InterruptedException e) {
          //(that's fine just carry on)
          lockAcquired = false;
        }
        if (!lockAcquired) {
          rp.setResponse(new ResponseObject("Query", false, "Query engine busy, please try again later."));
          return rp;
        }
       
        scoreStats.setAliasLookupTable(_aliasLookup);
        docs = scoreStats.calcTFIDFAndFilter(DbManager.getDocument().getMetadata(),
                              docs0, query.score, query.output, stats, bLowAccuracyDecay,
                                nRecordsToSkip, nRecordsToOutput,
                                  communityIdStrs,
                                  tempFilterInfo.entityTypeFilterStrings, tempFilterInfo.assocVerbFilterStrings,
                                  standaloneEvents,
                                  lowAccuracyAggregatedEntities,
                                  lowAccuracyAggregatedGeo, extraAliasAggregatedGeo,
                                          lowAccuracyAggregatedEvents, lowAccuracyAggregatedFacts);
      }
      finally {
        scoreStats.clearAsMuchMemoryAsPossible();
        this.releaseConcurrentAccessLock();
      }     
      nProcTime += (System.currentTimeMillis() - nProcTime_tmp);
    }
    else {
      nMongoTime = 0;
    }
    //TESTED (all queries)
   
  // 0.9] Output:

    rp.setResponse(new ResponseObject("Query", true, querySummary.toString()));
   
    // 0.9.1] Stats:
    stats.resetArrays();
    rp.setStats(stats); // (only actually uses the response pojo, but get rid of big fields anyway...)

    // 0.9.2] Facets:

    if (null != lowAccuracyAggregatedEntities) { // Entity aggregation
      rp.setEntities(lowAccuracyAggregatedEntities);       
    }
    if (null != standaloneEvents) {
      rp.setEventsTimeline(standaloneEvents);
    }
    if (null != lowAccuracyAggregatedGeo) {
      rp.setGeo(lowAccuracyAggregatedGeo.geotags, (int)lowAccuracyAggregatedGeo.maxCount, (int)lowAccuracyAggregatedGeo.minCount);
    }
    if (null != lowAccuracyAggregatedEvents) {
      rp.setEvents(lowAccuracyAggregatedEvents);
    }
    if (null != lowAccuracyAggregatedFacts) {
      rp.setFacts(lowAccuracyAggregatedFacts);
    }
   
    if ((null != query.output.aggregation) && (null != query.output.aggregation.raw)) {
      rp.setFacets(queryResults.getFacets().facetsAsMap());
    }
    else if ((null != queryResults.getFacets()) && (null != queryResults.getFacets().getFacets())) { // "Logical" aggregation

      if (0.0 == query.score.sigWeight) {
        scoreStats = null; // (don't calculate event/fact aggregated significance if it's not wanted)
      }
      AggregationUtils.loadAggregationResults(rp, queryResults.getFacets().getFacets(), query.output.aggregation, scoreStats, _aliasLookup, tempFilterInfo.entityTypeFilterStrings, tempFilterInfo.assocVerbFilterStrings, extraAliasAggregatedGeo);
     
    } // (end facets not overwritten)     
   
    scoreStats = null; // (now definitely never need scoreStats)
   
    // 0.9.3] Documents
    if  (query.output.docs.enable) {
      if ((null != docs) && (docs.size() > 0)) {
        rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
      }
      else { // (ensure there's always an empty list)
        docs = new ArrayList<BasicDBObject>(0);
        rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
      }
    }
    else { // (ensure there's always an empty list)
      docs = new ArrayList<BasicDBObject>(0);
      rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
    }
   
    // 0.9.4] query extensions: post-query hook
    if (null != queryExtensions) {
      for (IQueryExtension queryExtension: queryExtensions) {
        // Don't catch any exceptions thrown here - let it bubble upwards
        queryExtension.postQueryActivities(queryId, docs, rp);
      }
    }//TESTED (see test.QueryExtensionsTestCode)
   
    // (Built-in version)
    if (null != _builtInFederatedQueryEngine) {
      _builtInFederatedQueryEngine.postQueryActivities(queryId, docs, rp);
    }
   
    // 0.9.5] Timing/logging
   
    long nTotalTime = System.currentTimeMillis() - nSysTime;
    rp.getResponse().setTime(nTotalTime);
   
    _logMsg.setLength(0);
    _logMsg.append("knowledge/query querylen=").append(querySummary.length());
    _logMsg.append(" query=").append(querySummary.toString());
    _logMsg.append(" userid=").append(userIdStr);
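For orientation, the long query-handler fragment above wires its results into the ResponsePojo in step 0.9: the query summary goes into the ResponseObject, the Lucene statistics into setStats, the MongoDB documents into setData, and the elapsed time onto the response. The condensed sketch below restates just that wiring as a fragment; querySummary, stats, docs and nSysTime stand in for the variables built earlier in the method.

     // Condensed sketch of step 0.9 above (querySummary, stats, docs and nSysTime are built earlier in the method)
     ResponsePojo rp = new ResponsePojo();
     rp.setResponse(new ResponseObject("Query", true, querySummary.toString()));
     rp.setStats(stats);                                     // hit counts and per-document scores
     if (null == docs) {
       docs = new ArrayList<BasicDBObject>(0);               // always hand back a list, even if empty
     }
     rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);  // documents straight from MongoDB
     rp.getResponse().setTime(System.currentTimeMillis() - nSysTime);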
  public ResponsePojo getPerson(String id, Boolean isPublicRequest)
  {
    //TODO (INF-502): there is no public vs private currently
    if ( isPublicRequest )
      return new ResponsePojo(new ResponseObject("Person Info",false,"There is no public get person info call currently, coming soon!"));
   
   
    ResponsePojo rp = new ResponsePojo();
   
    try
    {
      // Set up the query
      PersonPojo personQuery = new PersonPojo();
      try {
        personQuery.set_id(new ObjectId(id));
      }
      catch (Exception e) { // Not an id, try email
        personQuery.setEmail(id);
      }
     
      BasicDBObject dbo = (BasicDBObject) DbManager.getSocial().getPerson().findOne(personQuery.toDb());
      PersonPojo person = PersonPojo.fromDb(dbo, PersonPojo.class);
     
      rp.setData(person, new PersonPojoApiMap());
      rp.setResponse(new ResponseObject("Person Info", true, "Person info returned successfully"));
    }
    catch (Exception e)
    {
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Person Info", false, "Error returning person info: " + e.getMessage()
          + " - " + e.getStackTrace().toString()));
    }
    return rp;
  }
  public ResponsePojo listPerson(String userId)
  {
    ResponsePojo rp = new ResponsePojo();
    try
    {
       PersonPojo person = SocialUtils.getPerson(userId);
      boolean isAdmin = RESTTools.adminLookup(userId);
      CommunityPojo system_comm = getSystemCommunity();
      List<ObjectId> communityIds = new ArrayList<ObjectId>();
      for ( PersonCommunityPojo community : person.getCommunities())
      {
        ObjectId comm_id = community.get_id();
        if ( allowedToSeeCommunityMembers(comm_id, isAdmin, system_comm) )
        {
          communityIds.add(comm_id);
        }
      }
      BasicDBObject query = new BasicDBObject();
      query.put("communities._id", new BasicDBObject( MongoDbManager.in_, communityIds ));
      DBCursor dbc = DbManager.getSocial().getPerson().find(query);
     
     
      if (dbc.count() > 0)
      {
        rp.setData(PersonPojo.listFromDb(dbc, PersonPojo.listType()), new PersonPojoApiMap());
        rp.setResponse(new ResponseObject("People List", true, "List returned successfully"));       
      }
      else
      {
         rp.setResponse(new ResponseObject("People List", true, "No list to return"));
      }
     
    }
    catch (Exception e)
    {
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Person List", false, "Error returning person list: " + e.getMessage()
          + " - " + e.getStackTrace().toString()));
    }
    return rp;
  }

  /**
   * getAllPeople (REST, CURRENTLY UNUSED)
   * @return
   */
  public ResponsePojo getAllPeople()
  {
    ResponsePojo rp = new ResponsePojo();
    try
    {
      DBCursor dbc = DbManager.getSocial().getPerson().find();
     
      if (dbc.count() > 0)
      {
        rp.setData(PersonPojo.listFromDb(dbc, PersonPojo.listType()), new PersonPojoApiMap());
        rp.setResponse(new ResponseObject("People Info", true, "Info returned successfully"));       
      }
      else
      {
         rp.setResponse(new ResponseObject("People Info", true, "No info returned"));
      }
     
    }
    catch (Exception e)
    {
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Person Info", false, "Error returning person info: " + e.getMessage()
          + " - " + e.getStackTrace().toString()));
    }
    return rp;
  }
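Handler methods like getAllPeople() above return a ResponsePojo rather than writing to the wire themselves; the REST resource (as in the first snippet on this page) serializes it with toApi(). The hypothetical caller sketch below illustrates that hand-off; PersonHandler is an assumed name for the class containing the methods above, and only the ResponsePojo/StringRepresentation calls are taken from the snippets.

    // Hypothetical REST-side usage of the handler methods above
    PersonHandler people = new PersonHandler();       // assumed name of the class containing getAllPeople()
    ResponsePojo rp = people.getAllPeople();
    String data = rp.toApi();                         // serialize, exactly as the first snippet on this page does
    return new StringRepresentation(data, MediaType.APPLICATION_JSON);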

  /**
   * @param wpauth
   * @return
   */
  public ResponsePojo registerWPUser(String cookieLookup, String wpuser, String wpauth, String wpsetup)
  {   
    ResponsePojo rp = new ResponsePojo();
   
    //Step 0 Read wordpress objects
    WordPressUserPojo wpu = null;
    WordPressAuthPojo wpa = null;
    if (null != wpsetup) {
      WordPressSetupPojo setup = WordPressSetupPojo.fromApi(wpsetup, WordPressSetupPojo.class);
      wpu = setup.getUser();
      wpa = setup.getAuth();
      if ((null == wpu) || (null == wpa)) {
        rp.setResponse(new ResponseObject("WP Register User",false,"Need to specify both user and auth objects"));
        return rp;
      }
    }//TESTED
    else {
      wpu = WordPressUserPojo.fromApi(wpuser,WordPressUserPojo.class);
      wpa = WordPressAuthPojo.fromApi(wpauth,WordPressAuthPojo.class);
    }
   
    //Step 1 Create the person object
     //NOTE we used to store subscription info (i.e. in the PersonPojo)
     //but it was never used anywhere (validating subscription?)
     //it's in the WPUserPojo that comes across
    ObjectId profileId = new ObjectId();
    PersonPojo pp = new PersonPojo();
    pp.set_id(profileId);
    pp.setAccountStatus("active");
    if ((null == wpu.getEmail()) || (0 == wpu.getEmail().size())) {
      rp.setResponse(new ResponseObject("WP Register User",false,"Need to specify email"));
      return rp;
    }//TESTED (2c)
    pp.setEmail(wpu.getEmail().get(0));
    pp.setFirstName(wpu.getFirstname()); // (optional but one of this + last name must be set)
    pp.setLastName(wpu.getLastname()); // (optional but one of this + first name must be set)
    if ((null == wpu.getFirstname()) || wpu.getFirstname().isEmpty()){
      if (null == wpu.getLastname()) {
        rp.setResponse(new ResponseObject("WP Register User",false,"Need to specify one of firstname,lastname"));
        return rp;
      }
      pp.setDisplayName(wpu.getLastname());
    }//TESTED (2d)
    else if ((null == wpu.getLastname()) || wpu.getLastname().isEmpty()) {
      pp.setDisplayName(wpu.getFirstname());     
    }
    else {
      pp.setDisplayName(wpu.getFirstname() + " " + wpu.getLastname());           
    }
   
    // Check if user is already present (+set "primary keys"):
   
    if (null == wpu.getWPUserID()) { // WPUserId is optional, uses email if not present
      wpu.setWPUserID(pp.getEmail());
    }
    else { // Check WPU (+email later)
      PersonPojo personQuery = new PersonPojo();
      personQuery.setWPUserID(wpu.getWPUserID()); // (can be null, that's fine)           
      DBObject dboperson = DbManager.getSocial().getPerson().findOne(personQuery.toDb());
      if (null != dboperson) {
        rp.setResponse(new ResponseObject("WP Register User",false,"User already exists, both WPUserId and first email must be unique"));
        return rp;       
      }//TESTED (2e)
    }   
    pp.setWPUserID(wpu.getWPUserID());
   
    PersonPojo personQuery = new PersonPojo();
    personQuery.setEmail(pp.getEmail());
    DBObject dboperson = DbManager.getSocial().getPerson().findOne(personQuery.toDb());
    if (null != dboperson) {
      rp.setResponse(new ResponseObject("WP Register User",false,"User already exists, both WPUserId and first email must be unique"));
      return rp;       
    }//TESTED (2f)
   
    //(The rest of this code has not significantly changed)
   
    // Optional fields:
    pp.setPhone(wpu.getPhone());
    pp.setSubscriptionEndDate(wpu.getSubscriptionEndDate());
    pp.setSubscriptionID(wpu.getSubscriptionID());
    pp.setSubscriptionStartDate(wpu.getSubscriptionStartDate());
    pp.setSubscriptionTypeID(wpu.getSubscriptionTypeID());
   
    //Step 3 add communities to my list (self and system)
    List<PersonCommunityPojo> communities = new ArrayList<PersonCommunityPojo>();
    pp.setCommunities(communities);
   
     //these fields may need to be set one day
    pp.setAvatar(null);
    pp.setBiography(null);   
    pp.setContacts(null);
    pp.setLanguages(null);
    pp.setLinks(null);
    pp.setLocation(null);
    pp.setOrganization(null);
    pp.setTags(null);
    pp.setTitle(null);
    //end set of fields i didn't use
       
    //Step 4 Create the new auth object so user can login
    AuthenticationPojo ap = new AuthenticationPojo();
    ap.setId(profileId);
    ap.setProfileId(profileId);
    ap.setUsername(pp.getEmail());
    ap.setAccountStatus(AccountStatus.ACTIVE);
    if (null == wpa.getPassword()) { // Obligatory
      rp.setResponse(new ResponseObject("WP Register User",false,"Need to specify password"));
      return rp;
    }
    try
    {
      if (44 != wpa.getPassword().length()) { // hash if in the clear
        wpa.setPassword(PasswordEncryption.encrypt(wpa.getPassword()));
      }
      ap.setPassword(wpa.getPassword());
       if (null == wpa.getAccountType()) { // (optional, defaults to "user")
        wpa.setAccountType("user");
      }
      ap.setAccountType(wpa.getAccountType());
        // to create an account you must be admin, so this is fine....
     
      ap.setWPUserID(wpa.getWPUserID());   
       
      DateFormat df = new SimpleDateFormat("MMM dd, yyyy kk:mm:ss aa");
      //Handle copying dates from wordpress objects
      // (These are all optional, just use now if not specified)
      if (null == wpu.getCreated()) {
        pp.setCreated(new Date());       
      }
      else {
        pp.setCreated(df.parse(wpu.getCreated()));       
      }
      if (null == wpu.getModified()) {
        pp.setModified(new Date());       
      }
      else {
        pp.setModified(df.parse(wpu.getModified()));       
      }
      if (null == wpa.getCreated()) {
        ap.setCreated(new Date());       
      }
      else {
        ap.setCreated(df.parse(wpa.getCreated()));       
      }
      if (null == wpa.getModified()) {
        ap.setModified(new Date());       
      }
      else {
        ap.setModified(df.parse(wpa.getModified()));       
      }
      ap.setApiKey(wpa.getApiKey());
     
      //Step 5 Save all of these objects to the DB
      DbManager.getSocial().getPerson().insert(pp.toDb());
      DbManager.getSocial().getAuthentication().insert(ap.toDb());
     
      CommunityHandler cc = new CommunityHandler();
      cc.createSelfCommunity(pp); //add user to own community
     
      //try to get system
      BasicDBObject commQueryDbo = new BasicDBObject("isSystemCommunity", true);
        // (annoyingly can't use community pojo for queries because it has default fields)
      DBObject dbo = DbManager.getSocial().getCommunity().findOne(commQueryDbo);
      if (null != dbo) {
        CommunityPojo systemGroup = CommunityPojo.fromDb(dbo, CommunityPojo.class);
       
        //Add user to system community also
        cc.addCommunityMember(cookieLookup, systemGroup.getId().toString(), "Infinit.e System", pp.get_id().toString(),
            pp.getEmail(), pp.getDisplayName(), "member", "active", true);
      }               
      rp.setResponse(new ResponseObject("WP Register User",true,"User Registered Successfully"));
      rp.setData(ap, new AuthenticationPojoApiMap());
     
      // OK we're all good, finally for API key users create a persistent cookie:
      if (null != ap.getApiKey()) {
        // (if we're here then we're already admin so can always do this - unlike the update)
        CookiePojo cp = new CookiePojo();
        cp.set_id(profileId);
        cp.setCookieId(cp.get_id());
        cp.setApiKey(wpa.getApiKey());
        cp.setStartDate(ap.getCreated());
        cp.setProfileId(profileId);
        DbManager.getSocial().getCookies().save(cp.toDb());
      }//TOTEST
    }
    catch (Exception ex )
    {
      logger.error("Exception Message: " + ex.getMessage(), ex);
      rp.setResponse(new ResponseObject("WP Register User",false,"error while saving wp objects"));
    }
    return rp;
  }//TESTED

  //RETRIEVE
  @Get
  public Representation get()
  {   
    Date startTime = new Date();
    ResponsePojo rp = new ResponsePojo();
    if ( cookieLookup != null )
    {
      if ( id != null )
      {   
        //GET A SPECIFIC SHARE
         rp = shareController.getShare(cookieLookup, id, returnContent);
        SharePojo share = (SharePojo) rp.getData();
        if (null != share)
        {
          boolean bBinary = share.getType().equals("binary");
          if ( bBinary && returnContent )          
          {     
            try
            {              
              ByteArrayOutputRepresentation rep = new ByteArrayOutputRepresentation(MediaType.valueOf(share.getMediaType()));
              rep.setOutputBytes(share.getBinaryData());
              return rep;              
            }
            catch (Exception ex )
            {
              rp = new ResponsePojo(new ResponseObject(ACTION,false,"error converting bytes to output: " + ex.getMessage()));
            }            
          }
          else if (!bBinary && jsonOnly)
          {
            try
            {
              BasicDBObject dbo = (BasicDBObject) com.mongodb.util.JSON.parse(share.getShare());
              rp.setData(dbo, null);
            }
            catch (Exception e)
            { // Try a list instead           
              BasicDBList dbo = (BasicDBList) com.mongodb.util.JSON.parse(share.getShare());
              rp.setData(dbo, (BasePojoApiMap<BasicDBList>)null);                
            }
          }
        }
      }
      else
      {
        //SEARCH
        rp = this.shareController.searchShares(cookieLookup, searchby, searchids, type, skip, limit, ignoreAdmin, returnContent, searchParent);
      }
    }
    else
    {
      rp = new ResponsePojo(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
    }
   
    return returnRepresentation(rp, startTime);
  }
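The share GET above also shows the read side of a ResponsePojo: getData() hands back whatever the controller stored and the caller casts it to the expected pojo type. A minimal sketch of that consumption pattern, reusing only the calls visible in the snippet (the null check stands in for fuller error handling, since no success getter appears on this page):

      // Consume a ResponsePojo returned by a controller (pattern from the share GET above)
      ResponsePojo rp = shareController.getShare(cookieLookup, id, returnContent);
      SharePojo share = (SharePojo) rp.getData();     // getData() returns the stored pojo; the caller casts
      if (null != share) {
        // e.g. branch on share.getType()/share.getMediaType(), as the snippet above does
      }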

  //CREATE
  @Post
  public Representation post(Representation entity)  
  {
    ResponsePojo rp = new ResponsePojo();
    Date startTime = new Date();
   
    if ( cookieLookup != null )
    {
      SharePojo share = null;
      try
      {
         share = parseEntity(entity);
        share.set_id(null); //is create function, id can't exist
      }
      catch (Exception ex)
      {
        rp.setResponse(new ResponseObject(ACTION, false, ex.getMessage()));
        return returnRepresentation(rp, startTime);
      }
     
      String message = validateSharePojo(share, true);
      if ( message != null )
      {
        //failed to validate
        rp.setResponse(new ResponseObject(ACTION, false, message));
      }
      else
      {
         rp = this.shareController.createOrUpdateShare(cookieLookup, share, readWrite, returnContent);
       }
     }
    else
    {
      rp = new ResponsePojo(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
    }
   
    return returnRepresentation(rp, startTime);
  }

  //UPDATE
  @Put
  public Representation put(Representation entity)
  {
    ResponsePojo rp = new ResponsePojo();
    Date startTime = new Date();
    if ( cookieLookup != null )
    {
      SharePojo share = null;
      try
      {
        share = parseEntity(entity);
      }
      catch (Exception ex)
      {
        rp.setResponse(new ResponseObject(ACTION, false, ex.getMessage()));
        return returnRepresentation(rp, startTime);
      }
     
      String message = validateSharePojo(share, false);
      if ( message != null )
      {
        //failed to validate
        rp.setResponse(new ResponseObject(ACTION, false, message));
      }
      else
      {
        rp = this.shareController.createOrUpdateShare(cookieLookup, share, readWrite, returnContent);
      }   
    }
    else
    {
      rp = new ResponsePojo(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
    }
   
    return returnRepresentation(rp, startTime);
  }

// 1.0] Stored queries/datasets

  // Saved queries (ie the entire dataset)
 
  private ResponsePojo getSavedQueryInstead(String storedQueryNameOrId, String[] communityIdStrs, AdvancedQueryPojo query) {
    ResponsePojo rp = null;
    ObjectId oid = null;
    BasicDBObject jobQuery = null;
    try {
      oid = new ObjectId(storedQueryNameOrId);
      jobQuery = new BasicDBObject(CustomMapReduceJobPojo._id_, oid);
    }
    catch (Exception e) {
      jobQuery = new BasicDBObject(CustomMapReduceJobPojo.jobtitle_, storedQueryNameOrId);     
    }
    CustomMapReduceJobPojo savedJob = CustomMapReduceJobPojo.fromDb(DbManager.getCustom().getLookup().findOne(jobQuery), CustomMapReduceJobPojo.class);
   
    if (null != savedJob) { // Is this even a saved job?
      if (null != savedJob.jarURL) {       
        savedJob = null;
      }
    }
    if (null != savedJob) { // Authorization
      boolean auth = false;
      String communityIdStrList = Arrays.toString(communityIdStrs);
      for (ObjectId commId: savedJob.communityIds) {
       
        if (communityIdStrList.contains(commId.toString())) {
          auth = true;
          break;
        }
      }
      if (!auth) {
        savedJob = null;
      }
      if (null == savedJob) {
        throw new RuntimeException("Can't find saved query, or is a custom job not a query, or authorization error");
      }
      // OK go get the results of the job
      DBCollection coll = DbManager.getCollection(savedJob.getOutputDatabase(), savedJob.outputCollection);
      BasicDBObject result = (BasicDBObject) coll.findOne(); // (at some point support multiple saved queries)
      if (null == result) {
        throw new RuntimeException("Saved query is empty");     
      }
      BasicDBObject apiResultToConvert = (BasicDBObject) result.get("value");
      if (null == apiResultToConvert) {
        throw new RuntimeException("Saved query has invalid format");     
      }
      rp = ResponsePojo.fromDb(apiResultToConvert);
    }
    else if (null != oid) { // Support new user/doc queues
      SharePojo share = SharePojo.fromDb(DbManager.getSocial().getShare().findOne(jobQuery), SharePojo.class);
      if ((null == share) || (null == share.getShare()) ||
          (!share.getType().equals(DocumentQueueControlPojo.UserQueue) && !share.getType().equals(DocumentQueueControlPojo.SavedQueryQueue))
          )
      {
        throw new RuntimeException("Can't find saved query, or is a custom job not a query, or authorization error");               
      }
      else { // share.share is a  DocumentQueueControlPojo
        DocumentQueueControlPojo queue = DocumentQueueControlPojo.fromApi(share.getShare(), DocumentQueueControlPojo.class);
        BasicDBObject docQuery1 = new BasicDBObject(DocumentPojo._id_, new BasicDBObject(DbManager.in_, queue.getQueueList()));
        BasicDBObject docQuery2 = new BasicDBObject(DocumentPojo.updateId_, new BasicDBObject(DbManager.in_, queue.getQueueList()));
        BasicDBObject docQuery = new BasicDBObject(DbManager.or_, Arrays.asList(docQuery1, docQuery2));
        DBCursor dbc = DbManager.getDocument().getMetadata().find(docQuery).limit(query.score.numAnalyze);
        ScoringUtils scoreStats = new ScoringUtils();
        List<BasicDBObject> docs = null;
        StatisticsPojo stats = new StatisticsPojo();
        stats.setSavedScores(query.output.docs.skip, dbc.count());
        try {
          boolean lockAcquired = true;
          try {
            lockAcquired = this.acquireConcurrentAccessLock();
           
          } catch (InterruptedException e) {
            //(that's fine just carry on)
            lockAcquired = false;
          }
          if (!lockAcquired) {
            rp = new ResponsePojo();
            rp.setResponse(new ResponseObject("Query", false, "Query engine busy, please try again later."));
            return rp;
          }
          scoreStats.setAliasLookupTable(_aliasLookup);
          docs = scoreStats.calcTFIDFAndFilter(DbManager.getDocument().getMetadata(),
                              dbc, query.score, query.output, stats, false,
                              query.output.docs.skip, query.output.docs.numReturn,
                                    communityIdStrs,
                                    null, null,
                                    null,
                                    null,
                                    null, null,
                                    null, null);
        }
        finally {
          scoreStats.clearAsMuchMemoryAsPossible();
          this.releaseConcurrentAccessLock();
        }
        rp = new ResponsePojo();
        rp.setResponse(new ResponseObject("Query", true, "Saved Query: " + share.getTitle()));
        rp.setStats(stats);
        if ((null != docs) && (docs.size() > 0)) {
          rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
        }
        else { // (ensure there's always an empty list)
          docs = new ArrayList<BasicDBObject>(0);
          rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
        }
      }//end if user or saved query queue
    }
    return rp;
  }
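getSavedQueryInstead above is reached from the main query handler when the single ftext term of a query starts with "$cache:" (see the comment at the call site near the top of the long fragment); the substring(7) there steps over that 7-character prefix before the lookup. The condensed sketch below is hypothetical and only illustrates the trigger; the real test happens inside getBaseQuery, which returns null in this case.

     // Condensed sketch of the dispatch in the query handler above (the real test lives inside getBaseQuery)
     String ftext = query.qt.get(0).ftext;
     if (ftext.startsWith("$cache:")) {
       // "$cache:" is 7 characters, hence the substring(7) in the fragment above
       return getSavedQueryInstead(ftext.substring("$cache:".length()), communityIdStrs, query);
     }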
