Package com.mongodb

Examples of com.mongodb.BasicDBList


            } else {
                // Fill in the metadata from the available node metadata

                // Get the IP address
                if (cMetadata.containsField("publicAddresses")) {
                    BasicDBList publicAddresses = (BasicDBList) cMetadata.get("publicAddresses");

                    // TODO: How do we want to handle multiple IP addresses?
                    if (publicAddresses.size() > 0) {
                        civMetadata.put("targetIP", publicAddresses.get(0));
                    }
                }

                // Get location information (ISO 3166 code, region and availability zone)
                if (cMetadata.containsField("location") && cMetadata.get("location") != null) {
                    BasicDBObject location = (BasicDBObject) cMetadata.get("location");
                    boolean regionProcessed = false;
                    boolean zoneProcessed = false;

                    while (location != null) {
                        if (regionProcessed && zoneProcessed) {
                            break;
                        }

                        String locationScope = location.containsField("scope") ? location.getString("scope") : null;

                        if (locationScope != null) {
                            LocationScope scope = LocationScope.valueOf(locationScope);

                            switch (scope) {
                                case REGION:
                                    civMetadata.put("targetRegion", location.get("id"));
                                    regionProcessed = true;
                                    break;
                                case ZONE:
                                    BasicDBList iso3166Codes = (BasicDBList) location.get("iso3166Codes");

                                    civMetadata.put("targetISO3166Code", iso3166Codes.get(0));
                                    civMetadata.put("targetZone", location.get("id"));
                                    zoneProcessed = true;
                                    break;
                            }
                        }
View Full Code Here


        testProcess("function8");
    }
    */

    private void testProcess(String testFunction) throws Exception {
        BasicDBList testData = getTestData(testFunction);
        Iterator<Object> iter = testData.iterator();
        Map<String, JuggaloaderStreamState> states = new HashMap<>();
        Values previousValues = null;
        for (int i = 0; iter.hasNext(); i++) {
            BasicDBObject dbObject = (BasicDBObject) iter.next();
            dbObject.put("testFunction", testFunction);
View Full Code Here

        dateTime = dt;
        return ts;
    }

    private BasicDBList getTestData(String testFunction) throws Exception {
        BasicDBList dbList = (BasicDBList) JSON.parse(
            JSONUtils.readJSONFromClasspath(String.format("/com/streamreduce/storm/bolts/JuggaloaderTimeBaseTest-%s.json", testFunction))
        );
        assert dbList != null;
        return dbList;
    }
View Full Code Here

    private void validateEvent(Event event, JSONObject json) {
        Map<String, Object> eventMetadata = event.getMetadata();

        // Validate the hashtags
        if (json.containsKey("hashtags")) {
            BasicDBList targetHashtags = (BasicDBList)eventMetadata.get("targetHashtags");
            JSONArray messageHashtags = json.getJSONArray("hashtags");
            Set<String> expectedHashtags = new HashSet<>();

            for (Object rawHashtag : targetHashtags) {
                expectedHashtags.add(rawHashtag.toString());
            }

            // Account for the inherited '#custom' hashtag from the target
            assertEquals(messageHashtags.size() + 1, targetHashtags.size());

            // Ensure all message hashtags are in the targetHashtags
            for (Object rawHashtag : messageHashtags) {
                assertTrue(expectedHashtags.contains(rawHashtag.toString()));
            }
View Full Code Here

        this.mongoObject.append(field, value.unwrap());
    }

    @Override
    public void set(final String field, final Object[] values) {
        final BasicDBList list = new BasicDBList();
        Collections.addAll(list, values);
        this.mongoObject.append(field, list);
    }
View Full Code Here

        this.mongoObject.append(field, list);
    }

    @Override
    public void set(final String field, final NoSQLObject<BasicDBObject>[] values) {
        final BasicDBList list = new BasicDBList();
        for (final NoSQLObject<BasicDBObject> value : values) {
            list.add(value.unwrap());
        }
        this.mongoObject.append(field, list);
    }
View Full Code Here

    }
  }
 
  /**
   * Reads the text ("part-*") output files of the given custom map/reduce job
   * from HDFS and converts each tab-separated line into a {@link BasicDBObject}
   * with "key"/"value" fields, collecting them all into a {@link BasicDBList}.
   *
   * NOTE(review): the {@code nLimit} parameter is never consulted in the body
   * shown here, so the result list grows unbounded — confirm whether a size
   * cap was intended.
   *
   * NOTE(review): {@code in.close()} runs only on the success path and the
   * BufferedReader is never closed; an IOException mid-read leaks the stream.
   * Consider try-with-resources.
   */
  public static BasicDBList getBsonFromTextFiles(CustomMapReduceJobPojo cmr, int nLimit, String fields) throws IOException, SAXException, ParserConfigurationException {
   
    BasicDBList dbl = new BasicDBList();

    PropertiesManager props = new PropertiesManager();
    Configuration conf = getConfiguration(props);   
   
    // Resolve the job's output directory and scan its "part-*" reducer files.
    Path pathDir = HadoopUtils.getPathForJob(cmr, conf, false);
    FileSystem fs = FileSystem.get(conf);
   
    FileStatus[] files = fs.globStatus(new Path(pathDir.toString() + "/part-*"));
    for(FileStatus file:files) {
      if(file.getLen() > 0){
        FSDataInputStream in = fs.open(file.getPath());
        BufferedReader bin = new BufferedReader(new InputStreamReader(in));
        for(;;) {
          String s = bin.readLine();
          if (null == s) break;
         
          // Split on the first tab only: "key<TAB>value". Lines without a tab
          // are stored as value-only documents.
          String[] keyValue = s.split("\t", 2);
          BasicDBObject dbo = new BasicDBObject();
          if (keyValue.length > 1) {
            dbo.put("key", keyValue[0]);
            dbo.put("value", keyValue[1]);
          }
          else {
            dbo.put("value", keyValue[0]);             
          }
          dbl.add(dbo);
        }
        in.close();
      }
    }
    return dbl;
View Full Code Here

    return dbl;
  }//TESTED

  public static BasicDBList getBsonFromSequenceFile(CustomMapReduceJobPojo cmr, int nLimit, String fields) throws SAXException, IOException, ParserConfigurationException {

    BasicDBList dbl = new BasicDBList();
   
    PropertiesManager props = new PropertiesManager();
    Configuration conf = getConfiguration(props);   
   
    Path pathDir = HadoopUtils.getPathForJob(cmr, conf, false);
   
    @SuppressWarnings({ "unchecked", "rawtypes" })
    SequenceFileDirIterable<? extends Writable, ? extends Writable> seqFileDir =
      new SequenceFileDirIterable(pathDir, PathType.LIST, PathFilters.logsCRCFilter(), conf);


    // Very basic, only allow top level, 1 level of nesting, and field removal
    HashSet<String> fieldLookup = null;
    if (null != fields) {
      fieldLookup = new HashSet<String>();
      String[] fieldArray = fields.split(",");
      for (String field: fieldArray) {
        String[] fieldDecomp = field.split(":");
        fieldLookup.add(fieldDecomp[0]);
      }
    }//TOTEST
   
    int nRecords = 0;
    for (Pair<? extends Writable, ? extends Writable> record: seqFileDir) {
      BasicDBObject element = new BasicDBObject();
     
      // KEY
     
      Writable key = record.getFirst();
      if (key instanceof org.apache.hadoop.io.Text) {
        org.apache.hadoop.io.Text writable = (org.apache.hadoop.io.Text)key;
        element.put("key", writable.toString());                               
      }
      else if (key instanceof org.apache.hadoop.io.DoubleWritable) {
        org.apache.hadoop.io.DoubleWritable writable = (org.apache.hadoop.io.DoubleWritable)key;
        element.put("key", Double.toString(writable.get()));               
      }
      else if (key instanceof org.apache.hadoop.io.IntWritable) {
        org.apache.hadoop.io.IntWritable writable = (org.apache.hadoop.io.IntWritable)key;
        element.put("key", Integer.toString(writable.get()));       
      }
      else if (key instanceof org.apache.hadoop.io.LongWritable) {
        org.apache.hadoop.io.LongWritable writable = (org.apache.hadoop.io.LongWritable)key;
        element.put("key", Long.toString(writable.get()));
      }
      else if (key instanceof BSONWritable) {
        element.put("key", MongoDbUtil.convert((BSONWritable)key));
      }
     
      // VALUE

      Writable value = record.getSecond();
      if (value instanceof org.apache.hadoop.io.Text) {
        org.apache.hadoop.io.Text writable = (org.apache.hadoop.io.Text)value;
        element.put("value", writable.toString());                               
      }
      else if (value instanceof org.apache.hadoop.io.DoubleWritable) {
        org.apache.hadoop.io.DoubleWritable writable = (org.apache.hadoop.io.DoubleWritable)value;
        element.put("value", Double.toString(writable.get()));               
      }
      else if (value instanceof org.apache.hadoop.io.IntWritable) {
        org.apache.hadoop.io.IntWritable writable = (org.apache.hadoop.io.IntWritable)value;
        element.put("value", Integer.toString(writable.get()));       
      }
      else if (value instanceof org.apache.hadoop.io.LongWritable) {
        org.apache.hadoop.io.LongWritable writable = (org.apache.hadoop.io.LongWritable)value;
        element.put("value", Long.toString(writable.get()));
      }
      else if (value instanceof BSONWritable) {
        element.put("value", MongoDbUtil.convert((BSONWritable)value));
      }
      else if (value instanceof org.apache.mahout.math.VectorWritable) {
        Vector vec = ((org.apache.mahout.math.VectorWritable)value).get();
        BasicDBList dbl2 = listFromMahoutVector(vec, "value", element);
        element.put("value", dbl2);         
      }
      else if (value instanceof org.apache.mahout.clustering.classify.WeightedVectorWritable) {
        org.apache.mahout.clustering.classify.WeightedVectorWritable vecW = (org.apache.mahout.clustering.classify.WeightedVectorWritable)value;
        element.put("valueWeight", vecW.getWeight());
        BasicDBList dbl2 = listFromMahoutVector(vecW.getVector(), "value", element);
        element.put("value", dbl2);         
      }
      else if (value instanceof org.apache.mahout.clustering.iterator.ClusterWritable) {
        Cluster cluster = ((org.apache.mahout.clustering.iterator.ClusterWritable)value).getValue();
        BasicDBObject clusterVal = new BasicDBObject();
View Full Code Here

 
  private static BasicDBList listFromMahoutVector(Vector vec, String prefix, BasicDBObject element) {
    if (vec instanceof NamedVector) {
      element.put(prefix + "Name", ((NamedVector)vec).getName());
    }
    BasicDBList dbl2 = new BasicDBList();
    if (vec.isDense()) {
      int nSize = vec.size();
      dbl2.ensureCapacity(nSize);
      for (int i = 0; i < nSize; ++i) {
        dbl2.add(vec.getQuick(i));           
      }
    }
    else { // sparse, write as a set in the format [{int:double}]
      Iterator<org.apache.mahout.math.Vector.Element> elIt = vec.iterateNonZero();
      while (elIt.hasNext()) {
        BasicDBObject el2 = new BasicDBObject();
        org.apache.mahout.math.Vector.Element el = elIt.next();
        el2.put("k", el.index());
        el2.put("v", el.get());
        dbl2.add(el2);
      }
    }
    return dbl2;
  }
View Full Code Here

      query = "{}";
    DBObject dbo = null;
    try
    {
      dbo = (DBObject) com.mongodb.util.JSON.parse(query);
      BasicDBList dbl = (BasicDBList)dbo;
      //is a list
      if ( querySpec == QuerySpec.QUERY )
      {
        return dbl.get(0).toString();
      }
      else if ( querySpec == QuerySpec.POSTPROC )
      {
        if ( dbl.size() > 1 ) {
          if (null == dbl.get(1)) // (only query and fields are specified)
            return null;
          else
            return dbl.get(1).toString();
        }
        else
          return null;
      }
      else if ( querySpec == QuerySpec.INPUTFIELDS )
      {
        if ( dbl.size() > 2 )
          return dbl.get(2).toString();
        else
          return null;
      }
      else
        return null;
View Full Code Here

TOP

Related Classes of com.mongodb.BasicDBList

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.