Package: com.findwise.hydra.mongodb

Examples of com.findwise.hydra.mongodb.MongoConnector


        if (cmd.getJarFile() != null) {
            File f = new File(cmd.getJarFile().getFilename());

            MongoDBConnectionConfig conf = new MongoDBConnectionConfig(cmd.getJarFile().getPipelinename(), cmd.getHost(), "", "");
            MongoConnector mdc =new MongoConnector(conf.getConfiguration());
            mdc.connect();

            log.info("Uploading jar file");
            File file = new File(cmd.getJarFile().getFilename());
            mdc.getPipelineWriter().save(cmd.getJarFile().getId(), file.getName(), new FileInputStream(f));
        }

        if (cmd.getConfig() != null) {
            PipelineConfiguration pipelineConfig = jsonReader.fromJson(cmd.getConfig());
            List<Stage> stages = stageFactory.createStages(pipelineConfig);

            MongoDBConnectionConfig conf = new MongoDBConnectionConfig(pipelineConfig.getPipelineName(), cmd.getHost(), "", "");
            MongoConnector mdc =new MongoConnector(conf.getConfiguration());
            mdc.connect();

            Pipeline pipeline = mdc.getPipelineReader().getPipeline();
            for (Stage stage : stages) {
                if (cmd.getStageNames() != null) {
                    if (cmd.getStageNames().contains(stage.getName())) {
                        log.info("Preparing to upload stage, " + stage.getName());
                        StageGroup g = new StageGroup(stage.getName());
                        g.addStage(stage);
                        pipeline.addGroup(g);
                    }
                } else {
                    log.info("Preparing to upload stage, " + stage.getName());
                    StageGroup g = new StageGroup(stage.getName());
                    g.addStage(stage);
                    pipeline.addGroup(g);
                }
            }
            log.info("Uploading stages");
            log.info(pipeline.toString());
            mdc.getPipelineWriter().write(pipeline);
        }
    }
View Full Code Here


    }
    if(cmd.hasOption("password")) {
      conf.setDatabasePassword(cmd.getOptionValue("password"));
    }
   
    MongoConnector mdc = new MongoConnector(conf);

    mdc.connect();
   

    if (cmd.hasOption("a")) {
      add(mdc, cmd);
    }
View Full Code Here

    logger.info("Hydra Core creating connector, {}='{}', {}='{}'",
        DatabaseConfiguration.DATABASE_URL_PARAM, coreConfiguration.getDatabaseUrl(),
        DatabaseConfiguration.DATABASE_NAMESPACE, coreConfiguration.getNamespace());
   
    DatabaseConnector<MongoType> backing = new MongoConnector(coreConfiguration);
    try {
      backing.connect();
    } catch (IOException e) {
      logger.error("Unable to start", e);
      return;
    }

    Cache<MongoType> cache;
    if (coreConfiguration.isCacheEnabled()) {
      cache = new MemoryCache<MongoType>();
    } else {
      cache = new NoopCache<MongoType>();
    }

    CachingDocumentNIO<MongoType> caching = new CachingDocumentNIO<MongoType>(
        backing,
        cache,
        coreConfiguration.isCacheEnabled(),
        coreConfiguration.getCacheTimeout());

    NodeMaster<MongoType> nm = new NodeMaster<MongoType>(
        coreConfiguration,
        caching,
        new Pipeline(),
        this);

    server = new RESTServer(coreConfiguration,
        new HttpRESTHandler<MongoType>(
            nm.getDocumentIO(),
            backing.getPipelineReader(),
            null,
            coreConfiguration.isPerformanceLogging()));

    if (!server.blockingStart()) {
      if (server.hasError()) {
View Full Code Here

  private DatabaseConfig databaseConfig;

  @Bean
  @Autowired
  public MongoConnector connector(DatabaseConfig config) {
    return new MongoConnector(config);
  }
View Full Code Here

  @Before
  public void setUp() throws Exception {
    mongoConfiguration = new MongoConfiguration();
    mongoConfiguration.setNamespace("hydra-test-FullScaleIT");
    mongoConnector = new MongoConnector(mongoConfiguration);
    mongoConnector.connect();

    mongoConnector.getDB().dropDatabase();

    // Because I don't trust MongoConnector after the database has been destroyed.
    mongoConnector = new MongoConnector(mongoConfiguration);
    mongoConnector.connect();

    // Initialize core, but don't start until test wants to.
    CoreConfiguration coreConfiguration = new CoreMapConfiguration(mongoConfiguration, new MapConfiguration());
    core = new Main(coreConfiguration);
View Full Code Here

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    CoreConfiguration conf = ConfigurationFactory.getConfiguration(TEST_NAME);
    new MongoClient(new MongoClientURI(conf.getDatabaseUrl())).getDB(TEST_NAME).dropDatabase();
    dbc = new MongoConnector(conf);
   
    ShutdownHandler shutdownHandler = Mockito.mock(ShutdownHandler.class);
   
    Mockito.when(shutdownHandler.isShuttingDown()).thenReturn(false);
   
View Full Code Here

    MongoClient mongo = new MongoClient(new MongoClientURI(mongoConfiguration.getDatabaseUrl()));
    DB db = mongo.getDB(mongoConfiguration.getNamespace());
    WriteConcern concern = mongo.getWriteConcern();
    long documentsToKeep = mongoConfiguration.getOldMaxCount();
    int oldDocsMaxSizeMB = mongoConfiguration.getOldMaxSize();
    StatusUpdater updater = new StatusUpdater(new MongoConnector(mongoConfiguration));
    GridFS documentFs = new GridFS(db);

    MongoDocumentIO io = new MongoDocumentIO(db, concern, documentsToKeep,
      oldDocsMaxSizeMB, updater, documentFs);
    io.prepare();
View Full Code Here

public class RESTServerTest {

  @Test
  public void testBlockingStart() throws IOException, InterruptedException {
    CoreConfiguration conf = ConfigurationFactory.getConfiguration("jUnit-RESTServerTest");
    MongoConnector dbc = new MongoConnector(conf);
    int port = conf.getRestPort();
    RESTServer server1 = new RESTServer(conf, new HttpRESTHandler<MongoType>(dbc));
   
    if(!server1.blockingStart()) {
      server1 = RESTServer.getNewStartedRESTServer(port, new HttpRESTHandler<MongoType>(dbc));
    }
    System.out.println("Started server 1 on port "+port);
    RESTServer server2 = new RESTServer(conf, new HttpRESTHandler<MongoType>(dbc));
    if(server2.blockingStart()) {
      System.out.println("We are failing on port "+server2.getPort());
      Thread.sleep(1000);
      System.out.println("1 alive: "+server1.isAlive());
      System.out.println("2 alive: "+server2.isAlive());
      System.out.println("1 hasError: "+server1.hasError());
      System.out.println("2 hasError: "+server2.hasError());
      System.out.println("1 getError: "+server1.getError());
      System.out.println("2 getError: "+server2.getError());
      System.out.println("1 isExecuting: "+server1.isExecuting());
      System.out.println("2 isExecuting: "+server2.isExecuting());
     
      fail("blockingStart() returned true when port should already be taken");
    }
    server2 = RESTServer.getNewStartedRESTServer(port, new HttpRESTHandler<MongoType>(new MongoConnector(conf)));
    System.out.println("Restarted on port "+server2.getPort());
    server2 = RESTServer.getNewStartedRESTServer(port, new HttpRESTHandler<MongoType>(new MongoConnector(conf)));
    System.out.println("Restarted on port "+server2.getPort());
    server1.shutdown();
    server2.shutdown();
  }
View Full Code Here

  }

  @Test
  public void testShutdown() throws IOException, InterruptedException {
    CoreConfiguration conf = ConfigurationFactory.getConfiguration("jUnit-RESTServerTest");
    RESTServer server = RESTServer.getNewStartedRESTServer(conf.getRestPort(), new HttpRESTHandler<MongoType>(new MongoConnector(conf)));
    server.shutdown();
    Thread.sleep(1000);
    if(server.isAlive()) {
      fail("Thread should be dead");
    }
View Full Code Here

import com.findwise.hydra.mongodb.MongoConnector;

public class StraightPipelineSetup {
  public static void main(String[] args) throws Exception {
    MongoConnector mdc = new MongoConnector(new TestConfiguration());

    mdc.connect();
   
    Object outId = addFile(mdc, "hydra-out-jar-with-dependencies.jar");
    Object basicId = addFile(mdc, "basic-stages-jar-with-dependencies.jar");
   
   
   
   
    Pipeline c = new Pipeline();
    Stage s = getStage(c, basicId, "copyStage1", "stage.CopyStage");
    Map<String, Object> map = s.getProperties();
    map.put("map", getSingleMap("in", "out1"));
    s.setProperties(map);
   
    s = getStage(c, basicId, "copyStage2", "stage.CopyStage", "copyStage1");
    map = s.getProperties();
    map.put("map", getSingleMap("out1", "out2"));
    s.setProperties(map);
   
    s = getStage(c, basicId, "copyStage3", "stage.CopyStage", "copyStage2");
    map = s.getProperties();
    map.put("map", getSingleMap("out2", "out3"));
    s.setProperties(map);
   
    s = getStage(c, outId, "solrOutput", "output.solr.SolrOutputStage", "copyStage3");
    map = s.getProperties();
    HashMap<String, String> fieldMap = new HashMap<String, String>();
    fieldMap.put("out1", "out1_s");
    fieldMap.put("out2", "out2_s");
    fieldMap.put("out3", "out3_s");
    fieldMap.put("in", "in_s");
    fieldMap.put("id", "id");
    map.put("fieldMappings", fieldMap);
    map.put("solrDeployPath", "http://127.0.0.1:8983/solr");
    s.setProperties(map);
   
    mdc.getPipelineWriter().write(c);
   
    System.out.println("Posted your stages into Hydra");
   
  }
View Full Code Here

TOP

Related Classes of com.findwise.hydra.mongodb.MongoConnector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.