Package org.apache.hadoop.gateway.topology

Examples of org.apache.hadoop.gateway.topology.Topology


    public List<Property> properties() {
        return properties;
    }

    public Topology build() {
        Topology topology = new Topology();
        PropertyInterpreter propertyInterpreter = new PropertyInterpreter(topology);
        for (Property property : properties) {
            try {
                propertyInterpreter.interpret(property.getName(), property.getValue());
            } catch (InterpretException ie) {
                // Malformed property names or values surface as IllegalArgumentException
                // (see the @Test(expected = IllegalArgumentException.class) example further down this page).
                throw new IllegalArgumentException("Failed to interpret property " + property.getName(), ie);
            }
        }
        return topology;
    }
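
A minimal usage sketch of the builder above, pieced together from the property names exercised by the tests further down this page (the names and values come from those tests; the classes are assumed to be on the classpath exactly as in the excerpts):

    PropertyTopologyBuilder builder = new PropertyTopologyBuilder();
    builder.addProperty( new Property( "topology.name", "topology" ) );
    builder.addProperty( new Property( "topology.gateway.provider.authentication.ShiroProvider.enabled", "true" ) );
    Topology topology = builder.build();
    // A malformed property name causes build() to throw IllegalArgumentException.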


  private static Topology loadTopology( File file ) throws IOException, SAXException, URISyntaxException, InterruptedException {
    final long TIMEOUT = 250; //ms
    final long DELAY = 50; //ms
    log.loadingTopologyFile( file.getAbsolutePath() );
    Topology topology;
    long start = System.currentTimeMillis();
    while( true ) {
      try {
        topology = loadTopologyAttempt( file );
        break;
      } catch( IOException | SAXException e ) {
        // The excerpt elides the retry handling; the TIMEOUT/DELAY constants and the
        // declared InterruptedException imply a short retry loop along these lines,
        // covering the case where the file is still being written.
        if( System.currentTimeMillis() - start < TIMEOUT ) {
          Thread.sleep( DELAY );
        } else {
          throw e;
        }
      }
    }
    return topology;
  }

  private static Topology loadTopologyAttempt( File file ) throws IOException, SAXException, URISyntaxException {
    Topology topology;
    Digester digester = digesterLoader.newDigester();
    TopologyBuilder topologyBuilder = digester.parse( FileUtils.openInputStream( file ) );
    topology = topologyBuilder.build();
    topology.setUri( file.toURI() );
    topology.setName( FilenameUtils.removeExtension( file.getName() ) );
    topology.setTimestamp( file.lastModified() );
    return topology;
  }
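
The digesterLoader used above is not part of this excerpt. Presumably it is an Apache Commons Digester 3 loader bound to the project's topology XML rules; a hedged sketch of that wiring, where the KnoxFormatXmlTopologyRules name is an assumption suggested by the "knox-format" resource parsed in the test near the end of this page:

    // Assumption: a commons-digester3 loader configured with the Knox-format XML rules module.
    private static final DigesterLoader digesterLoader =
        DigesterLoader.newLoader( new KnoxFormatXmlTopologyRules() );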

    // Go through the new topologies and figure out what was updated vs added.
    for( File file : newTopologies.keySet() ) {
      if( oldTopologies.containsKey( file ) ) {
        Topology oldTopology = oldTopologies.get( file );
        Topology newTopology = newTopologies.get( file );
        if( newTopology.getTimestamp() > oldTopology.getTimestamp() ) {
          events.add( new TopologyEvent( TopologyEvent.Type.UPDATED, newTopologies.get( file ) ) );
        }
      } else {
        events.add( new TopologyEvent( TopologyEvent.Type.CREATED, newTopologies.get( file ) ) );
      }
    }
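
The loop above covers the UPDATED and CREATED cases only; the natural counterpart is detecting removals by walking the old map. A minimal sketch, assuming TopologyEvent.Type also defines a DELETED constant:

    // Files that existed before but are absent from the new scan were deleted.
    for( File file : oldTopologies.keySet() ) {
      if( !newTopologies.containsKey( file ) ) {
        events.add( new TopologyEvent( TopologyEvent.Type.DELETED, oldTopologies.get( file ) ) );
      }
    }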

  @Test
  public void testBuildSuccessfulForTopologyProperty() {
    PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
    propertyTopologyBuilder.addProperty( new Property( "topology.name", "topology" ) );
    Topology topology = propertyTopologyBuilder.build();

    assertThat( topology, notNullValue() );
  }

  @Test
  public void testBuildSuccessfulForProviderProperty() {
    PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
    propertyTopologyBuilder.addProperty( new Property( "topology.gateway.provider.authentication.ShiroProvider.enabled", "value" ) );
    Topology topology = propertyTopologyBuilder.build();

    assertThat( topology, notNullValue() );
    assertThat( topology.getProviders().size(), is( 1 ) );
    assertThat( topology.getProviders().iterator().next().isEnabled(), is( false ) );
  }
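
The final assertion may look surprising for a property named ...enabled: presumably the interpreter parses the value as a boolean, so the literal string "value" leaves the provider disabled. A small sketch under that assumption (not taken from the original tests):

    PropertyTopologyBuilder builder = new PropertyTopologyBuilder();
    builder.addProperty( new Property( "topology.gateway.provider.authentication.ShiroProvider.enabled", "true" ) );
    // With a value that actually parses as true, the provider would be expected
    // to come back enabled from builder.build().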

  @Test( expected = IllegalArgumentException.class )
  public void testBuildSuccessfulForServiceProperty() {
    PropertyTopologyBuilder propertyTopologyBuilder = new PropertyTopologyBuilder();
    propertyTopologyBuilder.addProperty( new Property( "topology.gateway.service.WEBHDFS..url", "http://host:50070/webhdfs" ) );
    Topology topology = propertyTopologyBuilder.build();

    assertThat( topology, notNullValue() );
  }

  @Test
  public void testEmptyTopology() throws IOException, SAXException, ParserConfigurationException {
    GatewayConfig config = new GatewayConfigImpl();

    Topology topology = new Topology();
    topology.setName( "test-cluster" );

    WebArchive war = DeploymentFactory.createDeployment( config, topology );
    //File dir = new File( System.getProperty( "user.dir" ) );
    //File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
  }

    Provider provider = new Provider();
    provider.setEnabled( true );
    provider.setName( "secure-query" );
    provider.setParams( providerParams );

    Topology topology = new Topology();
    topology.setName("Sample");
   
    DeploymentContext context = EasyMock.createNiceMock( DeploymentContext.class );
//    EasyMock.expect( context.getDescriptor( "rewrite" ) ).andReturn( rewriteRules ).anyTimes();
    EasyMock.expect( context.getWebArchive() ).andReturn( webArchive ).anyTimes();
    EasyMock.expect( context.getTopology() ).andReturn( topology ).anyTimes();

    String name = "org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml";
    URL url = ClassLoader.getSystemResource( name );
    assertThat( "Failed to find URL for resource " + name, url, notNullValue() );
    File file = new File( url.getFile() );
    TopologyBuilder topologyBuilder = digester.parse( url );
    Topology topology = topologyBuilder.build();
    assertThat( "Failed to parse resource " + name, topology, notNullValue() );
    topology.setTimestamp( file.lastModified() );

    assertThat( topology.getName(), is( "topology" ) );
    assertThat( topology.getTimestamp(), is( file.lastModified() ) );
    assertThat( topology.getServices().size(), is( 1 ) );

    Service comp = topology.getServices().iterator().next();
    assertThat( comp, notNullValue() );
    assertThat( comp.getRole(), is( "WEBHDFS" ) );
    assertThat( comp.getUrl(), is( "http://host:80/webhdfs" ) );

    Provider provider = topology.getProviders().iterator().next();
    assertThat( provider, notNullValue() );
    assertThat( provider.isEnabled(), is(true) );
    assertThat( provider.getRole(), is( "authentication" ) );
    assertThat( provider.getParams().size(), is(5));
  }
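
The test grabs the single provider with iterator().next(); when a topology declares several providers, selecting one by role using only the getters shown above is straightforward:

    // Find the authentication provider among possibly many providers.
    Provider authentication = null;
    for( Provider candidate : topology.getProviders() ) {
      if( "authentication".equals( candidate.getRole() ) ) {
        authentication = candidate;
        break;
      }
    }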
