Package: org.hibernate.search

Usage examples of org.hibernate.search.FullTextSession


    tx.commit();
    s.close();
  }

  public void testResultTransformMap() throws Exception {
    FullTextSession s = Search.getFullTextSession( openSession() );
    prepEmployeeIndex( s );

    Transaction tx;
    s.clear();
    tx = s.beginTransaction();
    QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );

    Query query = parser.parse( "dept:ITech" );
    org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
    hibQuery.setProjection(
        "id",
        "lastname",
        "dept",
        FullTextQuery.THIS,
        FullTextQuery.SCORE,
        FullTextQuery.DOCUMENT,
        FullTextQuery.ID
    );

    hibQuery.setResultTransformer( new ProjectionToMapResultTransformer() );

    List transforms = hibQuery.list();
    Map map = ( Map ) transforms.get( 1 );
    assertEquals( "incorrect transformation", "ITech", map.get( "dept" ) );
    assertEquals( "incorrect transformation", 1002, map.get( "id" ) );
    assertTrue( "incorrect transformation", map.get( FullTextQuery.DOCUMENT ) instanceof Document );
    assertEquals(
        "incorrect transformation", "1002", ( ( Document ) map.get( FullTextQuery.DOCUMENT ) ).get( "id" )
    );

    //cleanup
    for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
      s.delete( element );
    }
    tx.commit();
    s.close();
  }
View Full Code Here


    assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5] ).getFields().size() );
    assertEquals( "legacy ID incorrect", 1003, projection[6] );
  }

  public void testLuceneObjectsProjectionWithIterate() throws Exception {
    FullTextSession s = Search.getFullTextSession( openSession() );
    prepEmployeeIndex( s );

    Transaction tx;
    s.clear();
    tx = s.beginTransaction();
    QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );

    Query query = parser.parse( "dept:ITech" );
    org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
    hibQuery.setProjection(
        "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE,
        FullTextQuery.DOCUMENT, FullTextQuery.ID
    );

    int counter = 0;

    for ( Iterator iter = hibQuery.iterate(); iter.hasNext(); ) {
      Object[] projection = ( Object[] ) iter.next();
      assertNotNull( projection );
      counter++;
      assertEquals( "dept incorrect", "ITech", projection[2] );
      assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
      assertTrue( "SCORE incorrect", projection[4] instanceof Float );
      assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
      assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5] ).getFields().size() );
    }
    assertEquals( "incorrect number of results returned", 4, counter );

    //cleanup
    for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
      s.delete( element );
    }
    tx.commit();
    s.close();
  }
View Full Code Here

    tx.commit();
    s.close();
  }

  public void testLuceneObjectsProjectionWithList() throws Exception {
    FullTextSession s = Search.getFullTextSession( openSession() );
    prepEmployeeIndex( s );

    Transaction tx;
    s.clear();
    tx = s.beginTransaction();
    QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );

    Query query = parser.parse( "dept:Accounting" );
    org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
    hibQuery.setProjection(
        "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE,
        FullTextQuery.DOCUMENT, FullTextQuery.ID, FullTextQuery.DOCUMENT_ID
    );

    List result = hibQuery.list();
    assertNotNull( result );

    Object[] projection = ( Object[] ) result.get( 0 );
    assertNotNull( projection );
    assertEquals( "id incorrect", 1001, projection[0] );
    assertEquals( "last name incorrect", "Jackson", projection[1] );
    assertEquals( "dept incorrect", "Accounting", projection[2] );
    assertEquals( "THIS incorrect", "Jackson", ( ( Employee ) projection[3] ).getLastname() );
    assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
    assertTrue( "SCORE incorrect", projection[4] instanceof Float );
    assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
    assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[5] ).getFields().size() );
    assertEquals( "ID incorrect", 1001, projection[6] );
    assertNotNull( "Lucene internal doc id", projection[7] );

    // Change the projection order and null one
    hibQuery.setProjection(
        FullTextQuery.DOCUMENT, FullTextQuery.THIS, FullTextQuery.SCORE, null, FullTextQuery.ID,
        "id", "lastname", "dept", "hireDate", FullTextQuery.DOCUMENT_ID
    );

    result = hibQuery.list();
    assertNotNull( result );

    projection = ( Object[] ) result.get( 0 );
    assertNotNull( projection );

    assertTrue( "DOCUMENT incorrect", projection[0] instanceof Document );
    assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[0] ).getFields().size() );
    assertEquals( "THIS incorrect", projection[1], s.get( Employee.class, ( Serializable ) projection[4] ) );
    assertTrue( "SCORE incorrect", projection[2] instanceof Float );
    assertNull( "BOOST not removed", projection[3] );
    assertEquals( "ID incorrect", 1001, projection[4] );
    assertEquals( "id incorrect", 1001, projection[5] );
    assertEquals( "last name incorrect", "Jackson", projection[6] );
    assertEquals( "dept incorrect", "Accounting", projection[7] );
    assertNotNull( "Date", projection[8] );
    assertNotNull( "Lucene internal doc id", projection[9] );

    //cleanup
    for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
      s.delete( element );
    }
    tx.commit();
    s.close();
  }
View Full Code Here

    tx.commit();
    s.close();
  }

  public void testNonLoadedFieldOptmization() throws Exception {
    FullTextSession s = Search.getFullTextSession( openSession() );
    prepEmployeeIndex( s );

    Transaction tx;
    s.clear();
    tx = s.beginTransaction();
    QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );

    Query query = parser.parse( "dept:Accounting" );
    org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
    hibQuery.setProjection( FullTextQuery.ID, FullTextQuery.DOCUMENT );

    List result = hibQuery.list();
    assertNotNull( result );

    Object[] projection = ( Object[] ) result.get( 0 );
    assertNotNull( projection );
    assertEquals( "id field name not projected", 1001, projection[0] );
    assertEquals(
        "Document fields should not be lazy on DOCUMENT projection",
        "Jackson", ( ( Document ) projection[1] ).getField( "lastname" ).stringValue()
    );
    assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[1] ).getFields().size() );

    // Change the projection order and null one
    hibQuery.setProjection( FullTextQuery.THIS, FullTextQuery.SCORE, null, "lastname" );

    result = hibQuery.list();
    assertNotNull( result );

    projection = ( Object[] ) result.get( 0 );
    assertNotNull( projection );

    assertTrue( "THIS incorrect", projection[0] instanceof Employee );
    assertTrue( "SCORE incorrect", projection[1] instanceof Float );
    assertEquals( "last name incorrect", "Jackson", projection[3] );

    //cleanup
    for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
      s.delete( element );
    }
    tx.commit();
    s.close();
  }
View Full Code Here

    tx.commit();
  }

  public void testProjection() throws Exception {
    FullTextSession s = Search.getFullTextSession( openSession() );
    Transaction tx = s.beginTransaction();
    Book book = new Book(
        1,
        "La chute de la petite reine a travers les yeux de Festina",
        "La chute de la petite reine a travers les yeux de Festina, blahblah"
    );
    s.save( book );
    Book book2 = new Book( 2, "Sous les fleurs il n'y a rien", null );
    s.save( book2 );
    Author emmanuel = new Author();
    emmanuel.setName( "Emmanuel" );
    s.save( emmanuel );
    book.setMainAuthor( emmanuel );
    tx.commit();
    s.clear();
    tx = s.beginTransaction();
    QueryParser parser = new QueryParser( "title", new StopAnalyzer() );

    Query query = parser.parse( "summary:Festina" );
    org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Book.class );
    hibQuery.setProjection( "id", "summary", "mainAuthor.name" );

    List result = hibQuery.list();
    assertNotNull( result );
    assertEquals( "Query with no explicit criteria", 1, result.size() );
    Object[] projection = ( Object[] ) result.get( 0 );
    assertEquals( "id", 1, projection[0] );
    assertEquals( "summary", "La chute de la petite reine a travers les yeux de Festina", projection[1] );
    assertEquals( "mainAuthor.name (embedded objects)", "Emmanuel", projection[2] );

    hibQuery = s.createFullTextQuery( query, Book.class );
    hibQuery.setProjection( "id", "body", "mainAuthor.name" );

    try {
      hibQuery.list();
      fail( "Projecting an unstored field should raise an exception" );
    }
    catch ( SearchException e ) {
      //success
    }


    hibQuery = s.createFullTextQuery( query, Book.class );
    hibQuery.setProjection();
    result = hibQuery.list();
    assertNotNull( result );
    assertEquals( 1, result.size() );
    assertTrue( "Should not trigger projection", result.get( 0 ) instanceof Book );

    hibQuery = s.createFullTextQuery( query, Book.class );
    hibQuery.setProjection( null );
    result = hibQuery.list();
    assertNotNull( result );
    assertEquals( 1, result.size() );
    assertTrue( "Should not trigger projection", result.get( 0 ) instanceof Book );

    query = parser.parse( "summary:fleurs" );
    hibQuery = s.createFullTextQuery( query, Book.class );
    hibQuery.setProjection( "id", "summary", "mainAuthor.name" );
    result = hibQuery.list();
    assertEquals( 1, result.size() );
    projection = ( Object[] ) result.get( 0 );
    assertEquals( "mainAuthor.name", null, projection[2] );

    //cleanup
    for ( Object element : s.createQuery( "from " + Book.class.getName() ).list() ) {
      s.delete( element );
    }
    for ( Object element : s.createQuery( "from " + Author.class.getName() ).list() ) {
      s.delete( element );
    }
    tx.commit();
    s.close();
  }
View Full Code Here

  public void runHSearch() {
    try {
      Query q = getQuery();
      // Search
      FullTextSession ftSession = Search.getFullTextSession( sf.openSession(  ) );
      final FullTextQuery textQuery = ftSession.createFullTextQuery( q, Boat.class )
          .setMaxResults( 100 ).setProjection( "name" );
      long start = System.currentTimeMillis();
      List results = textQuery.list();
      int resultSize = textQuery.getResultSize();
      long totalTime = System.currentTimeMillis() - start;
      ftSession.close();
//      log.error( "HSearch [ Thread-id : " + threadId + " ] Total time taken for search is : " + totalTime + "ms with total no. of matching records : " + resultSize );
      setTime( totalTime );
    }
    catch (ParseException e) {
      log.error( "[ Thread-id : " + threadId + " ] Parse Exception for queryString : " + queryString );
View Full Code Here

   * @throws Exception in case the test fails.
   */
  public void testProperCopy() throws Exception {

    // Assert that the slave index is empty before anything has been indexed.
    FullTextSession fullTextSession = Search.getFullTextSession( getSlaveSession() );
    Transaction tx = fullTextSession.beginTransaction();
    QueryParser parser = new QueryParser( "id", new StopAnalyzer() );
    List result = fullTextSession.createFullTextQuery( parser.parse( "location:texas" ) ).list();
    assertEquals( "No copy yet, fresh index expected", 0, result.size() );
    tx.commit();
    fullTextSession.close();


    // Create an entity on the master and persist it in order to index it.
    Session session = getMasterSession();
    tx = session.beginTransaction();
    SnowStorm sn = new SnowStorm();
    sn.setDate( new Date() );
    sn.setLocation( "Dallas, TX, USA" );
    session.persist( sn );
    tx.commit();
    session.close();

    int waitPeriodMilli = 2010; // a bit more than 2 refresh periods (one master + one slave): 2 * 1 * 1000 + 10ms margin — assumes a 1s refresh period; verify against the test configuration
    Thread.sleep( waitPeriodMilli );

    // Assert that the master has indexed the snowstorm.
    log.info( "Searching master" );
    fullTextSession = Search.getFullTextSession( getMasterSession() );
    tx = fullTextSession.beginTransaction();
    result = fullTextSession.createFullTextQuery( parser.parse( "location:dallas" ) ).list();
    assertEquals( "Original should get one", 1, result.size() );
    tx.commit();
    fullTextSession.close();

    // Assert that the index got copied to the slave as well.
    log.info( "Searching slave" );
    fullTextSession = Search.getFullTextSession( getSlaveSession() );
    tx = fullTextSession.beginTransaction();
    result = fullTextSession.createFullTextQuery( parser.parse( "location:dallas" ) ).list();
    assertEquals( "First copy did not work out", 1, result.size() );
    tx.commit();
    fullTextSession.close();

    // Add a second snowstorm to the master.
    session = getMasterSession();
    tx = session.beginTransaction();
    sn = new SnowStorm();
    sn.setDate( new Date() );
    sn.setLocation( "Chennai, India" );
    session.persist( sn );
    tx.commit();
    session.close();

    Thread.sleep( waitPeriodMilli ); // again wait a bit more than 2 refresh periods (one master / one slave)

    // Assert that the new snowstorm made it into the slave.
    log.info( "Searching slave" );
    fullTextSession = Search.getFullTextSession( getSlaveSession() );
    tx = fullTextSession.beginTransaction();
    result = fullTextSession.createFullTextQuery( parser.parse( "location:chennai" ) ).list();
    assertEquals( "Second copy did not work out", 1, result.size() );
    tx.commit();
    fullTextSession.close();

    // Add a third snowstorm to the master.
    session = getMasterSession();
    tx = session.beginTransaction();
    sn = new SnowStorm();
    sn.setDate( new Date() );
    sn.setLocation( "Melbourne, Australia" );
    session.persist( sn );
    tx.commit();
    session.close();

    Thread.sleep( waitPeriodMilli ); // again wait a bit more than 2 refresh periods (one master / one slave)

    // Once more — assert that the new snowstorm made it into the slave.
    log.info( "Searching slave" );
    fullTextSession = Search.getFullTextSession( getSlaveSession() );
    tx = fullTextSession.beginTransaction();
    result = fullTextSession.createFullTextQuery( parser.parse( "location:melbourne" ) ).list();
    assertEquals( "Third copy did not work out", 1, result.size() );
    tx.commit();
    fullTextSession.close();
  }
View Full Code Here

    }
    wrapped.close();
  }

  public void testDetachedCriteria() throws Exception {
    FullTextSession s = Search.getFullTextSession( openSession( ) );
    DetachedCriteria dc = DetachedCriteria.forClass( Email.class );
    try {
      Criteria c = dc.getExecutableCriteria( s ).setMaxResults( 10 );
      List results = c.list();
    }
    catch( ClassCastException e ) {
      e.printStackTrace( );
      fail(e.toString());
    }
    s.close();
  }
View Full Code Here

  protected void ensureIndexesAreEmpty() {
    if ( "jms".equals( getCfg().getProperty( "hibernate.search.worker.backend" ) ) ) {
      log.debug( "JMS based test. Skipping index emptying" );
      return;
    }
    FullTextSession s = Search.getFullTextSession( openSession() );
    Transaction tx;
    tx = s.beginTransaction();
    for ( Class clazz : getMappings() ) {
      if ( clazz.getAnnotation( Indexed.class ) != null ) {
        s.purgeAll( clazz );
      }
    }
    tx.commit();
    s.close();
  }
View Full Code Here

    Article englishArticle = new Article();
    englishArticle.setLanguage( "en" );
    englishArticle.setText( "acknowledgment" );
    englishArticle.setReferences( references );

    FullTextSession s = Search.getFullTextSession( openSession() );
    Transaction tx = s.beginTransaction();
    s.persist( englishArticle );
    tx.commit();

    tx = s.beginTransaction();

    // at query time we use a standard analyzer. We explicitly search for tokens which can only be found if the
    // right language specific stemmer was used at index time
    QueryParser parser = new QueryParser( "references.text", new StandardAnalyzer() );
    org.apache.lucene.search.Query luceneQuery = parser.parse( "aufeinanderschlug" );
    FullTextQuery query = s.createFullTextQuery( luceneQuery );
    assertEquals( 1, query.getResultSize() );

    parser = new QueryParser( "text", new StandardAnalyzer() );
    luceneQuery = parser.parse( "acknowledg" );
    query = s.createFullTextQuery( luceneQuery );
    assertEquals( 1, query.getResultSize() );

    tx.commit();
    s.close();
  }
View Full Code Here

TOP

Related Classes of org.hibernate.search.FullTextSession

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact: coftware@gmail.com.