package imageCrawler.MainCode;
import imageCrawler.FileProcessing.ImageConstants;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.concurrent.*; //for the thread pool and concurrent execution
import Infrastructure.DAO;
/**
* @author Sattypro
* @since 1.0.0.0
*/
public class StartCrawler {
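/**
* Native method, implemented in the "Image" library loaded in the static block below.
* Per the surrounding code it computes the image-mean value that is stored via
* DAO.updateImageMeanInfo for the image at the given path.
* @param source path of the image file to analyse
* @return the computed image mean
*/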
public native double imageCompareResult(String source);
/**
* @param args command-line arguments (not used)
*/
public static void main(String[] args) throws Exception {
//Creating the thread pool as a fixed thread pool
/*
* We use a fixed thread pool because an important advantage of it is that applications
* using it degrade gracefully. To understand this, consider a web server application where
* each HTTP request is handled by a separate thread. If the application simply creates a new
* thread for every new HTTP request, and the system receives more requests than it can handle
* immediately, the application will suddenly stop responding to all requests when the overhead
* of all those threads exceeds the capacity of the system. With a limit on the number of
* threads that can be created, the application will not service HTTP requests as quickly as
* they come in, but it will service them as quickly as the system can sustain.
* Refer for details:
* http://docs.oracle.com/javase/tutorial/essential/concurrency/pools.html
*/
System.out.println("Lets Start Crawlling the Web");
ExecutorService es = Executors.newFixedThreadPool(4);
try{
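//Load the seed (base) URLs and their crawl depths from the XML configuration file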
LoadBaseURLSList load = new LoadBaseURLSList();
ArrayList<URLModel> urlList = load.getBaseURLList("config/LoadBaseURLs.xml");
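//Submit one Worker per base URL; each worker crawls its URL down to the configured depth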
for (URLModel model:urlList)
{
System.out.println(model.getPath() );
es.execute(new Worker(model.getPath(),model.getCrawlDepth()));
}
}catch(Exception ex)
{
System.out.println("Error Occured While Loading File"+ex.getMessage());
}
finally
{
es.shutdown();
System.out.println("Finalizing and Waiting For all the Threads to finish the job......");
while(!es.isTerminated()){};
System.out.println("Done Crawling And Got Images Database Ready!!! :-) Good Day Ahead");
}
System.out.println("Started Calculating the Image Mean");
//Calculating the Mean of the Images and updating the database
DAO d= new DAO();
ResultSet rs=null;
int count=0;
StartCrawler start=new StartCrawler();
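//Page through the images table: getImages(count) appears to return the next batch of rows
//starting at offset 'count', so the loop keeps fetching until no further rows come back.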
boolean hasMoreImages = false;
do{
hasMoreImages = false;
rs = d.getImages(count);
if(rs == null) break; //treat a null result the same as "no more images"
while(rs.next()) {
hasMoreImages = true;
int id = rs.getInt("id");
String imageName = rs.getString("ImageName");
System.out.println("ID:" + id + " Name:" + imageName);
try
{
String extension = imageName.substring(imageName.lastIndexOf('.') + 1).trim();
//Only hand still-image formats to the native library; GIFs are skipped
if(!extension.equalsIgnoreCase("gif") && (extension.equalsIgnoreCase("jpg") || extension.equalsIgnoreCase("jpeg")
|| extension.equalsIgnoreCase("png") || extension.equalsIgnoreCase("bmp")))
{
//Compute the mean once and reuse it for both the database update and the log output
double mean = start.imageCompareResult(ImageConstants.SAVELOCATION + "\\" + imageName);
d.updateImageMeanInfo(id, mean);
System.out.println(mean);
}
}catch(Exception ex)
{
ex.printStackTrace();
}
count++; //advance past this row even if processing it failed, so the same batch is not refetched forever
}
}while(hasMoreImages); //stop once getImages(count) returns no further rows instead of spinning on rs != null
}
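//Load the native image-processing library (Image.dll / libImage.so) that backs imageCompareResult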
static {
System.loadLibrary("Image");
}
}