// carry over ontology-level metadata (comment + version) to the target ontology
target.addComment(s);
target.addVersion(getVersion());
// if the ontology is already loaded in memory, copy every class directly
if(isLoaded()){
    IResourceIterator it = getAllClasses();
    while(it.hasNext()){
        BClass b = (BClass) it.next();
        // copy content
        copyClass(b,target);
        // do not dispose of content — the loaded ontology still owns these classes
    }
}else{
    // otherwise fetch classes page-by-page from the BioPortal REST API,
    // resuming after the last successfully processed page if one was persisted
    int lastPageCount = 0;
    if(importProps.containsKey(LAST_PROCESSED_PAGE)){
        // NOTE(review): the page counter 'i' is declared before this fragment — TODO confirm its initial value
        i = Integer.parseInt(importProps.getProperty(LAST_PROCESSED_PAGE,"1"))+1;
        lastPageCount = Integer.parseInt(importProps.getProperty(LAST_PAGE_COUNT,"1"));
        // if the next page is greater than the recorded page count, the import already finished
        if(i > lastPageCount)
            return;
    }
    do {
        pcs.firePropertyChange(ONTOLOGY_LOADING_EVENT,null,"Requesting Page "+i+" ...");
        long time = System.currentTimeMillis();
        // request one page of class beans for this ontology from the repository
        Document doc = parseXML(openURL(repository.getURL()+CONCEPTS+"/"+getId()+ALL+"?pagesize="+pagesize+"&pagenum="+i+"&"+repository.getAPIKey()));
        if(doc != null){
            // on the first successful page, read the real page/result totals from the
            // response's <page> element ('pagecount' starts in its default state of 1)
            if(pagecount == 1){
                Element page = getElementByTagName(doc.getDocumentElement(),"page");
                Element npages = getElementByTagName(page,"numPages");
                pagecount = Integer.parseInt(npages.getTextContent().trim());
                Element ntotal = getElementByTagName(page,"numResultsTotal");
                total = Integer.parseInt(ntotal.getTextContent().trim());
                pcs.firePropertyChange(ONTOLOGY_LOADING_EVENT,null,"Page Size: "+pagesize+", Total Number of Classes: "+total+" ...");
                // NOTE(review): new Integer(...) is deprecated since Java 9; Integer.valueOf(...) is preferred
                pcs.firePropertyChange(ONTOLOGY_PAGE_COUNT,null,new Integer(pagecount));
                pcs.firePropertyChange(ONTOLOGY_PAGE_SIZE,null,new Integer(pagesize));
                pcs.firePropertyChange(ONTOLOGY_CLASS_COUNT,null,new Integer(total));
                // release DOM references eagerly to help GC on large imports
                page = null;
                npages = null;
                ntotal = null;
            }
            // if the persisted page count no longer matches the server's, the result set
            // changed between runs and the partial import is stale — restart from scratch
            if(lastPageCount > 0 && lastPageCount != pagecount){
                // NOTE(review): 'continue' in a do-while jumps straight to the '++i <= pagecount'
                // check, so i becomes 2 and page 1 is never re-fetched; 'lastPageCount' is also
                // never reset, so this branch re-triggers on every iteration — this looks like an
                // infinite loop that never restarts the import. Verify and fix upstream.
                i = 1;
                continue;
            }
            // now walk the result set and copy each returned class bean
            Element result = getElementByTagName(doc.getDocumentElement(),"classBeanResultList");
            for(Element e: getElementsByTagName(result,"classBean")){
                // get top level class that was returned
                BClass b = new BClass(this,e);
                // only copy genuine classes (skip properties and other resource types)
                if(TYPE_CLASS.equalsIgnoreCase(b.getResourceType())){
                    // copy content
                    copyClass(b,target);
                }
                // dispose immediately to keep the memory footprint of a page bounded
                disposeClass(b);
            }
            // dispose of any classes still cached in classMap (should not be many);
            // copy the values first since dispose presumably mutates classMap — TODO confirm
            for(BClass b: new ArrayList<BClass>(classMap.values())){
                b.dispose();
            }
            classMap.clear();
            result = null;
            doc = null;
            // persist the target and nudge the collector — deliberate memory-pressure
            // management for very large ontologies
            target.flush();
            System.gc();
            pcs.firePropertyChange(ONTOLOGY_LOADING_EVENT,null,"Processed Page "+i+"/"+pagecount+" ("+(System.currentTimeMillis()-time)+" ms) ...");
            pcs.firePropertyChange(ONTOLOGY_PROCESSED_PAGE,null,new Integer(i));
            // record the page just processed so an interrupted import can resume here
            importProps.setProperty(LAST_PROCESSED_PAGE, ""+i);
            importProps.setProperty(LAST_PAGE_COUNT, ""+pagecount);
            storeImportProperties(target,importProps);
        }
        else{
            // a null document means the request/parse failed
            throw new IOntologyError("Bioportal did what it does best");
        }
    }while(++i <= pagecount);
}
pcs.firePropertyChange(ONTOLOGY_LOAD_STAGE,null,ONTOLOGY_LOAD_STAGE_BUILDHIERARCHY);
pcs.firePropertyChange(ONTOLOGY_LOADING_EVENT,null,"Creating Hierarchy ...");
// ensure the part-of / has-part object properties exist in the target,
// each with the ontology root as both domain and range
IProperty partOf = target.getProperty(PART_OF);
if(partOf == null){
    partOf = target.createProperty(PART_OF,IProperty.OBJECT);
    partOf.setDomain(new IClass [] {target.getRoot()});
    partOf.setRange(new IClass [] {target.getRoot()});
}
IProperty hasPart = target.getProperty(HAS_PART);
if(hasPart == null){
    hasPart = target.createProperty(HAS_PART,IProperty.OBJECT);
    hasPart.setDomain(new IClass [] {target.getRoot()});
    hasPart.setRange(new IClass [] {target.getRoot()});
}
// declare them as inverses of each other
hasPart.setInverseProperty(partOf);
partOf.setInverseProperty(hasPart);
// now that all classes exist, build the tree. During download only flat temporary
// annotation properties (SUPER_CLASS, SUB_CLASS, EQUIVALENT_CLASS, DISJOINT_CLASS,
// annotation_part-of/has-part) were recorded on each class; resolve them here into
// real hierarchy, equivalence, disjointness and restriction links.
IResourceIterator it = target.getAllClasses();
while(it.hasNext()){
    IClass cls = (IClass) it.next();
    // NOTE(review): 'time' is only used by the commented-out debug logging below; candidate for removal
    long time = System.currentTimeMillis();
    // resolve equivalent classes from the temporary EQUIVALENT_CLASS annotation
    IProperty p = target.getProperty(EQUIVALENT_CLASS);
    if(p != null){
        for(IClass sibling: getClassList(target,cls.getPropertyValues(p))){
            cls.addEquivalentClass(sibling);
        }
        // remove temporary property values now that they are materialized
        cls.removePropertyValues(p);
    }
    // resolve direct superclasses from the temporary SUPER_CLASS annotation
    p = target.getProperty(SUPER_CLASS);
    if(p != null){
        for(IClass parent: getClassList(target,cls.getPropertyValues(p))){
            if(!cls.hasDirectSuperClass(parent)){
                // promote the annotation value to a true superclass link
                cls.addSuperClass(parent);
                // if the superclass is not root, also register the reverse (child) link
                // and detach from root — every class starts attached to root by default
                if(!parent.equals(target.getRoot())){
                    parent.addSubClass(cls);
                    // since everything was added as root, cleanup
                    cls.removeSuperClass(target.getRoot());
                }
            }
        }
        // remove temporary property values now that they are materialized
        cls.removePropertyValues(p);
    }
    // resolve direct subclasses from the temporary SUB_CLASS annotation
    p = target.getProperty(SUB_CLASS);
    if(p != null){
        for(IClass child: getClassList(target,cls.getPropertyValues(p))){
            if(!cls.hasDirectSubClass(child)){
                // promote the annotation value to a true subclass link (both directions)
                cls.addSubClass(child);
                child.addSuperClass(cls);
                // detach the child from root once it has a real parent
                if(child.hasDirectSuperClass(target.getRoot())){
                    // since everything was added as root, cleanup
                    child.removeSuperClass(target.getRoot());
                }
            }
        }
        // remove temporary property values now that they are materialized
        cls.removePropertyValues(p);
    }
    // resolve disjoint classes from the temporary DISJOINT_CLASS annotation
    p = target.getProperty(DISJOINT_CLASS);
    if(p != null){
        for(IClass sibling: getClassList(target,cls.getPropertyValues(p))){
            cls.addDisjointClass(sibling);
        }
        // remove temporary property values now that they are materialized
        cls.removePropertyValues(p);
    }
    // convert part-of / has-part annotations into existential restrictions:
    // cls SubClassOf (relation some sibling)
    for(String relation: new String [] {PART_OF, HAS_PART}){
        p = target.getProperty("annotation_"+relation);
        if(p != null){
            for(IClass sibling: getClassList(target,cls.getPropertyValues(p))){
                IRestriction r = target.createRestriction(IRestriction.SOME_VALUES_FROM);
                r.setProperty(target.getProperty(relation));
                r.setParameter(sibling.getLogicExpression());
                cls.addNecessaryRestriction(r);
            }
            // remove temporary property values now that they are materialized
            cls.removePropertyValues(p);
        }
    }
    // fire a progress event roughly once per 'pagesize' classes
    // NOTE(review): the original comment claimed "every 1000 actions" — the actual interval is pagesize
    if(it.getCount() % pagesize == 1){
        pcs.firePropertyChange(ONTOLOGY_PROCESSED_CLASS,null,new Integer(it.getCount()));
        pcs.firePropertyChange(ONTOLOGY_LOADING_EVENT,null,"Processing Hierarchy "+it.getCount()+"/"+total+" ...");
    }
}
// final progress event once the whole hierarchy pass is done
pcs.firePropertyChange(ONTOLOGY_PROCESSED_CLASS,null,new Integer(it.getCount()));
pcs.firePropertyChange(ONTOLOGY_LOADING_EVENT,null,"Processing Hierarchy "+it.getCount()+"/"+total+" ...");
// finally, strip the temporary helper annotation properties now that the
// hierarchy and restrictions have been materialized
// (loop body continues past this fragment)
for(String p: new String []{SUB_CLASS,SUPER_CLASS,DISJOINT_CLASS,EQUIVALENT_CLASS,
        "annotation_"+PART_OF,"annotation_"+HAS_PART}){
    disposeProperty(target,p);