package tcg.scada.da;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import org.apache.log4j.Logger;
import org.omg.CORBA.LongHolder;
import tcg.common.LoggerManager;
import tcg.common.util.DetachedThread;
import tcg.scada.cos.CosDpErrorEnum;
import tcg.scada.cos.CosDpQualityEnum;
import tcg.scada.cos.CosSubscriptionItemSeqHolder;
import tcg.scada.cos.CosSubscriptionItemStruct;
import tcg.scada.cos.ICosDataPointClient;
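/**
* A group of datapoint subscriptions served at a fixed update rate.
*
* The group buffers change notifications from the datapoint server and either
* pushes them to the registered CORBA client callback from its worker thread,
* or hands them out on demand via getAllSubscriptionItem() /
* getUpdatedSubscriptionItem().
*
* Minimal usage sketch; the dpServer and client references below are assumed
* to be obtained elsewhere, and client may be null for pull-only usage:
*
* <pre>{@code
* SubscriptionGroup group = new SubscriptionGroup(1, "group-1",
*         SubscriptionGroup.DEF_UPDATE_RATE_MILLIS, client, dpServer);
* group.subscribeAllReal(); // or group.subscribe(dp) for individual points
* group.start();            // worker thread starts pushing changes
* // ...
* group.stop();
* }</pre>
*/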
public class SubscriptionGroup
{
// default update rate in milliseconds
public static final long DEF_UPDATE_RATE_MILLIS = 5000;
// number of consecutive push failures before the client reference is dropped
public static final short DEF_ERROR_THRESHOLD = 3;
private static Logger logger_ = LoggerManager
.getLogger(SubscriptionGroup.class.getName());
private int keyId_ = 0;
private String name_ = "";
private long updateRateMillis_ = DEF_UPDATE_RATE_MILLIS;
private ICosDataPointClient client_ = null;
// parent dpserver
private IDataStore dpServer_ = null;
// queue to store change notifications until they are processed
private LinkedList<IDataPoint> notificationQueue_ = new LinkedList<IDataPoint>();
// list of subscription items
private SubscriptionItemList itemList_ = new SubscriptionItemList();
// optimization: map of items with pending updates
// NOTE: this is just an optimization hashmap; it only holds references to
// items owned by itemList_, so never delete the actual item through it!
private HashMap<Integer, SubscriptionItem> updatesMap_ = new HashMap<Integer, SubscriptionItem>();
// timestamp of the last push/pull of changes
private long lastRetrival_ = 0;
// next updates cycle
private long nextUpdateCycle_ = 0;
// worker thread
private WorkerThread thread_ = null;
// whether any retrieval/push has been performed so far
// boolean isInitialRetrival_ = true;
// whether we use the output value or source value
private boolean useSourceValue_ = false;
// error counter
private int errorCounter_ = 0;
/**
* Constructor.
*
* @param keyId
* - subscription group key-id
* @param name
* - subscription group name
* @param updateRateMillis
* - update rate in milliseconds
* @param client
* - reference to client if available
* @param dpServer
* - parent dpserver
*/
public SubscriptionGroup(int keyId, String name, long updateRateMillis,
ICosDataPointClient client, IDataStore dpServer)
{
keyId_ = keyId;
name_ = name;
updateRateMillis_ = updateRateMillis;
client_ = client;
dpServer_ = dpServer;
// create a new worker thread
thread_ = new WorkerThread();
}
/**
* Whether to subscribe to the output value (false) or the source value (true)
*
* @param value
* - true if we want to subscribe to the source value
*/
public void useSourceValue(boolean value)
{
useSourceValue_ = value;
}
public boolean start()
{
thread_.start();
return true;
}
public boolean stop()
{
thread_.stop(updateRateMillis_);
return true;
}
public boolean isRunning()
{
if (thread_ == null)
return false;
return thread_.isRunning();
}
// notify of data changes (output value)
public void notifyDataPointChange(DataPoint datapoint)
{
// if we subscribe for source value, ignore output value changes
if (useSourceValue_)
return;
// validation
if (datapoint == null)
return;
// just add it into the notification queue
// the subscription agent is responsible for retrieving it and acting on
// it accordingly
synchronized (notificationQueue_)
{
notificationQueue_.push(datapoint);
}
}
// notify of data changes (source value)
public void notifyDataPointSourceChange(DataPoint datapoint)
{
// if we subscribe for output value, ignore source value changes
if (!useSourceValue_)
return;
// validation
if (datapoint == null)
return;
// just add it into the notification queue
// the subscription agent is responsible for retrieving it and acting on
// it accordingly
synchronized (notificationQueue_)
{
notificationQueue_.push(datapoint);
}
}
// add subscription
public synchronized CosDpErrorEnum subscribe(IDataPoint dp)
{
if (dp == null || dp.getKeyId() == 0)
{
return CosDpErrorEnum.ErrInvalidSubscriptionItem;
}
// make sure there is no duplicate entry
if (itemList_.getByKeyId(dp.getKeyId()) != null)
{
// duplicate entry
return CosDpErrorEnum.ErrDuplicateSubscriptionItem;
}
// create new subscription item
SubscriptionItem item = new SubscriptionItem();
item.datapoint = dp;
// insert into the list
itemList_.add(item);
// force update this datapoint for the first time
synchronized (notificationQueue_)
{
notificationQueue_.push(dp);
item.isUpdated = true;
}
return CosDpErrorEnum.ErrNoError;
}
// delete subscription
public synchronized CosDpErrorEnum unsubscribe(IDataPoint dp)
{
if (dp == null || dp.getKeyId() == 0)
{
return CosDpErrorEnum.ErrInvalidSubscriptionItem;
}
// look up the existing subscription item
SubscriptionItem item = itemList_.getByKeyId(dp.getKeyId());
if (item == null)
{
return CosDpErrorEnum.ErrInvalidSubscriptionItem;
}
// delete the subscription item
itemList_.remove(item);
updatesMap_.remove(dp.getKeyId());
return CosDpErrorEnum.ErrNoError;
}
// subscribe to all datapoints hosted in the datapoint server (output points are skipped)
public synchronized int subscribeAll()
{
// validation
if (dpServer_ == null)
{
return 0;
}
IDataPoint dp = null;
SubscriptionItem item = null;
int counter = 0;
// use temp queue to reduce the time we lock the main notification queue
// this way, any update from datapoint server could still go through
LinkedList<SubscriptionItem> tmpQueue = new LinkedList<SubscriptionItem>();
// get the datapoint list from the datapoint server
DataPointList dpList = dpServer_.getDataPointList();
for (int idx = 0; idx < dpList.size(); idx++)
{
dp = dpList.get(idx);
// ignore null pointer and datapoint without keyid
if (dp == null || dp.getKeyId() == 0)
{
continue;
}
// we do not care about the value of output points
if (dp.getType() == EDataPointType.TYPE_OUTPUT)
{
continue;
}
// make sure there is no duplicate entry
if (itemList_.getByKeyId(dp.getKeyId()) != null)
{
// duplicate entry
continue;
}
// create new subscription item
item = new SubscriptionItem();
// copy the value
item.datapoint = dp;
// insert into the list
itemList_.add(item);
// queue it so the initial value gets pushed as a change
tmpQueue.add(item);
counter++;
}
synchronized (notificationQueue_)
{
while (tmpQueue.size() > 0)
{
item = tmpQueue.remove(0);
// considered as changed/updated point
item.isUpdated = true;
notificationQueue_.add(item.datapoint);
}
}
return counter;
}
// subscribe to all real datapoints
// NOTE: a real datapoint here refers to a datapoint that is polled from the
// SWC, not necessarily using the Modbus protocol
public synchronized int subscribeAllReal()
{
// validation
if (dpServer_ == null)
{
return 0;
}
IDataPoint dp = null;
SubscriptionItem item = null;
int counter = 0;
// use temp queue to reduce the time we lock the main notification queue
// this way, any update from dpserver could still go through
LinkedList<SubscriptionItem> tmpQueue = new LinkedList<SubscriptionItem>();
// get the dplist from the datastore
DataPointList dpList = dpServer_.getDataPointList();
for (int idx = 0; idx < dpList.size(); idx++)
{
dp = dpList.get(idx);
// ignore null pointer and datapoint without keyid
if (dp == null || dp.getKeyId() == 0)
{
continue;
}
// only subscribe for real datapoints with proper underlying
// subsystem
if (dp.getType() != EDataPointType.TYPE_REAL
|| dp.getSubsystem() == null)
{
continue;
}
// make sure there is no duplicate entry
if (itemList_.getByKeyId(dp.getKeyId()) != null)
{
// duplicate entry
continue;
}
// create new subscription item
item = new SubscriptionItem();
// copy the value
item.datapoint = dp;
// insert into the list
itemList_.add(item);
// queue it so the initial value gets pushed as a change
tmpQueue.add(item);
counter++;
}
synchronized (notificationQueue_)
{
while (tmpQueue.size() > 0)
{
item = tmpQueue.remove(0);
// considered as changed/updated point
item.isUpdated = true;
notificationQueue_.add(item.datapoint);
}
}
return counter;
}
public String getName()
{
return name_;
}
long getTotalSubscriptionItem()
{
return itemList_.size();
}
IDataStore getDataPointServer()
{
return dpServer_;
}
public synchronized int getAllSubscriptionItem(
CosSubscriptionItemSeqHolder items)
{
int counter = 0;
int size = itemList_.size();
long curtime = Calendar.getInstance().getTimeInMillis();
// allocate sequence
items.value = new CosSubscriptionItemStruct[size];
// copy all values
SubscriptionItem item = null;
for (int idx = 0; idx < size; idx++)
{
item = itemList_.get(idx);
// allocate the struct element (the new array contains only null entries)
items.value[idx] = new CosSubscriptionItemStruct();
if (item == null || item.datapoint == null)
{
// default value
items.value[idx].keyId = 0;
items.value[idx].quality = CosDpQualityEnum.QualityBad;
items.value[idx].value.longValue(0);
items.value[idx].timestamp = 0;
continue;
}
// copy the value.
items.value[idx].keyId = item.datapoint.getKeyId();
if (!useSourceValue_)
{
items.value[idx].quality = item.datapoint.getQuality();
items.value[idx].value = item.datapoint.getValue();
items.value[idx].timestamp = item.datapoint.getTimestamp();
}
else
{
items.value[idx].quality = item.datapoint.getSourceQuality();
items.value[idx].value = item.datapoint.getSourceValue();
items.value[idx].timestamp = item.datapoint
.getSourceTimestamp();
}
// next
counter++;
}
// update timestamp
lastRetrival_ = curtime;
// reset the updated flag
itemList_.resetUpdatedFlag();
// reset the update map
updatesMap_.clear();
return counter;
}
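/**
* Retrieve the subscription items that have changed since the client's last
* retrieval. If the supplied timestamp is 0 or older than the last internal
* retrieval, all items are returned instead.
*
* Minimal client-side polling sketch; how the client obtains the group
* reference is assumed:
*
* <pre>{@code
* LongHolder lastTimestamp = new LongHolder(0);
* CosSubscriptionItemSeqHolder items = new CosSubscriptionItemSeqHolder();
* int count = group.getUpdatedSubscriptionItem(lastTimestamp, items);
* // lastTimestamp.value now holds the retrieval time; reuse it on the next call
* }</pre>
*
* @param timestamp
* - in/out: the client's last retrieval timestamp in milliseconds
* @param items
* - out: holder that receives the updated subscription items
* @return the number of items returned
*/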
public synchronized int getUpdatedSubscriptionItem(LongHolder timestamp,
CosSubscriptionItemSeqHolder items)
{
long curtime = Calendar.getInstance().getTimeInMillis();
long lasttime = timestamp.value;
int idx = 0;
logger_.trace("lastRetrival_: " + lastRetrival_ + "; lastTimestamp: "
+ lasttime);
// if client timestamp is older than internal timestamp
if (lasttime == 0 || lasttime < lastRetrival_)
{
// force retrieval of all items
int total = getAllSubscriptionItem(items);
// return the new retrieval timestamp so the next call can be incremental
timestamp.value = lastRetrival_;
return total;
}
// get the number of updated items
int nUpdated = updatesMap_.size();
// create the container
items.value = new CosSubscriptionItemStruct[nUpdated];
// get all the changes
SubscriptionItem item = null;
Iterator<SubscriptionItem> it = updatesMap_.values().iterator();
while (it.hasNext())
{
item = it.next();
// allocate the struct element (the new array contains only null entries)
items.value[idx] = new CosSubscriptionItemStruct();
if (item == null || item.datapoint == null)
{
// default value
items.value[idx].keyId = 0;
items.value[idx].quality = CosDpQualityEnum.QualityBad;
items.value[idx].value.longValue(0);
items.value[idx].timestamp = 0;
}
else
{
// copy the value.
items.value[idx].keyId = item.datapoint.getKeyId();
if (!useSourceValue_)
{
items.value[idx].quality = item.datapoint.getQuality();
items.value[idx].value = item.datapoint.getValue();
items.value[idx].timestamp = item.datapoint.getTimestamp();
}
else
{
items.value[idx].quality = item.datapoint
.getSourceQuality();
items.value[idx].value = item.datapoint.getSourceValue();
items.value[idx].timestamp = item.datapoint
.getSourceTimestamp();
}
}
// next
idx++;
}
// update timestamp
lastRetrival_ = curtime;
// update returned timestamp
timestamp.value = curtime;
// reset the updated flag
itemList_.resetUpdatedFlag();
// reset the update map
updatesMap_.clear();
logger_.debug(name_ + ": Updated size: " + idx);
return idx;
}
// do work. check for unprocessed change notifications from dpserver
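/**
* Process pending change notifications from the dpserver and, if a client
* callback reference is registered, push the accumulated changes to it.
*
* Hedged sketch of a client-side callback; the method shape below is inferred
* from how the callback is invoked in this method, and the actual generated
* ICosDataPointClient operations interface may differ:
*
* <pre>{@code
* public void cosCallback_OnDataPointValueChange(int groupKeyId,
*         long timestamp, CosSubscriptionItemStruct[] changes)
* {
*     for (CosSubscriptionItemStruct change : changes)
*     {
*         // apply change.keyId, change.value, change.quality, change.timestamp
*     }
* }
* }</pre>
*/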
protected synchronized void doWork()
{
// this protects against a catastrophic event;
// when there is no dpserver configured we should never do any work
// in the first place!
if (dpServer_ == null)
{
return;
}
if (notificationQueue_.size() == 0)
{
// TODO: consider still trying to send empty data as a heartbeat
return;
}
logger_.debug(name_ + "notificationQueue_.size(): "
+ notificationQueue_.size());
// process the notification and send it to client if necessary
long curtime = Calendar.getInstance().getTimeInMillis();
// use temp queue to reduce the time we lock the main notification queue
// this way, any update from dpserver could still go through
LinkedList<IDataPoint> tmpQueue = new LinkedList<IDataPoint>();
// copy the current queue to a temporary queue.
IDataPoint dp = null;
synchronized (notificationQueue_)
{
// use current size.
// this ensures that the queue is finite and the update loop will
// eventually finish
int size = notificationQueue_.size();
for (int i = 0; i < size; i++)
{
// get the front value
dp = notificationQueue_.remove(0);
// put it into the temporary queue
tmpQueue.add(dp);
}
}
// reset processed flag to prevent multiple processing
itemList_.resetProcessedFlag();
// process all change notification
SubscriptionItem item = null;
while (tmpQueue.size() > 0)
{
// get the front value
dp = tmpQueue.remove(0);
if (dp == null || dp.getKeyId() == 0)
{
continue;
}
// get the subscription item
item = itemList_.getByKeyId(dp.getKeyId());
if (item == null)
{
// not in the subscription list
continue;
}
// if it is already processed in this session, skip it
if (item.bProcessed)
{
continue;
}
// update the subscription item through its handle directly; much more
// efficient!
// NOTE: this requires that the handle always refers to the actual datapoint.
// This also raises the question of how to handle BMF datapoints, since they
// can be activated/de-activated according to the operation mode.
// No need to do anything for now, because we keep a direct reference to the
// datapoint.
// mark as updated
item.isUpdated = true;
// mark as processed
item.bProcessed = true;
// insert it into the update map
updatesMap_.put(dp.getKeyId(), item);
}
logger_.debug("# Changes: " + updatesMap_.size());
// if client reference is not _nil(), push the changes
if (client_ != null && updatesMap_.size() > 0)
{
CosSubscriptionItemSeqHolder items = new CosSubscriptionItemSeqHolder();
// get the list of updated subscription items.
// MUST NOT use getUpdatedSubscriptionItem() because it will clear
// updatesMap_
// we want to clear the hash map only on successful push!
// get the number of updated items
int nUpdated = updatesMap_.size();
// create the container
items.value = new CosSubscriptionItemStruct[nUpdated];
// get all the changes
int idx = 0;
Iterator<SubscriptionItem> it = updatesMap_.values().iterator();
while (it.hasNext())
{
item = it.next();
// allocate the struct element (the new array contains only null entries)
items.value[idx] = new CosSubscriptionItemStruct();
if (item == null || item.datapoint == null)
{
// default value
items.value[idx].keyId = 0;
items.value[idx].quality = CosDpQualityEnum.QualityBad;
items.value[idx].value.longValue(0);
items.value[idx].timestamp = 0;
}
else
{
// copy the value.
items.value[idx].keyId = item.datapoint.getKeyId();
if (!useSourceValue_)
{
items.value[idx].quality = item.datapoint.getQuality();
items.value[idx].value = item.datapoint.getValue();
items.value[idx].timestamp = item.datapoint
.getTimestamp();
}
else
{
items.value[idx].quality = item.datapoint
.getSourceQuality();
items.value[idx].value = item.datapoint
.getSourceValue();
items.value[idx].timestamp = item.datapoint
.getSourceTimestamp();
}
}
// next
idx++;
}
// try to push the changes
// TODO: consider pushing the changes in batches of at most 1000
// instead of pushing all changes in one go!
try
{
// push the changes
client_.cosCallback_OnDataPointValueChange(this.keyId_,
curtime, items.value);
// reset the error counter
errorCounter_ = 0;
// if successful, update the retrieval timestamp
lastRetrival_ = curtime;
// reset all updated flag
itemList_.resetUpdatedFlag();
// reset the update map
updatesMap_.clear();
}
catch (Exception ex)
{
// failed to push the changes.
// do not update the retrieval timestamp and do not reset the updated flag
// print out some error
logger_
.warn(name_
+ ": Failed to push subscription item changes. Exception: "
+ ex.toString());
// increase the error counter
errorCounter_++;
// if the push keeps failing, just drop the client reference
if (errorCounter_ > DEF_ERROR_THRESHOLD)
{
client_ = null;
}
}
}
// schedule the next update cycle
nextUpdateCycle_ = curtime + updateRateMillis_;
}
class WorkerThread extends DetachedThread
{
/**
* Constructor
*/
public WorkerThread()
{
precisionMillis = (updateRateMillis_ / 10) + 1;
}
@Override
protected void _onStart()
{
logger_.info("Starting subscription group " + name_ + "...");
}
@Override
protected void _onStop()
{
logger_.info("Terminating subscription group " + name_ + "...");
}
@Override
protected void _initial()
{
logger_.info("Subscription group " + name_ + " is started.");
}
@Override
protected void _doWork()
{
if (nextUpdateCycle_ <= Calendar.getInstance().getTimeInMillis())
{
// logger_.trace("Subscription group " + name_ +
// " worker thread is running.");
doWork();
}
}
@Override
protected void _final()
{
logger_.info("Subscription group " + name_ + " has terminated.");
}
}
}
class SubscriptionItem
{
public boolean isUpdated = false;
public boolean bProcessed = false;
// CosHandle_t clientHandle;
public IDataPoint datapoint = null;
}
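/**
* Ordered list of subscription items with a key-id lookup table.
*
* Minimal usage sketch, assuming an IDataPoint instance dp is available:
*
* <pre>{@code
* SubscriptionItemList list = new SubscriptionItemList();
* SubscriptionItem item = new SubscriptionItem();
* item.datapoint = dp;
* list.add(item);
* SubscriptionItem found = list.getByKeyId(dp.getKeyId());
* }</pre>
*/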
class SubscriptionItemList
{
ArrayList<SubscriptionItem> items_ = new ArrayList<SubscriptionItem>();
// lookup table
HashMap<Integer, SubscriptionItem> keyIdLookup_ = new HashMap<Integer, SubscriptionItem>();
/**
* Clear the list
*/
public void clear()
{
items_.clear();
keyIdLookup_.clear();
}
/**
* Get the size of the list
*
* @return - the current size of the list
*/
public int size()
{
return items_.size();
}
/**
* Reset updated flag for all subscription items
*/
public void resetUpdatedFlag()
{
SubscriptionItem item = null;
for (int i = 0; i < items_.size(); i++)
{
item = items_.get(i);
if (item != null)
{
item.isUpdated = false;
}
}
}
/**
* Reset processed flag for all subscription items
*/
public void resetProcessedFlag()
{
SubscriptionItem item = null;
for (int i = 0; i < items_.size(); i++)
{
item = items_.get(i);
if (item != null)
{
item.bProcessed = false;
}
}
}
/**
* Get read-only list of subscription items
*
* @return - read-only array of subscription items
*/
public final ArrayList<SubscriptionItem> getSubscriptionList()
{
return items_;
}
/**
* Add a new subscription item into the list
*
* @param item
* - the subscription item to insert
* @return the position of the item in the list if successful, -1 otherwise
*/
public int add(SubscriptionItem item)
{
// validation
if (item == null || item.datapoint == null
|| item.datapoint.getKeyId() == 0)
{
return -1;
}
// validation of an existing entry is not performed here;
// it is the caller's responsibility to ensure uniqueness
// add into the list
items_.add(item);
// insert into the lookup
keyIdLookup_.put(item.datapoint.getKeyId(), item);
return items_.size() - 1;
}
public boolean remove(SubscriptionItem item)
{
// validation
if (item == null || item.datapoint == null
|| item.datapoint.getKeyId() == 0)
{
return false;
}
boolean status = items_.remove(item);
if (status)
{
keyIdLookup_.remove(item.datapoint.getKeyId());
}
return status;
}
/**
* Get a subscription item based on the index in the list
*
* @param idx
* - the index
* @return the subscription item if found, null otherwise
*/
public SubscriptionItem get(int idx)
{
// validation
if (idx < 0 || idx >= items_.size())
{
return null;
}
return items_.get(idx);
}
/**
* Get a subscription item by the datapoint key id
*
* @param keyId
* - key id of the datapoint to search
* @return the subscription item if found, null otherwise
*/
public SubscriptionItem getByKeyId(int keyId)
{
return keyIdLookup_.get(keyId);
}
}