// BlendSpan bs;
// interpLen corresponds to 'T' in the formula (total time), interpOff corresponds to 't' (current time)
long start, interpOff, interpLen, progressLen;
long progress = 0;
// boolean success = false;
AudioStake as;
// if( span.getLength() < 2 ) return DONE;
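// set up the interpolation function from the control points passed in via the client arg; abort if that fails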
if( !initFunctionEvaluation( (Point2D[]) context.getClientArg( "points" ))) return FAILED;
interpLen = span.getLength();
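// process in blocks of at most 4096 frames; warpedTime holds the normalized time values,
// interpBuf the two interpolated channels (presumably the x/y trajectory coordinates)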
warpedTime = new float[(int) Math.min( interpLen, 4096 )];
interpBuf = new float[2][ warpedTime.length ];
// '-1' because the last sample should exactly coincide with the end point of the shape
t_norm = 1.0 / (interpLen - 1);
progressLen = interpLen * collTrns.size();
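// total number of frames to process across all transmitters, used for progress reporting below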
try {
for( i = 0; i < collTrns.size(); i++ ) {
trns = (Transmitter) collTrns.get( i );
at = trns.getAudioTrail();
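// allocate a fresh audio stake covering the span; the interpolated frames are written into it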
as = at.alloc( span );
// bs = at.beginOverwrite( span, bc, edit );
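// step through the span block by block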
for( start = span.getStart(), interpOff = 0; start < span.getStop();
start += len, interpOff += len ) {
len = (int) Math.min( 4096, span.getStop() - start );
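// compute the normalized (possibly warped) time values for this block and evaluate the function into interpBuf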
calcWarpedTime( warpedTime, interpOff * t_norm, t_norm, len );
evaluateFunction( warpedTime, interpBuf, len );
// at.continueWrite( bs, interpBuf, 0, len );
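// if a blend context is given, crossfade between the existing trail data and the interpolated data at the span boundaries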
if( bc != null ) {
at.readFrames( srcBuf, 0, new Span( start, start + len ));
if( interpOff < bc.getLen() ) { // EEE getLen?
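// fade in: blend from the original data (srcBuf) to the interpolated data at the start of the span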
bc.blend( interpOff, srcBuf, 0, interpBuf, 0, interpBuf, 0, len );
}
if( interpLen - (interpOff + len) < bc.getLen() ) { // EEE getLen?
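// fade out: blend from the interpolated data back to the original data at the end of the span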
bc.blend( interpOff - (interpLen - bc.getLen()), interpBuf, 0, srcBuf, 0, interpBuf, 0, len );
}
}
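// write the (possibly blended) block to the newly allocated stake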
as.writeFrames( interpBuf, 0, new Span( start, start + len ));
progress += len;
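// report overall progress (all transmitters combined)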
context.setProgression( (float) progress / (float) progressLen );
}
// at.finishWrite( bs, edit );
at.editBegin( edit );