Package com.opengamma.core.historicaltimeseries

Examples of com.opengamma.core.historicaltimeseries.HistoricalTimeSeries


  @Override
  public Pair<LocalDate, Double> getLatestDataPoint(
      ExternalIdBundle identifiers, LocalDate currentDate, String dataSource, String dataProvider, String dataField,
      LocalDate start, boolean includeStart, LocalDate end, boolean includeEnd) {
    HistoricalTimeSeries hts = doGetHistoricalTimeSeries(identifiers, currentDate, dataSource, dataProvider, dataField, start, includeStart, end, includeEnd, -1);
    if (hts == null || hts.getTimeSeries() == null || hts.getTimeSeries().isEmpty()) {
      return null;
    } else {
      return new ObjectsPair<LocalDate, Double>(hts.getTimeSeries().getLatestTime(), hts.getTimeSeries().getLatestValue());
    }
  }
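
For orientation, a minimal caller-side sketch of the getLatestDataPoint lookup shown above, assuming a HistoricalTimeSeriesSource instance named htsSource; the identifier scheme/value, data source, data provider, data field and dates are placeholder values, not taken from the snippet:

  // Illustrative only: htsSource and all string/date arguments below are placeholders.
  ExternalIdBundle ids = ExternalIdBundle.of(ExternalId.of("EXAMPLE_SCHEME", "EXAMPLE_VALUE"));
  LocalDate currentDate = LocalDate.of(2013, 6, 28);
  LocalDate start = currentDate.minusYears(1);
  Pair<LocalDate, Double> latest = htsSource.getLatestDataPoint(
      ids, currentDate, "EXAMPLE_DATA_SOURCE", "EXAMPLE_DATA_PROVIDER", "PX_LAST",
      start, true, currentDate, true);
  if (latest == null) {
    System.out.println("No data found");
  } else {
    System.out.println("Latest point: " + latest.getFirst() + " = " + latest.getSecond());
  }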


      LocalDate start, boolean includeStart, LocalDate end, boolean includeEnd, Integer maxPoints) {
    HistoricalTimeSeriesKey seriesKey = new HistoricalTimeSeriesKey(null, currentDate, identifiers, dataSource, dataProvider, dataField);
    SubSeriesKey subseriesKey = new SubSeriesKey(start, end, maxPoints);
    ObjectsPair<HistoricalTimeSeriesKey, SubSeriesKey> key = Pair.of(seriesKey, subseriesKey);
    Element element = _dataCache.get(key);
    HistoricalTimeSeries hts;
    if (element != null) {
      hts = (HistoricalTimeSeries) element.getObjectValue();
      if (MISS.equals(hts)) {
        hts = null;
      } else if (!subseriesKey.isMatch(start, includeStart, end, includeEnd, maxPoints)) {
        // ...

  public HistoricalTimeSeries getHistoricalTimeSeries(
      String dataField, ExternalIdBundle identifierBundle, LocalDate identifierValidityDate, String resolutionKey) {
    ArgumentChecker.notNull(dataField, "dataField");
    ArgumentChecker.notEmpty(identifierBundle, "identifierBundle");
    HistoricalTimeSeriesKey key = new HistoricalTimeSeriesKey(resolutionKey, identifierValidityDate, identifierBundle, null, null, dataField);
    HistoricalTimeSeries hts = getFromDataCache(key);
    if (hts != null) {
      if (MISS.equals(hts)) {
        hts = null;
      }
    } else {
      hts = _underlying.getHistoricalTimeSeries(dataField, identifierBundle, identifierValidityDate, resolutionKey);
      if (hts != null) {
        s_logger.debug("Caching time-series {}", hts);
        _dataCache.put(new Element(key, hts));
        _dataCache.put(new Element(hts.getUniqueId(), hts));
      } else {
        s_logger.debug("Caching miss on {}", key);
        _dataCache.put(new Element(key, MISS));
      }
    }
    return hts;
  }
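
The MISS constant above implements negative caching: when the underlying source has no series for a key, the miss itself is cached so repeated requests for the same missing series do not hit the underlying source again. A minimal generic sketch of the same pattern (plain Java maps rather than the Ehcache-backed _dataCache; Key, Value and loadFromUnderlying are placeholders):

  private static final Object MISS = new Object();
  private final ConcurrentMap<Key, Object> _cache = new ConcurrentHashMap<Key, Object>();

  public Value get(Key key) {
    Object cached = _cache.get(key);
    if (cached != null) {
      return cached == MISS ? null : (Value) cached;  // hit, or a previously cached miss
    }
    Value loaded = loadFromUnderlying(key);            // the expensive lookup
    _cache.put(key, loaded != null ? loaded : MISS);   // cache the result, even if it is a miss
    return loaded;
  }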

  @Override
  public Pair<LocalDate, Double> getLatestDataPoint(
      String dataField, ExternalIdBundle identifierBundle, LocalDate identifierValidityDate, String resolutionKey,
      LocalDate start, boolean includeStart, LocalDate end, boolean includeEnd) {
    HistoricalTimeSeries hts = getHistoricalTimeSeries(
        dataField, identifierBundle, identifierValidityDate, resolutionKey,
        start, includeStart, end, includeEnd, -1);
    if (hts == null || hts.getTimeSeries() == null || hts.getTimeSeries().isEmpty()) {
      return null;
    } else {
      return new ObjectsPair<LocalDate, Double>(hts.getTimeSeries().getLatestTime(), hts.getTimeSeries().getLatestValue());
    }
  }

      LocalDate start, boolean includeStart, LocalDate end, boolean includeEnd, Integer maxPoints) {
    HistoricalTimeSeriesKey seriesKey = new HistoricalTimeSeriesKey(resolutionKey, identifierValidityDate, identifierBundle, null, null, dataField);
    SubSeriesKey subseriesKey = new SubSeriesKey(start, end, maxPoints);
    ObjectsPair<HistoricalTimeSeriesKey, SubSeriesKey> key = Pair.of(seriesKey, subseriesKey);
    Element element = _dataCache.get(key);
    HistoricalTimeSeries hts;
    if (element != null) {
      hts = (HistoricalTimeSeries) element.getObjectValue();
      if (MISS.equals(hts)) {
        hts = null;
      } else if (!subseriesKey.isMatch(start, includeStart, end, includeEnd, maxPoints)) {
        // Pick out the sub-series requested
        hts = getSubSeries(hts, start, includeStart, end, includeEnd, maxPoints);
      }
    } else {
      // If the full series is already cached, computing the requested sub-series from it could be faster than querying the underlying source
      Element fullHtsElement = _dataCache.get(seriesKey);
      if (fullHtsElement != null) {
        hts = getSubSeries((HistoricalTimeSeries) fullHtsElement.getObjectValue(), start, includeStart, end, includeEnd, maxPoints);
      } else {
        if (maxPoints == null) {
          hts = _underlying.getHistoricalTimeSeries(dataField, identifierBundle, identifierValidityDate, resolutionKey, subseriesKey.getStart(), true, subseriesKey.getEnd(),
              subseriesKey.getIncludeEnd());
        } else {
          hts = _underlying.getHistoricalTimeSeries(dataField, identifierBundle, identifierValidityDate, resolutionKey, subseriesKey.getStart(), true, subseriesKey.getEnd(),
              subseriesKey.getIncludeEnd(), subseriesKey.getMaxPoints());
        }
        if (hts != null) {
          s_logger.debug("Caching sub time-series {}", hts);
          _dataCache.put(new Element(key, hts));
          _dataCache.put(new Element(new ObjectsPair<UniqueId, SubSeriesKey>(hts.getUniqueId(), subseriesKey), hts));
          if (!subseriesKey.isMatch(start, includeStart, end, includeEnd, maxPoints)) {
            // Pick out the sub-series requested
            hts = getSubSeries(hts, start, includeStart, end, includeEnd, maxPoints);
          }
        } else {
          // ...
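
The SubSeriesKey mechanism above caches a normalised superset of the requested window and then trims it with getSubSeries when the exact start/end/maxPoints do not match. A generic sketch of what such a trim involves (a plain NavigableMap stands in for the time series; this is not the OpenGamma getSubSeries helper):

  // Trim a date-sorted series to [start, end] with inclusion flags and an optional point limit.
  // In the snippets above -1 is passed as maxPoints to fetch just the latest point, which suggests
  // negative values count back from the end of the series.
  NavigableMap<LocalDate, Double> subSeries(NavigableMap<LocalDate, Double> full,
      LocalDate start, boolean includeStart, LocalDate end, boolean includeEnd, Integer maxPoints) {
    NavigableMap<LocalDate, Double> window =
        new TreeMap<LocalDate, Double>(full.subMap(start, includeStart, end, includeEnd));
    if (maxPoints != null) {
      boolean fromEnd = maxPoints < 0;
      int limit = Math.abs(maxPoints);
      while (window.size() > limit) {
        window.remove(fromEnd ? window.firstKey() : window.lastKey());
      }
    }
    return window;
  }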

    }
    final double[] cs01 = bucketedCS01.getValues();
    int i = 0;
    for (final CurveNodeWithIdentifier node : nodes) {
      final ExternalIdBundle id = ExternalIdBundle.of(node.getIdentifier());
      final HistoricalTimeSeries hts = htsBundle.get(MarketDataRequirementNames.MARKET_VALUE, id);
      if (hts == null) {
        throw new OpenGammaRuntimeException("Could not get historical time series for " + id);
      }
      if (hts.getTimeSeries().isEmpty()) {
        throw new OpenGammaRuntimeException("Time series for " + id + " is empty");
      }
      DateDoubleTimeSeries<?> nodeTimeSeries = samplingFunction.getSampledTimeSeries(hts.getTimeSeries(), schedule);
      if (fxSeries != null) {
        if (isInverse) {
          nodeTimeSeries = nodeTimeSeries.divide(fxSeries);
        } else {
          nodeTimeSeries = nodeTimeSeries.multiply(fxSeries);
          // ...
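
The divide/multiply calls above convert the node series into the required currency point by point, pairing each date with the FX rate for that date. A generic sketch of the elementwise operation, assuming intersection semantics (dates missing from either series are dropped); plain maps are used instead of the OpenGamma DateDoubleTimeSeries API:

  SortedMap<LocalDate, Double> convert(SortedMap<LocalDate, Double> series,
      SortedMap<LocalDate, Double> fxSeries, boolean isInverse) {
    SortedMap<LocalDate, Double> result = new TreeMap<LocalDate, Double>();
    for (Map.Entry<LocalDate, Double> entry : series.entrySet()) {
      Double rate = fxSeries.get(entry.getKey());
      if (rate != null) {
        result.put(entry.getKey(), isInverse ? entry.getValue() / rate : entry.getValue() * rate);
      }
    }
    return result;
  }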

  }

  private void add(ManageableHistoricalTimeSeriesInfo sourceInfo, boolean verbose) {
    HistoricalTimeSeriesMasterUtils destinationMasterUtils = new HistoricalTimeSeriesMasterUtils(_destinationMaster);
    // The info (and its unique ID) comes from the source master, so the series data is read from there
    HistoricalTimeSeries series = _sourceMaster.getTimeSeries(sourceInfo.getUniqueId());
    destinationMasterUtils.writeTimeSeries(sourceInfo.getName(),
                                           sourceInfo.getDataSource(),
                                           sourceInfo.getDataProvider(),
                                           sourceInfo.getDataField(),
                                           sourceInfo.getObservationTime(),
                                           sourceInfo.getExternalIdBundle().toBundle(),
                                           series.getTimeSeries());
    if (verbose) {
      System.out.println("Added new time series to destination with " + series.getTimeSeries().size() + " data points");
    }
  }

    final ValueRequirement desiredValue = desiredValues.iterator().next();
    final Set<String> samplingPeriodName = desiredValue.getConstraints().getValues(ValuePropertyNames.SAMPLING_PERIOD);
    final Set<String> scheduleCalculatorName = desiredValue.getConstraints().getValues(ValuePropertyNames.SCHEDULE_CALCULATOR);
    final Set<String> samplingFunctionName = desiredValue.getConstraints().getValues(ValuePropertyNames.SAMPLING_FUNCTION);
    final Set<String> returnCalculatorName = desiredValue.getConstraints().getValues(ValuePropertyNames.RETURN_CALCULATOR);
    final HistoricalTimeSeries timeSeries = (HistoricalTimeSeries) inputs.getValue(ValueRequirementNames.HISTORICAL_TIME_SERIES);
    final SensitivityAndReturnDataBundle[] dataBundleArray = new SensitivityAndReturnDataBundle[1];
    final Double value = (Double) inputs.getValue(REQUIREMENT_NAME);
    final ValueGreek valueGreek = AvailableValueGreeks.getValueGreekForValueRequirementName(REQUIREMENT_NAME);
    final Sensitivity<?> sensitivity = new ValueGreekSensitivity(valueGreek, position.getUniqueId().toString());
    final Map<UnderlyingType, DoubleTimeSeries<?>> tsReturns = new HashMap<UnderlyingType, DoubleTimeSeries<?>>();
    final Period samplingPeriod = getSamplingPeriod(samplingPeriodName);
    final LocalDate startDate = now.minus(samplingPeriod);
    final Schedule scheduleCalculator = getScheduleCalculator(scheduleCalculatorName);
    final TimeSeriesSamplingFunction samplingFunction = getSamplingFunction(samplingFunctionName);
    final TimeSeriesReturnCalculator returnCalculator = getTimeSeriesReturnCalculator(returnCalculatorName);
    final LocalDate[] schedule = HOLIDAY_REMOVER.getStrippedSchedule(scheduleCalculator.getSchedule(startDate, now, true, false), WEEKEND_CALENDAR); //REVIEW emcleod should "fromEnd" be hard-coded?
    final LocalDateDoubleTimeSeries sampledTS = samplingFunction.getSampledTimeSeries(timeSeries.getTimeSeries(), schedule);
    for (final UnderlyingType underlyingType : valueGreek.getUnderlyingGreek().getUnderlying().getUnderlyings()) {
      if (underlyingType != UnderlyingType.SPOT_PRICE) {
        throw new OpenGammaRuntimeException("Have hard-coded to only use delta; should not have anything with " + underlyingType + " as the underlying type");
      }
      tsReturns.put(underlyingType, returnCalculator.evaluate(sampledTS));
      // ...
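
The returnCalculator above turns the sampled price series into a return series; the concrete calculator is selected via the RETURN_CALCULATOR constraint. A minimal sketch of one common choice, simple arithmetic returns r_t = p_t / p_{t-1} - 1 (placeholder types, not the OpenGamma TimeSeriesReturnCalculator):

  List<Double> simpleReturns(List<Double> prices) {
    List<Double> returns = new ArrayList<Double>();
    for (int i = 1; i < prices.size(); i++) {
      returns.add(prices.get(i) / prices.get(i - 1) - 1.0);
    }
    return returns;
  }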

    final Set<String> samplingFunctionName = desiredValue.getConstraints().getValues(ValuePropertyNames.SAMPLING_FUNCTION);
    final Period samplingPeriod = getSamplingPeriod(samplingPeriodName);
    final LocalDate startDate = now.minus(samplingPeriod);
    final Currency payCurrency = security.getNumerator();
    final Currency receiveCurrency = security.getDenominator();
    final HistoricalTimeSeries dbTimeSeries = (HistoricalTimeSeries) inputs.getValue(ValueRequirementNames.HISTORICAL_TIME_SERIES);
    if (dbTimeSeries == null) {
      throw new OpenGammaRuntimeException("Could not get identifier / price series pair for " + security);
    }
    DoubleTimeSeries<?> ts = dbTimeSeries.getTimeSeries();
    if (ts == null) {
      throw new OpenGammaRuntimeException("Could not get price series for " + security);
    }
    if (ts.isEmpty()) {
      throw new OpenGammaRuntimeException("Empty price series for " + security);
    }
    // TODO: If we know which way up we want the time series, don't request it in "convention order" and then lookup the convention again here, request it in
    // the desired order in getRequirements using a CurrencyPair
    final CurrencyPairs currencyPairs = OpenGammaExecutionContext.getCurrencyPairsSource(executionContext).getCurrencyPairs(CurrencyPairs.DEFAULT_CURRENCY_PAIRS);
    final CurrencyPair currencyPair = currencyPairs.getCurrencyPair(security.getNumerator(), security.getDenominator());
    if (!payCurrency.equals(currencyPair.getBase()) && receiveCurrency.equals(security.getCurrency())) {
      ts = ts.reciprocal();
    }
    final Object pvObject = inputs.getValue(new ValueRequirement(ValueRequirementNames.PRESENT_VALUE, ComputationTargetType.SECURITY, security.getUniqueId()));
    if (pvObject == null) {
      throw new OpenGammaRuntimeException("Present value was null");
    }
    final double pv = (Double) pvObject;
    final Schedule scheduleCalculator = getScheduleCalculator(scheduleCalculatorName);
    final TimeSeriesSamplingFunction samplingFunction = getSamplingFunction(samplingFunctionName);
    final LocalDate[] schedule = HOLIDAY_REMOVER.getStrippedSchedule(scheduleCalculator.getSchedule(startDate, now, true, false), WEEKEND_CALENDAR); //REVIEW emcleod should "fromEnd" be hard-coded?
    DateDoubleTimeSeries<?> pnlSeries = samplingFunction.getSampledTimeSeries(dbTimeSeries.getTimeSeries(), schedule);
    pnlSeries = DIFFERENCE.evaluate(pnlSeries);
    pnlSeries = pnlSeries.multiply(pv);
    final ValueSpecification spec = new ValueSpecification(ValueRequirementNames.PNL_SERIES, target.toSpecification(), desiredValue.getConstraints());
    return Collections.singleton(new ComputedValue(spec, pnlSeries));
  }
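
The P&L series above is built by first-differencing the sampled price series (DIFFERENCE.evaluate) and scaling each daily change by the position's present value. A small numeric sketch of that arithmetic with placeholder values:

  double[] prices = {1.300, 1.305, 1.298, 1.310};
  double pv = 1000000.0;
  double[] pnl = new double[prices.length - 1];
  for (int i = 1; i < prices.length; i++) {
    pnl[i - 1] = (prices[i] - prices[i - 1]) * pv;  // e.g. (1.305 - 1.300) * 1,000,000 = 5,000
  }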

        ExternalId id = provider.getInstrument(x, y, endDate);
        if (id.getScheme().equals(ExternalSchemes.BLOOMBERG_TICKER_WEAK)) {
          id = ExternalSchemes.bloombergTickerSecurityId(id.getValue());
        }
        final ExternalIdBundle identifier = ExternalIdBundle.of(id);
        final HistoricalTimeSeries tsForTicker = timeSeriesBundle.get(MarketDataRequirementNames.MARKET_VALUE, identifier);
        if (tsForTicker == null) {
          throw new OpenGammaRuntimeException("Could not get identifier / vol series for " + id);
        }
        final DoubleTimeSeries<?> volHistory = DIFFERENCE.evaluate(samplingFunction.getSampledTimeSeries(tsForTicker.getTimeSeries(), schedule));
        final double vega = vegas[j][i] / 100;
        if (vegaPnL == null) {
          vegaPnL = volHistory.multiply(vega);
        } else {
          vegaPnL = vegaPnL.add(volHistory.multiply(vega));
          // ...
