@@ -9,7 +9,9 @@

import com.fasterxml.jackson.core.JsonGenerator;
import com.google.common.collect.Iterators;
import com.google.common.collect.Range;
import com.powsybl.commons.json.JsonUtil;
import org.jspecify.annotations.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -118,6 +120,29 @@ public Iterator<P> iterator() {

protected abstract T createTimeSeries(C chunk);

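/**
* Extracts from chunkToSplit the sub-chunk covering the absolute point indexes
* [firstIndex, lastIndex] and appends it to splitChunks.
*/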
private void splitByFirstAndLastIndex(C chunkToSplit, List<C> splitChunks, int firstIndex, int lastIndex) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Split chunk [{}, {}]", firstIndex, lastIndex);
}
C newChunk;
// chunkToSplit covers [x0, y0]; extract the sub-chunk [firstIndex, lastIndex]
if (firstIndex == chunkToSplit.getOffset()) {
// firstIndex is already the chunk start: reuse the chunk as-is
newChunk = chunkToSplit;
} else {
// drop the head: newChunk = [firstIndex, y0]
DataChunk.Split<P, C> split = chunkToSplit.splitAt(firstIndex);
newChunk = split.getChunk2();
}
// the chunk's last absolute index is offset + length - 1
if (lastIndex == chunkToSplit.getOffset() + chunkToSplit.getLength() - 1) {
splitChunks.add(newChunk);
} else {
// chunkToSplit = [x0, y0] -> newChunk = [firstIndex, lastIndex]
DataChunk.Split<P, C> split = newChunk.splitAt(lastIndex + 1);
splitChunks.add(split.getChunk1());
}
}

private void split(C chunkToSplit, List<C> splitChunks, int newChunkSize) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Split chunk [{}, {}]", chunkToSplit.getOffset(), chunkToSplit.getOffset() + chunkToSplit.getLength() - 1);
@@ -184,6 +209,16 @@ public List<T> split(int newChunkSize) {
return splitNewChunks.stream().map(this::createTimeSeries).collect(Collectors.toList());
}

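/**
* Splits the stored chunks along the given closed point-index ranges: for each
* stored chunk, one sub-chunk covering each range is extracted.
*/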
public List<T> splitByRanges(List<Range<@NonNull Integer>> ranges) {
List<C> splitNewChunks = new ArrayList<>();
for (C chunkToSplit : getCheckedChunks(false)) {
for (Range<@NonNull Integer> range : ranges) {
splitByFirstAndLastIndex(chunkToSplit, splitNewChunks, range.lowerEndpoint(), range.upperEndpoint());
}
}
return splitNewChunks.stream().map(this::createTimeSeries).toList();
}

public void writeJson(JsonGenerator generator) {
Objects.requireNonNull(generator);
try {
@@ -9,9 +9,11 @@

import com.fasterxml.jackson.core.JsonGenerator;
import com.google.common.collect.Iterators;
import com.google.common.collect.Range;
import com.google.common.collect.Sets;
import com.powsybl.commons.json.JsonUtil;
import com.powsybl.timeseries.ast.*;
import org.jspecify.annotations.NonNull;

import java.io.IOException;
import java.io.UncheckedIOException;
@@ -254,6 +256,12 @@ public List<DoubleTimeSeries> split(int newChunkSize) {
return Collections.nCopies(chunkCount, this);
}

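// Not a chunk-based series: as in split(int) just above, simply replicate
// the series once per requested range.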
@Override
public List<DoubleTimeSeries> splitByRanges(List<Range<@NonNull Integer>> ranges) {
int chunkCount = ranges.size();
return Collections.nCopies(chunkCount, this);
}

@Override
public void writeJson(JsonGenerator generator) {
try {
@@ -11,6 +11,7 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Range;
import com.google.common.primitives.Doubles;
import com.powsybl.commons.json.JsonUtil;
import com.powsybl.commons.report.ReportNode;
@@ -20,6 +21,7 @@
import com.univocity.parsers.csv.CsvParser;
import com.univocity.parsers.csv.CsvParserSettings;
import gnu.trove.list.array.TDoubleArrayList;
import org.jspecify.annotations.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -78,6 +80,8 @@ enum TimeFormat {

List<T> split(int newChunkSize);

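/**
* Splits this time series along the given list of closed point-index ranges,
* returning one time series per range.
*/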
List<T> splitByRanges(List<Range<@NonNull Integer>> ranges);

void setTimeSeriesNameResolver(TimeSeriesNameResolver resolver);

static StoredDoubleTimeSeries createDouble(String name, TimeSeriesIndex index) {
@@ -121,27 +125,16 @@ static StringTimeSeries createString(String name, TimeSeriesIndex index, String.
}

static <P extends AbstractPoint, T extends TimeSeries<P, T>> List<List<T>> split(List<T> timeSeriesList, int newChunkSize) {
verifyTimeseriesList(timeSeriesList);
if (newChunkSize < 1) {
throw new IllegalArgumentException("Invalid chunk size: " + newChunkSize);
}
Set<TimeSeriesIndex> indexes = getTimeSeriesIndexes(timeSeriesList);
verifyIndexes(indexes);
TimeSeriesIndex index = indexes.iterator().next();
if (newChunkSize > index.getPointCount()) {
throw new IllegalArgumentException("New chunk size " + newChunkSize + " is greater than point count "
+ index.getPointCount());
}
int chunkCount = computeChunkCount(index, newChunkSize);
List<List<T>> splitList = new ArrayList<>(chunkCount);
@@ -157,6 +150,46 @@ static <P extends AbstractPoint, T extends TimeSeries<P, T>> List<List<T>> split
return splitList;
}

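/**
* Collects the distinct indexes of the given time series, ignoring infinite indexes.
*/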
private static <P extends AbstractPoint, T extends TimeSeries<P, T>> Set<TimeSeriesIndex> getTimeSeriesIndexes(List<T> timeSeriesList) {
return timeSeriesList.stream()
.map(ts -> ts.getMetadata().getIndex())
.filter(index -> !(index instanceof InfiniteTimeSeriesIndex))
.collect(Collectors.toSet());
}

private static void verifyIndexes(Set<TimeSeriesIndex> indexes) {
if (indexes.isEmpty()) {
throw new IllegalArgumentException("Cannot split a list of time series with only infinite index");
}
if (indexes.size() != 1) {
throw new IllegalArgumentException("Cannot split a list of time series with different time indexes: " + indexes);
}
}

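/**
* Splits every time series of the list along the same closed ranges and regroups
* the result per range: element i of the returned list contains the sub-series
* of each input series for ranges.get(i).
*/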
static <P extends AbstractPoint, T extends TimeSeries<P, T>> List<List<T>> splitByRanges(List<T> timeSeriesList, List<Range<@NonNull Integer>> ranges) {
verifyTimeseriesList(timeSeriesList);
verifyIndexes(getTimeSeriesIndexes(timeSeriesList));
int chunkCount = ranges.size();
List<List<T>> splitList = new ArrayList<>(chunkCount);
for (int i = 0; i < chunkCount; i++) {
splitList.add(new ArrayList<>(timeSeriesList.size()));
}
for (T timeSeries : timeSeriesList) {
List<T> split = timeSeries.splitByRanges(ranges);
for (int i = 0; i < chunkCount; i++) {
splitList.get(i).add(split.get(i));
}
}
return splitList;
}

private static <P extends AbstractPoint, T extends TimeSeries<P, T>> void verifyTimeseriesList(List<T> timeSeriesList) {
Objects.requireNonNull(timeSeriesList);
if (timeSeriesList.isEmpty()) {
throw new IllegalArgumentException("Time series list is empty");
}
}

static Map<Integer, List<TimeSeries>> parseCsv(Path file) {
return parseCsv(file, new TimeSeriesCsvConfig(), ReportNode.NO_OP);
}
@@ -9,10 +9,12 @@

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.google.common.collect.Range;
import com.google.common.collect.Sets;
import com.powsybl.commons.json.JsonUtil;
import com.powsybl.timeseries.ast.*;
import com.powsybl.timeseries.json.TimeSeriesJsonModule;
import org.jspecify.annotations.NonNull;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.threeten.extra.Interval;
@@ -62,6 +64,23 @@ void splitBigChunkTest() {
assertEquals(50, list.size());
}

@Test
void rangeSplitBigChunkTest() {
// GIVEN
List<Range<@NonNull Integer>> ranges = new ArrayList<>();
int startStep = 0;
int endStep = 99;
int rangeStep = 2;
for (int i = startStep; i < endStep; i += rangeStep) {
ranges.add(Range.closed(i, i + rangeStep - 1));
}
timeSeries.synchronize(new RegularTimeSeriesIndex(Instant.ofEpochMilli(startStep), Instant.ofEpochMilli(endStep), Duration.ofMillis(1)));
// WHEN
List<List<DoubleTimeSeries>> list = TimeSeries.splitByRanges(Collections.singletonList(timeSeries), ranges);
// THEN
assertEquals(ranges.size(), list.size());
}

@Test
void jsonTest() throws IOException {
TimeSeriesIndex index = RegularTimeSeriesIndex.create(Interval.parse("2015-01-01T00:00:00Z/2015-07-20T00:00:00Z"), Duration.ofDays(200));
@@ -9,8 +9,10 @@

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterators;
import com.google.common.collect.Range;
import com.powsybl.commons.json.JsonUtil;
import com.powsybl.timeseries.json.TimeSeriesJsonModule;
import org.jspecify.annotations.NonNull;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.threeten.extra.Interval;
@@ -83,7 +85,7 @@ void test() throws IOException {
assertEquals(jsonRef, json);
List<TimeSeries> timeSeriesList = TimeSeries.parseJson(json);
assertEquals(1, timeSeriesList.size());
String json2 = JsonUtil.toJson(timeSeriesList.getFirst()::writeJson);
assertEquals(json, json2);

// test json with object mapper
@@ -108,33 +110,67 @@ private void doSplitTest(int chunkposition, int totalsize) {
Mockito.when(index.getPointCount()).thenReturn(totalsize);
TimeSeriesMetadata metadata = new TimeSeriesMetadata("ts1", TimeSeriesDataType.DOUBLE, Collections.emptyMap(), index);
UncompressedDoubleDataChunk chunk = new UncompressedDoubleDataChunk(chunkposition,
new double[] {0d, 0d, 0d, 0d, 0d });
StoredDoubleTimeSeries timeSeries = new StoredDoubleTimeSeries(metadata, chunk);
List<DoubleTimeSeries> split = timeSeries.split(2);

// check there are 3 new chunks
assertEquals(3, split.size());

// check first chunk
assertInstanceOf(StoredDoubleTimeSeries.class, split.getFirst());
assertEquals(1, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().size());
assertInstanceOf(UncompressedDoubleDataChunk.class, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().getFirst());
assertEquals(chunkposition, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().getFirst().getOffset());
assertEquals(1, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().getFirst().getLength());

// check second chunk
assertInstanceOf(StoredDoubleTimeSeries.class, split.get(1));
assertEquals(1, ((StoredDoubleTimeSeries) split.get(1)).getChunks().size());
assertInstanceOf(UncompressedDoubleDataChunk.class, ((StoredDoubleTimeSeries) split.get(1)).getChunks().getFirst());
assertEquals(chunkposition + 1, ((StoredDoubleTimeSeries) split.get(1)).getChunks().getFirst().getOffset());
assertEquals(2, ((StoredDoubleTimeSeries) split.get(1)).getChunks().getFirst().getLength());

// check third chunk
assertInstanceOf(StoredDoubleTimeSeries.class, split.get(2));
assertEquals(1, ((StoredDoubleTimeSeries) split.get(2)).getChunks().size());
assertInstanceOf(UncompressedDoubleDataChunk.class, ((StoredDoubleTimeSeries) split.get(2)).getChunks().getFirst());
assertEquals(chunkposition + 3, ((StoredDoubleTimeSeries) split.get(2)).getChunks().getFirst().getOffset());
assertEquals(2, ((StoredDoubleTimeSeries) split.get(2)).getChunks().getFirst().getLength());
}

@Test
void rangesSplitTest() {
// [100, 123], [150, 872]
Range<@NonNull Integer> firstRange = Range.closed(100, 123);
Range<@NonNull Integer> secondRange = Range.closed(150, 872);
List<Range<@NonNull Integer>> ranges = List.of(firstRange, secondRange);
TimeSeriesIndex index = Mockito.mock(TimeSeriesIndex.class);
int chunkLength = 100000;
Mockito.when(index.getPointCount()).thenReturn(chunkLength);
TimeSeriesMetadata metadata = new TimeSeriesMetadata("ts1", TimeSeriesDataType.DOUBLE, Collections.emptyMap(), index);
double valueInChunk = 28d;
CompressedDoubleDataChunk chunk = new CompressedDoubleDataChunk(0, chunkLength, new double[]{valueInChunk}, new int[]{chunkLength});
StoredDoubleTimeSeries timeSeries = new StoredDoubleTimeSeries(metadata, chunk);
List<DoubleTimeSeries> split = timeSeries.splitByRanges(ranges);

// check there are 2 new chunks
assertEquals(2, split.size());

// check first chunk
assertInstanceOf(StoredDoubleTimeSeries.class, split.getFirst());
assertEquals(1, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().size());
assertInstanceOf(CompressedDoubleDataChunk.class, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().getFirst());
assertEquals(firstRange.lowerEndpoint(), ((StoredDoubleTimeSeries) split.getFirst()).getChunks().getFirst().getOffset());
assertEquals(firstRange.upperEndpoint() - firstRange.lowerEndpoint() + 1, ((StoredDoubleTimeSeries) split.getFirst()).getChunks().getFirst().getLength());
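// smoke check: materializing the compressed split series should not throw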
split.getFirst().toArray();

// check second chunk
assertInstanceOf(StoredDoubleTimeSeries.class, split.get(1));
assertEquals(1, ((StoredDoubleTimeSeries) split.get(1)).getChunks().size());
assertInstanceOf(CompressedDoubleDataChunk.class, ((StoredDoubleTimeSeries) split.get(1)).getChunks().getFirst());
assertEquals(secondRange.lowerEndpoint(), ((StoredDoubleTimeSeries) split.get(1)).getChunks().getFirst().getOffset());
assertEquals(secondRange.upperEndpoint() - secondRange.lowerEndpoint() + 1, ((StoredDoubleTimeSeries) split.get(1)).getChunks().getFirst().getLength());
}

@Test
@@ -161,31 +197,31 @@ void splitMultiChunkTimeSeriesTest() {
assertEquals(3, split.size());

// check first chunk
assertEquals(1, split.getFirst().size());
assertInstanceOf(StoredDoubleTimeSeries.class, split.getFirst().getFirst());
StoredDoubleTimeSeries ts = (StoredDoubleTimeSeries) split.getFirst().getFirst();
assertEquals(1, ts.getChunks().size());
assertInstanceOf(UncompressedDoubleDataChunk.class, ts.getChunks().getFirst());
assertEquals(0, ts.getChunks().getFirst().getOffset());
assertEquals(2, ts.getChunks().getFirst().getLength());

// check second chunk
assertEquals(1, split.get(1).size());
assertInstanceOf(StoredDoubleTimeSeries.class, split.get(1).getFirst());
ts = (StoredDoubleTimeSeries) split.get(1).getFirst();
assertEquals(1, ts.getChunks().size());
assertInstanceOf(UncompressedDoubleDataChunk.class, ts.getChunks().getFirst());
assertEquals(2, ts.getChunks().getFirst().getOffset());
assertEquals(2, ts.getChunks().getFirst().getLength());

// check third chunk
assertEquals(1, split.get(2).size());
assertInstanceOf(StoredDoubleTimeSeries.class, split.get(2).getFirst());
ts = (StoredDoubleTimeSeries) split.get(2).getFirst();
assertEquals(1, ts.getChunks().size());
assertInstanceOf(UncompressedDoubleDataChunk.class, ts.getChunks().getFirst());
assertEquals(4, ts.getChunks().getFirst().getOffset());
assertEquals(2, ts.getChunks().getFirst().getLength());
}

@Test